effect (string, 48 classes) | original_source_type (string, 0–23k chars) | opens_and_abbrevs (list, 2–92 items) | isa_cross_project_example (bool, 1 class) | source_definition (string, 9–57.9k chars) | partial_definition (string, 7–23.3k chars) | is_div (bool, 2 classes) | is_type (null) | is_proof (bool, 2 classes) | completed_definiton (string, 1–250k chars) | dependencies (dict) | effect_flags (sequence, 0–2 items) | ideal_premises (sequence, 0–236 items) | mutual_with (sequence, 0–11 items) | file_context (string, 0–407k chars) | interleaved (bool, 1 class) | is_simply_typed (bool, 2 classes) | file_name (string, 5–48 chars) | vconfig (dict) | is_simple_lemma (null) | source_type (string, 10–23k chars) | proof_features (sequence, 0–1 items) | name (string, 8–95 chars) | source (dict) | verbose_type (string, 1–7.42k chars) | source_range (dict) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
FStar.All.ML | val resolve_atomic_action (env: qenv) (ac: atomic_action) : ML atomic_action | [
{
"abbrev": true,
"full_module": "Hashtable",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "Ast",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let resolve_atomic_action (env:qenv) (ac:atomic_action) : ML atomic_action =
match ac with
| Action_return e -> Action_return (resolve_expr env e)
| Action_abort
| Action_field_pos_64
| Action_field_pos_32
| Action_field_ptr -> ac
| Action_field_ptr_after e write_to -> Action_field_ptr_after (resolve_expr env e) write_to
| Action_deref i -> Action_deref i //most certainly a type parameter
| Action_assignment lhs rhs ->
Action_assignment lhs (resolve_expr env rhs) //lhs is an action-local variable
| Action_call f args ->
Action_call (resolve_ident env f) (List.map (resolve_expr env) args) | val resolve_atomic_action (env: qenv) (ac: atomic_action) : ML atomic_action
let resolve_atomic_action (env: qenv) (ac: atomic_action) : ML atomic_action = | true | null | false | match ac with
| Action_return e -> Action_return (resolve_expr env e)
| Action_abort | Action_field_pos_64 | Action_field_pos_32 | Action_field_ptr -> ac
| Action_field_ptr_after e write_to -> Action_field_ptr_after (resolve_expr env e) write_to
| Action_deref i -> Action_deref i
| Action_assignment lhs rhs -> Action_assignment lhs (resolve_expr env rhs)
| Action_call f args -> Action_call (resolve_ident env f) (List.map (resolve_expr env) args) | {
"checked_file": "Desugar.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hashtable.fsti.checked",
"GlobalEnv.fst.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.All.fst.checked",
"Ast.fst.checked"
],
"interface_file": false,
"source_file": "Desugar.fst"
} | [
"ml"
] | [
"Desugar.qenv",
"Ast.atomic_action",
"Ast.expr",
"Ast.Action_return",
"Desugar.resolve_expr",
"Ast.out_expr",
"Ast.Action_field_ptr_after",
"Ast.ident",
"Ast.Action_deref",
"Ast.Action_assignment",
"Prims.list",
"Ast.Action_call",
"FStar.List.map",
"Desugar.resolve_ident"
] | [] | (*
Copyright 2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Desugar
open FStar.List.Tot
open FStar.Mul
open Ast
open FStar.All
module H = Hashtable
(* This module implements a pass over the source AST,
implementing various simple desugarings
* Desugar enums with newly defined idents and explicit constant
assignments to enums where all the tags are previously defined
constants.
* Resolve module-qualified names, including the use of module
abbreviations
* Set the kind (Spec/Output/Extern) in the type nodes
* Finds variables in Static expressions and hoists them as assumptions
and removes the Static
*)
let check_desugared_enum_cases (cases:list enum_case) : ML (list ident) =
List.map
(function
| (i, None) -> i
| (i, _) -> failwith "Enum should already have been desugared")
cases
let desugar_enum_cases (ityp:integer_type) (cases:list enum_case) (export:bool)
: ML (list enum_case & list decl) =
let find_definition (i:ident) (d:decl) =
match d.d_decl.v with
| Define j _ (Int _ _) ->
i.v = j.v
| _ ->
false
in
let _, cases_rev, ds_rev =
List.fold_left
(fun (next, cases_rev, ds_rev) (i, jopt) ->
let next =
match jopt with
| Some (Inl j) -> j
| Some (Inr j) ->
begin
match List.Tot.find (find_definition j) ds_rev with
| Some ({d_decl={v=Define _ _ (Int _ k)}}) -> k
| _ -> error (Printf.sprintf "Enum identifier %s not found" (print_ident j)) j.range
end
| None -> next
in
let case = (i, None) in
let d = mk_decl
(Define i None (Int ityp next))
i.range
["Enum constant"]
export
in
(next + 1,
case :: cases_rev,
d :: ds_rev))
(0, [], [])
cases
in
List.rev cases_rev,
List.rev ds_rev
let desugar_one_enum (d:decl) : ML (list decl) =
match d.d_decl.v with
| Enum t i cases ->
if List.for_all (fun (_, jopt) -> None? jopt) cases
then [d] //no enum value assignments; no desugaring to do
else //if we have any assignments at all, then we treat all the
//tags as fresh constants and assign them values in sequence
//with respect to the assigned values of preceding tags
let cases, ds = desugar_enum_cases (typ_as_integer_type t) cases d.d_exported in
let enum = decl_with_v d (Enum t i cases) in
ds@[enum]
| _ -> [d]
(* This code is currently not used
It desugars an Enum to a record with a single refined field *)
// let eliminate_enum (d:decl) : ML decl =
// match d.v with
// | Enum t i cases ->
// let names = {
// typedef_name = { i with v = { i.v with name=Ast.reserved_prefix ^ Ast.reserved_prefix ^ i.v.name }};
// typedef_abbrev = i;
// typedef_ptr_abbrev = { i with v = {i.v with
// name = Ast.reserved_prefix ^ Ast.reserved_prefix ^ "P" ^ i.v.name }};
// typedef_attributes = [];
// } in
// let params = [] in
// let where = None in
// let field_ident = with_dummy_range (to_ident' (Ast.reserved_prefix ^ "enum_field")) in
// let field_ident_expr = with_dummy_range (Identifier field_ident) in
// let field_constraint =
// List.fold_right
// (fun (case, _) out ->
// let eq = with_range (App Eq [field_ident_expr; with_range (Identifier case) case.range]) case.range in
// with_dummy_range (App Or [eq; out]))
// cases
// (with_dummy_range (Constant (Bool false)))
// in
// let field = {
// field_dependence = false;
// field_ident = field_ident;
// field_type = t;
// field_array_opt = FieldScalar;
// field_constraint = Some field_constraint;
// field_number = None;
// field_bitwidth = None;
// field_action = None
// } in
// let d' = Record names params where [with_dummy_range field] in
// {d with v = d'}
// | _ -> d
(*
* output_types and extern_types tables to set the kind in the Typ_app nodes
*)
noeq
type qenv = {
mname : string;
module_abbrevs : H.t string string;
output_types : H.t ident' unit;
extern_types : H.t ident' unit;
local_names : list string;
global_env: GlobalEnv.global_env;
}
let push_module_abbrev (env:qenv) (i m:string) : ML unit =
H.insert env.module_abbrevs i m
let push_output_type (env:qenv) (out_t:out_typ) : ML unit =
H.insert env.output_types out_t.out_typ_names.typedef_name.v ();
H.insert env.output_types out_t.out_typ_names.typedef_abbrev.v ()
let push_extern_type (env:qenv) (td:typedef_names) : ML unit =
H.insert env.extern_types td.typedef_name.v ();
H.insert env.extern_types td.typedef_abbrev.v ()
let push_name (env:qenv) (name:string) : qenv =
{ env with local_names = name::env.local_names }
let prim_consts = [
"unit"; "Bool"; "UINT8"; "UINT16"; "UINT32"; "UINT64";
"UINT8BE"; "UINT16BE"; "UINT32BE"; "UINT64BE";
"field_id"; "PUINT8";
"all_bytes"; "all_zeros";
"is_range_okay";
"void" ]
let resolve_ident (env:qenv) (i:ident) : ML ident =
let resolve_to_current_module i =
{ i with v = { i.v with modul_name = Some env.mname } }
in
let maybe_resolve_as_ifdef i
: ML ident
= match env.global_env.ge_cfg with
| None -> resolve_to_current_module i
| Some (cfg, cfg_module_name) ->
if List.mem i.v.name cfg.compile_time_flags.flags
then { i with v = { i.v with modul_name = Some cfg_module_name } }
else resolve_to_current_module i
in
if List.mem i.v.name prim_consts //it's a primitive constant, e.g. UINT8, leave as is
then i
else if List.mem i.v.name env.local_names //it's a local name (e.g. a parameter name)
then (if Some? i.v.modul_name //must have module name set to None
then error (Printf.sprintf
"Ident %s is a local name but has a qualifying modul %s"
i.v.name
(Some?.v i.v.modul_name))
i.range
else i) //return the local name as is
else (match i.v.modul_name with //it's a top-level name
| None -> maybe_resolve_as_ifdef i
| Some m -> //if already qualified, check if it is an abbreviation
(match H.try_find env.module_abbrevs m with
| None -> i
| Some m -> { i with v = { i.v with modul_name = Some m } }))
let rec collect_ifdef_guards (env:qenv) (e:expr)
: ML unit
= let check_resolved_to_ifdef i =
match env.global_env.ge_cfg with
| None -> false
| Some (cfg, cfg_module_name) ->
List.mem i.v.name cfg.compile_time_flags.flags
&& i.v.modul_name = Some cfg_module_name
in
match e.v with
| This -> error "'this' is not allowed in the guard of an #if" e.range
| Static _ -> failwith "static should have been eliminated already"
| Constant _ -> ()
| Identifier i ->
if not (check_resolved_to_ifdef i)
then error (Printf.sprintf "Identifier %s is not a compile-time macro but is used in a #if" i.v.name) e.range
| App op args ->
begin
match op with
| And
| Or
| Not -> List.iter (collect_ifdef_guards env) args
| _ -> error "Only boolean expressions over identifiers are supported in #if guards" e.range
end
let rec resolve_expr' (env:qenv) (e:expr') r : ML expr' =
match e with
| Constant _ -> e
| Identifier i -> Identifier (resolve_ident env i)
| This -> e
| Static e' ->
let e' = resolve_expr env e' in
collect_ifdef_guards env e';//mark any variables as top-level IfDef symbols
e'.v
| App op args ->
let args = List.map (resolve_expr env) args in
App op args
and resolve_expr (env:qenv) (e:expr) : ML expr = { e with v = resolve_expr' env e.v e.range }
let resolve_typ_param (env:qenv) (p:typ_param) : ML typ_param =
match p with
| Inl e -> resolve_expr env e |> Inl
| _ -> p //Currently not going inside output expressions, should we?
let kind_of_ident (env:qenv) (i:ident)
: ML t_kind
= let _or_ (b0 b1:bool) = b0 || b1 in
if Some? (H.try_find env.output_types i.v) `_or_`
Some? (H.try_find env.global_env.ge_out_t i.v)
then KindOutput
else if
Some? (H.try_find env.extern_types i.v) `_or_`
Some? (H.try_find env.global_env.ge_extern_t i.v)
then KindExtern
else KindSpec
let rec resolve_typ' (env:qenv) (t:typ') : ML typ' =
match t with
| Type_app hd _ args ->
let hd = resolve_ident env hd in
//Set is_out argument to the Type_app appropriately
let k = kind_of_ident env hd in
Type_app hd k (List.map (resolve_typ_param env) args)
| Pointer t -> Pointer (resolve_typ env t)
and resolve_typ (env:qenv) (t:typ) : ML typ = { t with v = resolve_typ' env t.v } | false | false | Desugar.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val resolve_atomic_action (env: qenv) (ac: atomic_action) : ML atomic_action | [] | Desugar.resolve_atomic_action | {
"file_name": "src/3d/Desugar.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | env: Desugar.qenv -> ac: Ast.atomic_action -> FStar.All.ML Ast.atomic_action | {
"end_col": 72,
"end_line": 287,
"start_col": 2,
"start_line": 276
} |
FStar.All.ML | val resolve_field_array_t (env: qenv) (farr: field_array_t) : ML field_array_t | [
{
"abbrev": true,
"full_module": "Hashtable",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "Ast",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let resolve_field_array_t (env:qenv) (farr:field_array_t) : ML field_array_t =
match farr with
| FieldScalar -> farr
| FieldArrayQualified (e, aq) ->
FieldArrayQualified (resolve_expr env e, aq)
| FieldString None -> farr
| FieldString (Some e) -> FieldString (Some (resolve_expr env e)) | val resolve_field_array_t (env: qenv) (farr: field_array_t) : ML field_array_t
let resolve_field_array_t (env: qenv) (farr: field_array_t) : ML field_array_t = | true | null | false | match farr with
| FieldScalar -> farr
| FieldArrayQualified (e, aq) -> FieldArrayQualified (resolve_expr env e, aq)
| FieldString None -> farr
| FieldString (Some e) -> FieldString (Some (resolve_expr env e)) | {
"checked_file": "Desugar.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hashtable.fsti.checked",
"GlobalEnv.fst.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.All.fst.checked",
"Ast.fst.checked"
],
"interface_file": false,
"source_file": "Desugar.fst"
} | [
"ml"
] | [
"Desugar.qenv",
"Ast.field_array_t",
"Ast.expr",
"Ast.array_qualifier",
"Ast.FieldArrayQualified",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.Mktuple2",
"Desugar.resolve_expr",
"Ast.FieldString",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.Some"
] | [] | (*
Copyright 2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Desugar
open FStar.List.Tot
open FStar.Mul
open Ast
open FStar.All
module H = Hashtable
(* This module implements a pass over the source AST,
implementing various simple desugarings
* Desugar enums with newly defined idents and explicit constant
assignments to enums where all the tags are previously defined
constants.
* Resolve module-qualified names, including the use of module
abbreviations
* Set the kind (Spec/Output/Extern) in the type nodes
* Finds variables in Static expressions and hoists them as assumptions
and removes the Static
*)
let check_desugared_enum_cases (cases:list enum_case) : ML (list ident) =
List.map
(function
| (i, None) -> i
| (i, _) -> failwith "Enum should already have been desugared")
cases
let desugar_enum_cases (ityp:integer_type) (cases:list enum_case) (export:bool)
: ML (list enum_case & list decl) =
let find_definition (i:ident) (d:decl) =
match d.d_decl.v with
| Define j _ (Int _ _) ->
i.v = j.v
| _ ->
false
in
let _, cases_rev, ds_rev =
List.fold_left
(fun (next, cases_rev, ds_rev) (i, jopt) ->
let next =
match jopt with
| Some (Inl j) -> j
| Some (Inr j) ->
begin
match List.Tot.find (find_definition j) ds_rev with
| Some ({d_decl={v=Define _ _ (Int _ k)}}) -> k
| _ -> error (Printf.sprintf "Enum identifier %s not found" (print_ident j)) j.range
end
| None -> next
in
let case = (i, None) in
let d = mk_decl
(Define i None (Int ityp next))
i.range
["Enum constant"]
export
in
(next + 1,
case :: cases_rev,
d :: ds_rev))
(0, [], [])
cases
in
List.rev cases_rev,
List.rev ds_rev
let desugar_one_enum (d:decl) : ML (list decl) =
match d.d_decl.v with
| Enum t i cases ->
if List.for_all (fun (_, jopt) -> None? jopt) cases
then [d] //no enum value assignments; no desugaring to do
else //if we have any assignments at all, then we treat all the
//tags as fresh constants and assign them values in sequence
//with respect to the assigned values of preceding tags
let cases, ds = desugar_enum_cases (typ_as_integer_type t) cases d.d_exported in
let enum = decl_with_v d (Enum t i cases) in
ds@[enum]
| _ -> [d]
(* This code is currently not used
It desugars an Enum to a record with a single refined field *)
// let eliminate_enum (d:decl) : ML decl =
// match d.v with
// | Enum t i cases ->
// let names = {
// typedef_name = { i with v = { i.v with name=Ast.reserved_prefix ^ Ast.reserved_prefix ^ i.v.name }};
// typedef_abbrev = i;
// typedef_ptr_abbrev = { i with v = {i.v with
// name = Ast.reserved_prefix ^ Ast.reserved_prefix ^ "P" ^ i.v.name }};
// typedef_attributes = [];
// } in
// let params = [] in
// let where = None in
// let field_ident = with_dummy_range (to_ident' (Ast.reserved_prefix ^ "enum_field")) in
// let field_ident_expr = with_dummy_range (Identifier field_ident) in
// let field_constraint =
// List.fold_right
// (fun (case, _) out ->
// let eq = with_range (App Eq [field_ident_expr; with_range (Identifier case) case.range]) case.range in
// with_dummy_range (App Or [eq; out]))
// cases
// (with_dummy_range (Constant (Bool false)))
// in
// let field = {
// field_dependence = false;
// field_ident = field_ident;
// field_type = t;
// field_array_opt = FieldScalar;
// field_constraint = Some field_constraint;
// field_number = None;
// field_bitwidth = None;
// field_action = None
// } in
// let d' = Record names params where [with_dummy_range field] in
// {d with v = d'}
// | _ -> d
(*
* output_types and extern_types tables to set the kind in the Typ_app nodes
*)
noeq
type qenv = {
mname : string;
module_abbrevs : H.t string string;
output_types : H.t ident' unit;
extern_types : H.t ident' unit;
local_names : list string;
global_env: GlobalEnv.global_env;
}
let push_module_abbrev (env:qenv) (i m:string) : ML unit =
H.insert env.module_abbrevs i m
let push_output_type (env:qenv) (out_t:out_typ) : ML unit =
H.insert env.output_types out_t.out_typ_names.typedef_name.v ();
H.insert env.output_types out_t.out_typ_names.typedef_abbrev.v ()
let push_extern_type (env:qenv) (td:typedef_names) : ML unit =
H.insert env.extern_types td.typedef_name.v ();
H.insert env.extern_types td.typedef_abbrev.v ()
let push_name (env:qenv) (name:string) : qenv =
{ env with local_names = name::env.local_names }
let prim_consts = [
"unit"; "Bool"; "UINT8"; "UINT16"; "UINT32"; "UINT64";
"UINT8BE"; "UINT16BE"; "UINT32BE"; "UINT64BE";
"field_id"; "PUINT8";
"all_bytes"; "all_zeros";
"is_range_okay";
"void" ]
let resolve_ident (env:qenv) (i:ident) : ML ident =
let resolve_to_current_module i =
{ i with v = { i.v with modul_name = Some env.mname } }
in
let maybe_resolve_as_ifdef i
: ML ident
= match env.global_env.ge_cfg with
| None -> resolve_to_current_module i
| Some (cfg, cfg_module_name) ->
if List.mem i.v.name cfg.compile_time_flags.flags
then { i with v = { i.v with modul_name = Some cfg_module_name } }
else resolve_to_current_module i
in
if List.mem i.v.name prim_consts //it's a primitive constant, e.g. UINT8, leave as is
then i
else if List.mem i.v.name env.local_names //it's a local name (e.g. a parameter name)
then (if Some? i.v.modul_name //must have module name set to None
then error (Printf.sprintf
"Ident %s is a local name but has a qualifying modul %s"
i.v.name
(Some?.v i.v.modul_name))
i.range
else i) //return the local name as is
else (match i.v.modul_name with //it's a top-level name
| None -> maybe_resolve_as_ifdef i
| Some m -> //if already qualified, check if it is an abbreviation
(match H.try_find env.module_abbrevs m with
| None -> i
| Some m -> { i with v = { i.v with modul_name = Some m } }))
let rec collect_ifdef_guards (env:qenv) (e:expr)
: ML unit
= let check_resolved_to_ifdef i =
match env.global_env.ge_cfg with
| None -> false
| Some (cfg, cfg_module_name) ->
List.mem i.v.name cfg.compile_time_flags.flags
&& i.v.modul_name = Some cfg_module_name
in
match e.v with
| This -> error "'this' is not allowed in the guard of an #if" e.range
| Static _ -> failwith "static should have been eliminated already"
| Constant _ -> ()
| Identifier i ->
if not (check_resolved_to_ifdef i)
then error (Printf.sprintf "Identifier %s is not a compile-time macro but is used in a #if" i.v.name) e.range
| App op args ->
begin
match op with
| And
| Or
| Not -> List.iter (collect_ifdef_guards env) args
| _ -> error "Only boolean expressions over identifiers are supported in #if guards" e.range
end
let rec resolve_expr' (env:qenv) (e:expr') r : ML expr' =
match e with
| Constant _ -> e
| Identifier i -> Identifier (resolve_ident env i)
| This -> e
| Static e' ->
let e' = resolve_expr env e' in
collect_ifdef_guards env e';//mark any variables as top-level IfDef symbols
e'.v
| App op args ->
let args = List.map (resolve_expr env) args in
App op args
and resolve_expr (env:qenv) (e:expr) : ML expr = { e with v = resolve_expr' env e.v e.range }
let resolve_typ_param (env:qenv) (p:typ_param) : ML typ_param =
match p with
| Inl e -> resolve_expr env e |> Inl
| _ -> p //Currently not going inside output expressions, should we?
let kind_of_ident (env:qenv) (i:ident)
: ML t_kind
= let _or_ (b0 b1:bool) = b0 || b1 in
if Some? (H.try_find env.output_types i.v) `_or_`
Some? (H.try_find env.global_env.ge_out_t i.v)
then KindOutput
else if
Some? (H.try_find env.extern_types i.v) `_or_`
Some? (H.try_find env.global_env.ge_extern_t i.v)
then KindExtern
else KindSpec
let rec resolve_typ' (env:qenv) (t:typ') : ML typ' =
match t with
| Type_app hd _ args ->
let hd = resolve_ident env hd in
//Set is_out argument to the Type_app appropriately
let k = kind_of_ident env hd in
Type_app hd k (List.map (resolve_typ_param env) args)
| Pointer t -> Pointer (resolve_typ env t)
and resolve_typ (env:qenv) (t:typ) : ML typ = { t with v = resolve_typ' env t.v }
let resolve_atomic_action (env:qenv) (ac:atomic_action) : ML atomic_action =
match ac with
| Action_return e -> Action_return (resolve_expr env e)
| Action_abort
| Action_field_pos_64
| Action_field_pos_32
| Action_field_ptr -> ac
| Action_field_ptr_after e write_to -> Action_field_ptr_after (resolve_expr env e) write_to
| Action_deref i -> Action_deref i //most certainly a type parameter
| Action_assignment lhs rhs ->
Action_assignment lhs (resolve_expr env rhs) //lhs is an action-local variable
| Action_call f args ->
Action_call (resolve_ident env f) (List.map (resolve_expr env) args)
let rec resolve_action' (env:qenv) (act:action') : ML action' =
match act with
| Atomic_action ac -> Atomic_action (resolve_atomic_action env ac)
| Action_seq hd tl ->
Action_seq (resolve_atomic_action env hd) (resolve_action env tl)
| Action_ite hd then_ else_ ->
Action_ite (resolve_expr env hd) (resolve_action env then_) (map_opt (resolve_action env) else_)
| Action_let i a k ->
Action_let i (resolve_atomic_action env a) (resolve_action (push_name env i.v.name) k)
| Action_act a ->
Action_act (resolve_action env a)
and resolve_action (env:qenv) (act:action) : ML action =
{ act with v = resolve_action' env act.v }
let resolve_param (env:qenv) (p:param) : ML (param & qenv) =
let t, i, q = p in
(resolve_typ env t, i, q),
push_name env i.v.name
let resolve_params (env:qenv) (params:list param) : ML (list param & qenv) =
List.fold_left (fun (params, env) p ->
let p, env = resolve_param env p in
params@[p], env) ([], env) params
let resolve_field_bitwidth_t (env:qenv) (fb:field_bitwidth_t) : ML field_bitwidth_t =
let resolve_bitfield_attr' (env:qenv) (b:bitfield_attr') : ML bitfield_attr' =
{ b with bitfield_type = resolve_typ env b.bitfield_type } in
let resolve_bitfield_attr (env:qenv) (b:bitfield_attr) : ML bitfield_attr =
{ b with v = resolve_bitfield_attr' env b.v } in
match fb with
| Inl _ -> fb
| Inr b -> Inr (resolve_bitfield_attr env b) | false | false | Desugar.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val resolve_field_array_t (env: qenv) (farr: field_array_t) : ML field_array_t | [] | Desugar.resolve_field_array_t | {
"file_name": "src/3d/Desugar.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | env: Desugar.qenv -> farr: Ast.field_array_t -> FStar.All.ML Ast.field_array_t | {
"end_col": 67,
"end_line": 331,
"start_col": 2,
"start_line": 326
} |
FStar.All.ML | val resolve_decl' (env: qenv) (d: decl') : ML decl' | [
{
"abbrev": true,
"full_module": "Hashtable",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "Ast",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let resolve_decl' (env:qenv) (d:decl') : ML decl' =
match d with
| ModuleAbbrev i m -> push_module_abbrev env i.v.name m.v.name; d
| Define i topt c ->
Define (resolve_ident env i) (map_opt (resolve_typ env) topt) c
| TypeAbbrev t i ->
TypeAbbrev (resolve_typ env t) (resolve_ident env i)
| Enum t i ecs ->
Enum (resolve_typ env t) (resolve_ident env i) (List.map (resolve_enum_case env) ecs)
| Record td_names params where flds ->
let td_names = resolve_typedef_names env td_names in
let params, env = resolve_params env params in
let where = map_opt (resolve_expr env) where in
let flds, _ = resolve_fields env flds in
Record td_names params where flds
| CaseType td_names params sc ->
let td_names = resolve_typedef_names env td_names in
let params, env = resolve_params env params in
let sc = resolve_switch_case env sc in
CaseType td_names params sc
| OutputType out_t ->
let out_t = resolve_out_type env out_t in
push_output_type env out_t;
OutputType out_t
| ExternType td_names ->
let td_names = resolve_typedef_names env td_names in
push_extern_type env td_names;
ExternType td_names
| ExternFn id ret params ->
let id = resolve_ident env id in
let ret = resolve_typ env ret in
let params, _ = resolve_params env params in
ExternFn id ret params | val resolve_decl' (env: qenv) (d: decl') : ML decl'
let resolve_decl' (env: qenv) (d: decl') : ML decl' = | true | null | false | match d with
| ModuleAbbrev i m ->
push_module_abbrev env i.v.name m.v.name;
d
| Define i topt c -> Define (resolve_ident env i) (map_opt (resolve_typ env) topt) c
| TypeAbbrev t i -> TypeAbbrev (resolve_typ env t) (resolve_ident env i)
| Enum t i ecs ->
Enum (resolve_typ env t) (resolve_ident env i) (List.map (resolve_enum_case env) ecs)
| Record td_names params where flds ->
let td_names = resolve_typedef_names env td_names in
let params, env = resolve_params env params in
let where = map_opt (resolve_expr env) where in
let flds, _ = resolve_fields env flds in
Record td_names params where flds
| CaseType td_names params sc ->
let td_names = resolve_typedef_names env td_names in
let params, env = resolve_params env params in
let sc = resolve_switch_case env sc in
CaseType td_names params sc
| OutputType out_t ->
let out_t = resolve_out_type env out_t in
push_output_type env out_t;
OutputType out_t
| ExternType td_names ->
let td_names = resolve_typedef_names env td_names in
push_extern_type env td_names;
ExternType td_names
| ExternFn id ret params ->
let id = resolve_ident env id in
let ret = resolve_typ env ret in
let params, _ = resolve_params env params in
ExternFn id ret params | {
"checked_file": "Desugar.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hashtable.fsti.checked",
"GlobalEnv.fst.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.All.fst.checked",
"Ast.fst.checked"
],
"interface_file": false,
"source_file": "Desugar.fst"
} | [
"ml"
] | [
"Desugar.qenv",
"Ast.decl'",
"Ast.ident",
"Prims.unit",
"Desugar.push_module_abbrev",
"Ast.__proj__Mkident'__item__name",
"Ast.__proj__Mkwith_meta_t__item__v",
"Ast.ident'",
"FStar.Pervasives.Native.option",
"Ast.typ",
"Ast.constant",
"Ast.Define",
"Ast.map_opt",
"Desugar.resolve_typ",
"Desugar.resolve_ident",
"Ast.TypeAbbrev",
"Prims.list",
"Ast.enum_case",
"Ast.Enum",
"FStar.List.map",
"Desugar.resolve_enum_case",
"Ast.typedef_names",
"Ast.param",
"Ast.expr",
"Ast.record",
"Ast.field",
"Ast.Record",
"FStar.Pervasives.Native.tuple2",
"Desugar.resolve_fields",
"Desugar.resolve_expr",
"Desugar.resolve_params",
"Desugar.resolve_typedef_names",
"Ast.switch_case",
"Ast.CaseType",
"Desugar.resolve_switch_case",
"Ast.out_typ",
"Ast.OutputType",
"Desugar.push_output_type",
"Desugar.resolve_out_type",
"Ast.ExternType",
"Desugar.push_extern_type",
"Ast.ExternFn"
] | [] | (*
Copyright 2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Desugar
open FStar.List.Tot
open FStar.Mul
open Ast
open FStar.All
module H = Hashtable
(* This module implements a pass over the source AST,
implementing various simple desugarings
* Desugar enums with newly defined idents and explicit constant
assignments to enums where all the tags are previously defined
constants.
* Resolve module-qualified names, including the use of module
abbreviations
* Set the kind (Spec/Output/Extern) in the type nodes
* Finds variables in Static expressions and hoists them as assumptions
and removes the Static
*)
let check_desugared_enum_cases (cases:list enum_case) : ML (list ident) =
List.map
(function
| (i, None) -> i
| (i, _) -> failwith "Enum should already have been desugared")
cases
let desugar_enum_cases (ityp:integer_type) (cases:list enum_case) (export:bool)
: ML (list enum_case & list decl) =
let find_definition (i:ident) (d:decl) =
match d.d_decl.v with
| Define j _ (Int _ _) ->
i.v = j.v
| _ ->
false
in
let _, cases_rev, ds_rev =
List.fold_left
(fun (next, cases_rev, ds_rev) (i, jopt) ->
let next =
match jopt with
| Some (Inl j) -> j
| Some (Inr j) ->
begin
match List.Tot.find (find_definition j) ds_rev with
| Some ({d_decl={v=Define _ _ (Int _ k)}}) -> k
| _ -> error (Printf.sprintf "Enum identifier %s not found" (print_ident j)) j.range
end
| None -> next
in
let case = (i, None) in
let d = mk_decl
(Define i None (Int ityp next))
i.range
["Enum constant"]
export
in
(next + 1,
case :: cases_rev,
d :: ds_rev))
(0, [], [])
cases
in
List.rev cases_rev,
List.rev ds_rev
let desugar_one_enum (d:decl) : ML (list decl) =
match d.d_decl.v with
| Enum t i cases ->
if List.for_all (fun (_, jopt) -> None? jopt) cases
then [d] //no enum value assignments; no desugaring to do
else //if we have any assignments at all, then we treat all the
//tags as fresh constants and assign them values in sequence
//with respect to the assigned values of preceding tags
let cases, ds = desugar_enum_cases (typ_as_integer_type t) cases d.d_exported in
let enum = decl_with_v d (Enum t i cases) in
ds@[enum]
| _ -> [d]
(* This code is currently not used
It desugars an Enum to a record with a single refined field *)
// let eliminate_enum (d:decl) : ML decl =
// match d.v with
// | Enum t i cases ->
// let names = {
// typedef_name = { i with v = { i.v with name=Ast.reserved_prefix ^ Ast.reserved_prefix ^ i.v.name }};
// typedef_abbrev = i;
// typedef_ptr_abbrev = { i with v = {i.v with
// name = Ast.reserved_prefix ^ Ast.reserved_prefix ^ "P" ^ i.v.name }};
// typedef_attributes = [];
// } in
// let params = [] in
// let where = None in
// let field_ident = with_dummy_range (to_ident' (Ast.reserved_prefix ^ "enum_field")) in
// let field_ident_expr = with_dummy_range (Identifier field_ident) in
// let field_constraint =
// List.fold_right
// (fun (case, _) out ->
// let eq = with_range (App Eq [field_ident_expr; with_range (Identifier case) case.range]) case.range in
// with_dummy_range (App Or [eq; out]))
// cases
// (with_dummy_range (Constant (Bool false)))
// in
// let field = {
// field_dependence = false;
// field_ident = field_ident;
// field_type = t;
// field_array_opt = FieldScalar;
// field_constraint = Some field_constraint;
// field_number = None;
// field_bitwidth = None;
// field_action = None
// } in
// let d' = Record names params where [with_dummy_range field] in
// {d with v = d'}
// | _ -> d
(*
* output_types and extern_types tables to set the kind in the Typ_app nodes
*)
noeq
type qenv = {
mname : string;
module_abbrevs : H.t string string;
output_types : H.t ident' unit;
extern_types : H.t ident' unit;
local_names : list string;
global_env: GlobalEnv.global_env;
}
let push_module_abbrev (env:qenv) (i m:string) : ML unit =
H.insert env.module_abbrevs i m
let push_output_type (env:qenv) (out_t:out_typ) : ML unit =
H.insert env.output_types out_t.out_typ_names.typedef_name.v ();
H.insert env.output_types out_t.out_typ_names.typedef_abbrev.v ()
let push_extern_type (env:qenv) (td:typedef_names) : ML unit =
H.insert env.extern_types td.typedef_name.v ();
H.insert env.extern_types td.typedef_abbrev.v ()
let push_name (env:qenv) (name:string) : qenv =
{ env with local_names = name::env.local_names }
let prim_consts = [
"unit"; "Bool"; "UINT8"; "UINT16"; "UINT32"; "UINT64";
"UINT8BE"; "UINT16BE"; "UINT32BE"; "UINT64BE";
"field_id"; "PUINT8";
"all_bytes"; "all_zeros";
"is_range_okay";
"void" ]
let resolve_ident (env:qenv) (i:ident) : ML ident =
let resolve_to_current_module i =
{ i with v = { i.v with modul_name = Some env.mname } }
in
let maybe_resolve_as_ifdef i
: ML ident
= match env.global_env.ge_cfg with
| None -> resolve_to_current_module i
| Some (cfg, cfg_module_name) ->
if List.mem i.v.name cfg.compile_time_flags.flags
then { i with v = { i.v with modul_name = Some cfg_module_name } }
else resolve_to_current_module i
in
if List.mem i.v.name prim_consts //it's a primitive constant, e.g. UINT8, leave as is
then i
else if List.mem i.v.name env.local_names //it's a local name (e.g. a parameter name)
then (if Some? i.v.modul_name //must have module name set to None
then error (Printf.sprintf
"Ident %s is a local name but has a qualifying modul %s"
i.v.name
(Some?.v i.v.modul_name))
i.range
else i) //return the local name as is
else (match i.v.modul_name with //it's a top-level name
| None -> maybe_resolve_as_ifdef i
| Some m -> //if already qualified, check if it is an abbreviation
(match H.try_find env.module_abbrevs m with
| None -> i
| Some m -> { i with v = { i.v with modul_name = Some m } }))
let rec collect_ifdef_guards (env:qenv) (e:expr)
: ML unit
= let check_resolved_to_ifdef i =
match env.global_env.ge_cfg with
| None -> false
| Some (cfg, cfg_module_name) ->
List.mem i.v.name cfg.compile_time_flags.flags
&& i.v.modul_name = Some cfg_module_name
in
match e.v with
| This -> error "'this' is not allowed in the guard of an #if" e.range
| Static _ -> failwith "static should have been eliminated already"
| Constant _ -> ()
| Identifier i ->
if not (check_resolved_to_ifdef i)
then error (Printf.sprintf "Identifier %s is not a compile-time macro but is used in a #if" i.v.name) e.range
| App op args ->
begin
match op with
| And
| Or
| Not -> List.iter (collect_ifdef_guards env) args
| _ -> error "Only boolean expressions over identifiers are supported in #if guards" e.range
end
let rec resolve_expr' (env:qenv) (e:expr') r : ML expr' =
match e with
| Constant _ -> e
| Identifier i -> Identifier (resolve_ident env i)
| This -> e
| Static e' ->
let e' = resolve_expr env e' in
collect_ifdef_guards env e';//mark any variables as top-level IfDef symbols
e'.v
| App op args ->
let args = List.map (resolve_expr env) args in
App op args
and resolve_expr (env:qenv) (e:expr) : ML expr = { e with v = resolve_expr' env e.v e.range }
let resolve_typ_param (env:qenv) (p:typ_param) : ML typ_param =
match p with
| Inl e -> resolve_expr env e |> Inl
| _ -> p //Currently not going inside output expressions, should we?
let kind_of_ident (env:qenv) (i:ident)
: ML t_kind
= let _or_ (b0 b1:bool) = b0 || b1 in
if Some? (H.try_find env.output_types i.v) `_or_`
Some? (H.try_find env.global_env.ge_out_t i.v)
then KindOutput
else if
Some? (H.try_find env.extern_types i.v) `_or_`
Some? (H.try_find env.global_env.ge_extern_t i.v)
then KindExtern
else KindSpec
let rec resolve_typ' (env:qenv) (t:typ') : ML typ' =
match t with
| Type_app hd _ args ->
let hd = resolve_ident env hd in
//Set is_out argument to the Type_app appropriately
let k = kind_of_ident env hd in
Type_app hd k (List.map (resolve_typ_param env) args)
| Pointer t -> Pointer (resolve_typ env t)
and resolve_typ (env:qenv) (t:typ) : ML typ = { t with v = resolve_typ' env t.v }
let resolve_atomic_action (env:qenv) (ac:atomic_action) : ML atomic_action =
match ac with
| Action_return e -> Action_return (resolve_expr env e)
| Action_abort
| Action_field_pos_64
| Action_field_pos_32
| Action_field_ptr -> ac
| Action_field_ptr_after e write_to -> Action_field_ptr_after (resolve_expr env e) write_to
| Action_deref i -> Action_deref i //most certainly a type parameter
| Action_assignment lhs rhs ->
Action_assignment lhs (resolve_expr env rhs) //lhs is an action-local variable
| Action_call f args ->
Action_call (resolve_ident env f) (List.map (resolve_expr env) args)
let rec resolve_action' (env:qenv) (act:action') : ML action' =
match act with
| Atomic_action ac -> Atomic_action (resolve_atomic_action env ac)
| Action_seq hd tl ->
Action_seq (resolve_atomic_action env hd) (resolve_action env tl)
| Action_ite hd then_ else_ ->
Action_ite (resolve_expr env hd) (resolve_action env then_) (map_opt (resolve_action env) else_)
| Action_let i a k ->
Action_let i (resolve_atomic_action env a) (resolve_action (push_name env i.v.name) k)
| Action_act a ->
Action_act (resolve_action env a)
and resolve_action (env:qenv) (act:action) : ML action =
{ act with v = resolve_action' env act.v }
let resolve_param (env:qenv) (p:param) : ML (param & qenv) =
let t, i, q = p in
(resolve_typ env t, i, q),
push_name env i.v.name
let resolve_params (env:qenv) (params:list param) : ML (list param & qenv) =
List.fold_left (fun (params, env) p ->
let p, env = resolve_param env p in
params@[p], env) ([], env) params
let resolve_field_bitwidth_t (env:qenv) (fb:field_bitwidth_t) : ML field_bitwidth_t =
let resolve_bitfield_attr' (env:qenv) (b:bitfield_attr') : ML bitfield_attr' =
{ b with bitfield_type = resolve_typ env b.bitfield_type } in
let resolve_bitfield_attr (env:qenv) (b:bitfield_attr) : ML bitfield_attr =
{ b with v = resolve_bitfield_attr' env b.v } in
match fb with
| Inl _ -> fb
| Inr b -> Inr (resolve_bitfield_attr env b)
let resolve_field_array_t (env:qenv) (farr:field_array_t) : ML field_array_t =
match farr with
| FieldScalar -> farr
| FieldArrayQualified (e, aq) ->
FieldArrayQualified (resolve_expr env e, aq)
| FieldString None -> farr
| FieldString (Some e) -> FieldString (Some (resolve_expr env e))
let rec resolve_field (env:qenv) (ff:field) : ML (field & qenv) =
match ff.v with
| AtomicField f -> let f, e = resolve_atomic_field env f in {ff with v = AtomicField f}, e
| RecordField f i -> let fs, _ = resolve_fields env f in {ff with v = RecordField fs i}, env //record fields are not in scope outside the record
| SwitchCaseField f i -> let f = resolve_switch_case env f in {ff with v = SwitchCaseField f i}, env
and resolve_atomic_field (env:qenv) (f:atomic_field) : ML (atomic_field & qenv) =
let resolve_atomic_field' (env:qenv) (sf:atomic_field') : ML atomic_field' =
{ sf with
field_type = resolve_typ env sf.field_type;
field_array_opt = resolve_field_array_t env sf.field_array_opt;
field_constraint = map_opt (resolve_expr env) sf.field_constraint;
field_bitwidth = map_opt (resolve_field_bitwidth_t env) sf.field_bitwidth;
field_action = map_opt (fun (a, b) -> resolve_action env a, b) sf.field_action } in
let env = push_name env f.v.field_ident.v.name in
{ f with v = resolve_atomic_field' env f.v }, env
and resolve_fields (env:qenv) (flds:list field) : ML (list field & qenv) =
List.fold_left (fun (flds, env) f ->
let f, env = resolve_field env f in
flds@[f], env) ([], env) flds
and resolve_switch_case (env:qenv) (sc:switch_case) : ML switch_case = //case fields do not escape their scope
let resolve_case (env:qenv) (c:case) : ML case =
match c with
| Case e f -> Case (resolve_expr env e) (fst (resolve_field env f))
| DefaultCase f -> DefaultCase (fst (resolve_field env f)) in
let e, l = sc in
resolve_expr env e, List.map (resolve_case env) l
let resolve_typedef_names (env:qenv) (td_names:typedef_names) : ML typedef_names =
{ td_names with
typedef_name = resolve_ident env td_names.typedef_name;
typedef_abbrev = resolve_ident env td_names.typedef_abbrev;
typedef_ptr_abbrev = resolve_ident env td_names.typedef_ptr_abbrev }
let resolve_enum_case (env:qenv) (ec:enum_case) : ML enum_case =
match ec with
| i, None -> resolve_ident env i, None
| _ -> error "Unexpected enum_case in resolve_enum_case" (fst ec).range
let rec resolve_out_field (env:qenv) (fld:out_field) : ML out_field =
match fld with
| Out_field_named i t n -> Out_field_named i (resolve_typ env t) n
| Out_field_anon l u -> Out_field_anon (resolve_out_fields env l) u
and resolve_out_fields (env:qenv) (flds:list out_field) : ML (list out_field) =
List.map (resolve_out_field env) flds
let resolve_out_type (env:qenv) (out_t:out_typ) : ML out_typ =
{ out_t with
out_typ_names = resolve_typedef_names env out_t.out_typ_names;
out_typ_fields = List.map (resolve_out_field env) out_t.out_typ_fields } | false | false | Desugar.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val resolve_decl' (env: qenv) (d: decl') : ML decl' | [] | Desugar.resolve_decl' | {
"file_name": "src/3d/Desugar.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | env: Desugar.qenv -> d: Ast.decl' -> FStar.All.ML Ast.decl' | {
"end_col": 26,
"end_line": 421,
"start_col": 2,
"start_line": 390
} |
FStar.All.ML | val kind_of_ident (env: qenv) (i: ident) : ML t_kind | [
{
"abbrev": true,
"full_module": "Hashtable",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "Ast",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let kind_of_ident (env:qenv) (i:ident)
: ML t_kind
= let _or_ (b0 b1:bool) = b0 || b1 in
if Some? (H.try_find env.output_types i.v) `_or_`
Some? (H.try_find env.global_env.ge_out_t i.v)
then KindOutput
else if
Some? (H.try_find env.extern_types i.v) `_or_`
Some? (H.try_find env.global_env.ge_extern_t i.v)
then KindExtern
else KindSpec | val kind_of_ident (env: qenv) (i: ident) : ML t_kind
let kind_of_ident (env: qenv) (i: ident) : ML t_kind = | true | null | false | let _or_ (b0 b1: bool) = b0 || b1 in
if (Some? (H.try_find env.output_types i.v)) `_or_` (Some? (H.try_find env.global_env.ge_out_t i.v))
then KindOutput
else
if
(Some? (H.try_find env.extern_types i.v))
`_or_`
(Some? (H.try_find env.global_env.ge_extern_t i.v))
then KindExtern
else KindSpec | {
"checked_file": "Desugar.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hashtable.fsti.checked",
"GlobalEnv.fst.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.All.fst.checked",
"Ast.fst.checked"
],
"interface_file": false,
"source_file": "Desugar.fst"
} | [
"ml"
] | [
"Desugar.qenv",
"Ast.ident",
"Ast.KindOutput",
"Ast.t_kind",
"Prims.bool",
"Ast.KindExtern",
"Ast.KindSpec",
"FStar.Pervasives.Native.uu___is_Some",
"Ast.decl",
"FStar.Pervasives.Native.option",
"Hashtable.try_find",
"Ast.ident'",
"GlobalEnv.__proj__Mkglobal_env__item__ge_extern_t",
"Desugar.__proj__Mkqenv__item__global_env",
"Ast.__proj__Mkwith_meta_t__item__v",
"Prims.unit",
"Desugar.__proj__Mkqenv__item__extern_types",
"GlobalEnv.__proj__Mkglobal_env__item__ge_out_t",
"Desugar.__proj__Mkqenv__item__output_types",
"Prims.op_BarBar"
] | [] | (*
Copyright 2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Desugar
open FStar.List.Tot
open FStar.Mul
open Ast
open FStar.All
module H = Hashtable
(* This module implements a pass over the source AST,
implementing various simple desugarings
* Desugar enums with newly defined idents and explicit constant
assignments to enums where all the tags are previously defined
constants.
* Resolve module-qualified names, including the use of module
abbreviations
* Set the kind (Spec/Output/Extern) in the type nodes
* Finds variables in Static expressions and hoists them as assumptions
and removes the Static
*)
let check_desugared_enum_cases (cases:list enum_case) : ML (list ident) =
List.map
(function
| (i, None) -> i
| (i, _) -> failwith "Enum should already have been desugared")
cases
let desugar_enum_cases (ityp:integer_type) (cases:list enum_case) (export:bool)
: ML (list enum_case & list decl) =
let find_definition (i:ident) (d:decl) =
match d.d_decl.v with
| Define j _ (Int _ _) ->
i.v = j.v
| _ ->
false
in
let _, cases_rev, ds_rev =
List.fold_left
(fun (next, cases_rev, ds_rev) (i, jopt) ->
let next =
match jopt with
| Some (Inl j) -> j
| Some (Inr j) ->
begin
match List.Tot.find (find_definition j) ds_rev with
| Some ({d_decl={v=Define _ _ (Int _ k)}}) -> k
| _ -> error (Printf.sprintf "Enum identifier %s not found" (print_ident j)) j.range
end
| None -> next
in
let case = (i, None) in
let d = mk_decl
(Define i None (Int ityp next))
i.range
["Enum constant"]
export
in
(next + 1,
case :: cases_rev,
d :: ds_rev))
(0, [], [])
cases
in
List.rev cases_rev,
List.rev ds_rev
let desugar_one_enum (d:decl) : ML (list decl) =
match d.d_decl.v with
| Enum t i cases ->
if List.for_all (fun (_, jopt) -> None? jopt) cases
then [d] //no enum value assignments; no desugaring to do
else //if we have any assignments at all, then we treat all the
//tags as fresh constants and assign them values in sequence
//with respect to the assigned values of preceding tags
let cases, ds = desugar_enum_cases (typ_as_integer_type t) cases d.d_exported in
let enum = decl_with_v d (Enum t i cases) in
ds@[enum]
| _ -> [d]
(* This code is currently not used
It desugars an Enum to a record with a single refined field *)
// let eliminate_enum (d:decl) : ML decl =
// match d.v with
// | Enum t i cases ->
// let names = {
// typedef_name = { i with v = { i.v with name=Ast.reserved_prefix ^ Ast.reserved_prefix ^ i.v.name }};
// typedef_abbrev = i;
// typedef_ptr_abbrev = { i with v = {i.v with
// name = Ast.reserved_prefix ^ Ast.reserved_prefix ^ "P" ^ i.v.name }};
// typedef_attributes = [];
// } in
// let params = [] in
// let where = None in
// let field_ident = with_dummy_range (to_ident' (Ast.reserved_prefix ^ "enum_field")) in
// let field_ident_expr = with_dummy_range (Identifier field_ident) in
// let field_constraint =
// List.fold_right
// (fun (case, _) out ->
// let eq = with_range (App Eq [field_ident_expr; with_range (Identifier case) case.range]) case.range in
// with_dummy_range (App Or [eq; out]))
// cases
// (with_dummy_range (Constant (Bool false)))
// in
// let field = {
// field_dependence = false;
// field_ident = field_ident;
// field_type = t;
// field_array_opt = FieldScalar;
// field_constraint = Some field_constraint;
// field_number = None;
// field_bitwidth = None;
// field_action = None
// } in
// let d' = Record names params where [with_dummy_range field] in
// {d with v = d'}
// | _ -> d
(*
* output_types and extern_types tables to set the kind in the Typ_app nodes
*)
noeq
type qenv = {
mname : string;
module_abbrevs : H.t string string;
output_types : H.t ident' unit;
extern_types : H.t ident' unit;
local_names : list string;
global_env: GlobalEnv.global_env;
}
let push_module_abbrev (env:qenv) (i m:string) : ML unit =
H.insert env.module_abbrevs i m
let push_output_type (env:qenv) (out_t:out_typ) : ML unit =
H.insert env.output_types out_t.out_typ_names.typedef_name.v ();
H.insert env.output_types out_t.out_typ_names.typedef_abbrev.v ()
let push_extern_type (env:qenv) (td:typedef_names) : ML unit =
H.insert env.extern_types td.typedef_name.v ();
H.insert env.extern_types td.typedef_abbrev.v ()
let push_name (env:qenv) (name:string) : qenv =
{ env with local_names = name::env.local_names }
let prim_consts = [
"unit"; "Bool"; "UINT8"; "UINT16"; "UINT32"; "UINT64";
"UINT8BE"; "UINT16BE"; "UINT32BE"; "UINT64BE";
"field_id"; "PUINT8";
"all_bytes"; "all_zeros";
"is_range_okay";
"void" ]
let resolve_ident (env:qenv) (i:ident) : ML ident =
let resolve_to_current_module i =
{ i with v = { i.v with modul_name = Some env.mname } }
in
let maybe_resolve_as_ifdef i
: ML ident
= match env.global_env.ge_cfg with
| None -> resolve_to_current_module i
| Some (cfg, cfg_module_name) ->
if List.mem i.v.name cfg.compile_time_flags.flags
then { i with v = { i.v with modul_name = Some cfg_module_name } }
else resolve_to_current_module i
in
if List.mem i.v.name prim_consts //it's a primitive constant, e.g. UINT8, leave as is
then i
else if List.mem i.v.name env.local_names //it's a local name (e.g. a parameter name)
then (if Some? i.v.modul_name //must have module name set to None
then error (Printf.sprintf
"Ident %s is a local name but has a qualifying modul %s"
i.v.name
(Some?.v i.v.modul_name))
i.range
else i) //return the local name as is
else (match i.v.modul_name with //it's a top-level name
| None -> maybe_resolve_as_ifdef i
| Some m -> //if already qualified, check if it is an abbreviation
(match H.try_find env.module_abbrevs m with
| None -> i
| Some m -> { i with v = { i.v with modul_name = Some m } }))
let rec collect_ifdef_guards (env:qenv) (e:expr)
: ML unit
= let check_resolved_to_ifdef i =
match env.global_env.ge_cfg with
| None -> false
| Some (cfg, cfg_module_name) ->
List.mem i.v.name cfg.compile_time_flags.flags
&& i.v.modul_name = Some cfg_module_name
in
match e.v with
| This -> error "'this' is not allowed in the guard of an #if" e.range
| Static _ -> failwith "static should have been eliminated already"
| Constant _ -> ()
| Identifier i ->
if not (check_resolved_to_ifdef i)
then error (Printf.sprintf "Identifier %s is not a compile-time macro but is used in a #if" i.v.name) e.range
| App op args ->
begin
match op with
| And
| Or
| Not -> List.iter (collect_ifdef_guards env) args
| _ -> error "Only boolean expressions over identifiers are supported in #if guards" e.range
end
let rec resolve_expr' (env:qenv) (e:expr') r : ML expr' =
match e with
| Constant _ -> e
| Identifier i -> Identifier (resolve_ident env i)
| This -> e
| Static e' ->
let e' = resolve_expr env e' in
collect_ifdef_guards env e';//mark any variables as top-level IfDef symbols
e'.v
| App op args ->
let args = List.map (resolve_expr env) args in
App op args
and resolve_expr (env:qenv) (e:expr) : ML expr = { e with v = resolve_expr' env e.v e.range }
let resolve_typ_param (env:qenv) (p:typ_param) : ML typ_param =
match p with
| Inl e -> resolve_expr env e |> Inl
| _ -> p //Currently not going inside output expressions, should we?
let kind_of_ident (env:qenv) (i:ident) | false | false | Desugar.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val kind_of_ident (env: qenv) (i: ident) : ML t_kind | [] | Desugar.kind_of_ident | {
"file_name": "src/3d/Desugar.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | env: Desugar.qenv -> i: Ast.ident -> FStar.All.ML Ast.t_kind | {
"end_col": 17,
"end_line": 262,
"start_col": 3,
"start_line": 254
} |
FStar.All.ML | val collect_ifdef_guards (env: qenv) (e: expr) : ML unit | [
{
"abbrev": true,
"full_module": "Hashtable",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "Ast",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec collect_ifdef_guards (env:qenv) (e:expr)
: ML unit
= let check_resolved_to_ifdef i =
match env.global_env.ge_cfg with
| None -> false
| Some (cfg, cfg_module_name) ->
List.mem i.v.name cfg.compile_time_flags.flags
&& i.v.modul_name = Some cfg_module_name
in
match e.v with
| This -> error "'this' is not allowed in the guard of an #if" e.range
| Static _ -> failwith "static should have been eliminated already"
| Constant _ -> ()
| Identifier i ->
if not (check_resolved_to_ifdef i)
then error (Printf.sprintf "Identifier %s is not a compile-time macro but is used in a #if" i.v.name) e.range
| App op args ->
begin
match op with
| And
| Or
| Not -> List.iter (collect_ifdef_guards env) args
| _ -> error "Only boolean expressions over identifiers are supported in #if guards" e.range
end | val collect_ifdef_guards (env: qenv) (e: expr) : ML unit
let rec collect_ifdef_guards (env: qenv) (e: expr) : ML unit = | true | null | false | let check_resolved_to_ifdef i =
match env.global_env.ge_cfg with
| None -> false
| Some (cfg, cfg_module_name) ->
List.mem i.v.name cfg.compile_time_flags.flags && i.v.modul_name = Some cfg_module_name
in
match e.v with
| This -> error "'this' is not allowed in the guard of an #if" e.range
| Static _ -> failwith "static should have been eliminated already"
| Constant _ -> ()
| Identifier i ->
if not (check_resolved_to_ifdef i)
then
error (Printf.sprintf "Identifier %s is not a compile-time macro but is used in a #if" i.v.name)
e.range
| App op args ->
match op with
| And | Or | Not -> List.iter (collect_ifdef_guards env) args
| _ -> error "Only boolean expressions over identifiers are supported in #if guards" e.range | {
"checked_file": "Desugar.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hashtable.fsti.checked",
"GlobalEnv.fst.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.All.fst.checked",
"Ast.fst.checked"
],
"interface_file": false,
"source_file": "Desugar.fst"
} | [
"ml"
] | [
"Desugar.qenv",
"Ast.expr",
"Ast.__proj__Mkwith_meta_t__item__v",
"Ast.expr'",
"Ast.error",
"Prims.unit",
"Ast.__proj__Mkwith_meta_t__item__range",
"Ast.with_meta_t",
"FStar.All.failwith",
"Ast.constant",
"Ast.ident",
"Prims.op_Negation",
"FStar.Printf.sprintf",
"Ast.__proj__Mkident'__item__name",
"Ast.ident'",
"Prims.bool",
"Ast.op",
"Prims.list",
"FStar.List.iter",
"Desugar.collect_ifdef_guards",
"GlobalEnv.__proj__Mkglobal_env__item__ge_cfg",
"Desugar.__proj__Mkqenv__item__global_env",
"Config.config",
"Prims.string",
"Prims.op_AmpAmp",
"FStar.List.Tot.Base.mem",
"Config.__proj__Mkcompile_time_flags__item__flags",
"Config.__proj__Mkconfig__item__compile_time_flags",
"Prims.op_Equality",
"FStar.Pervasives.Native.option",
"Ast.__proj__Mkident'__item__modul_name",
"FStar.Pervasives.Native.Some"
] | [] | (*
Copyright 2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Desugar
open FStar.List.Tot
open FStar.Mul
open Ast
open FStar.All
module H = Hashtable
(* This module implements a pass over the source AST,
implementing various simple desugarings
* Desugar enums with newly defined idents and explicit constant
assignments to enums where all the tags are previously defined
constants.
* Resolve module-qualified names, including the use of module
abbreviations
* Set the kind (Spec/Output/Extern) in the type nodes
* Finds variables in Static expressions and hoists them as assumptions
and removes the Static
*)
let check_desugared_enum_cases (cases:list enum_case) : ML (list ident) =
List.map
(function
| (i, None) -> i
| (i, _) -> failwith "Enum should already have been desugared")
cases
let desugar_enum_cases (ityp:integer_type) (cases:list enum_case) (export:bool)
: ML (list enum_case & list decl) =
let find_definition (i:ident) (d:decl) =
match d.d_decl.v with
| Define j _ (Int _ _) ->
i.v = j.v
| _ ->
false
in
let _, cases_rev, ds_rev =
List.fold_left
(fun (next, cases_rev, ds_rev) (i, jopt) ->
let next =
match jopt with
| Some (Inl j) -> j
| Some (Inr j) ->
begin
match List.Tot.find (find_definition j) ds_rev with
| Some ({d_decl={v=Define _ _ (Int _ k)}}) -> k
| _ -> error (Printf.sprintf "Enum identifier %s not found" (print_ident j)) j.range
end
| None -> next
in
let case = (i, None) in
let d = mk_decl
(Define i None (Int ityp next))
i.range
["Enum constant"]
export
in
(next + 1,
case :: cases_rev,
d :: ds_rev))
(0, [], [])
cases
in
List.rev cases_rev,
List.rev ds_rev
let desugar_one_enum (d:decl) : ML (list decl) =
match d.d_decl.v with
| Enum t i cases ->
if List.for_all (fun (_, jopt) -> None? jopt) cases
then [d] //no enum value assignments; no desugaring to do
else //if we have any assignments at all, then we treat all the
//tags as fresh constants and assign them values in sequence
//with respect to the assigned values of preceding tags
let cases, ds = desugar_enum_cases (typ_as_integer_type t) cases d.d_exported in
let enum = decl_with_v d (Enum t i cases) in
ds@[enum]
| _ -> [d]
(* This code is currently not used
It desugars an Enum to a record with a single refined field *)
// let eliminate_enum (d:decl) : ML decl =
// match d.v with
// | Enum t i cases ->
// let names = {
// typedef_name = { i with v = { i.v with name=Ast.reserved_prefix ^ Ast.reserved_prefix ^ i.v.name }};
// typedef_abbrev = i;
// typedef_ptr_abbrev = { i with v = {i.v with
// name = Ast.reserved_prefix ^ Ast.reserved_prefix ^ "P" ^ i.v.name }};
// typedef_attributes = [];
// } in
// let params = [] in
// let where = None in
// let field_ident = with_dummy_range (to_ident' (Ast.reserved_prefix ^ "enum_field")) in
// let field_ident_expr = with_dummy_range (Identifier field_ident) in
// let field_constraint =
// List.fold_right
// (fun (case, _) out ->
// let eq = with_range (App Eq [field_ident_expr; with_range (Identifier case) case.range]) case.range in
// with_dummy_range (App Or [eq; out]))
// cases
// (with_dummy_range (Constant (Bool false)))
// in
// let field = {
// field_dependence = false;
// field_ident = field_ident;
// field_type = t;
// field_array_opt = FieldScalar;
// field_constraint = Some field_constraint;
// field_number = None;
// field_bitwidth = None;
// field_action = None
// } in
// let d' = Record names params where [with_dummy_range field] in
// {d with v = d'}
// | _ -> d
(*
* output_types and extern_types tables to set the kind in the Typ_app nodes
*)
noeq
type qenv = {
mname : string;
module_abbrevs : H.t string string;
output_types : H.t ident' unit;
extern_types : H.t ident' unit;
local_names : list string;
global_env: GlobalEnv.global_env;
}
let push_module_abbrev (env:qenv) (i m:string) : ML unit =
H.insert env.module_abbrevs i m
let push_output_type (env:qenv) (out_t:out_typ) : ML unit =
H.insert env.output_types out_t.out_typ_names.typedef_name.v ();
H.insert env.output_types out_t.out_typ_names.typedef_abbrev.v ()
let push_extern_type (env:qenv) (td:typedef_names) : ML unit =
H.insert env.extern_types td.typedef_name.v ();
H.insert env.extern_types td.typedef_abbrev.v ()
let push_name (env:qenv) (name:string) : qenv =
{ env with local_names = name::env.local_names }
let prim_consts = [
"unit"; "Bool"; "UINT8"; "UINT16"; "UINT32"; "UINT64";
"UINT8BE"; "UINT16BE"; "UINT32BE"; "UINT64BE";
"field_id"; "PUINT8";
"all_bytes"; "all_zeros";
"is_range_okay";
"void" ]
let resolve_ident (env:qenv) (i:ident) : ML ident =
let resolve_to_current_module i =
{ i with v = { i.v with modul_name = Some env.mname } }
in
let maybe_resolve_as_ifdef i
: ML ident
= match env.global_env.ge_cfg with
| None -> resolve_to_current_module i
| Some (cfg, cfg_module_name) ->
if List.mem i.v.name cfg.compile_time_flags.flags
then { i with v = { i.v with modul_name = Some cfg_module_name } }
else resolve_to_current_module i
in
if List.mem i.v.name prim_consts //it's a primitive constant, e.g. UINT8, leave as is
then i
else if List.mem i.v.name env.local_names //it's a local name (e.g. a parameter name)
then (if Some? i.v.modul_name //must have module name set to None
then error (Printf.sprintf
"Ident %s is a local name but has a qualifying modul %s"
i.v.name
(Some?.v i.v.modul_name))
i.range
else i) //return the local name as is
else (match i.v.modul_name with //it's a top-level name
| None -> maybe_resolve_as_ifdef i
| Some m -> //if already qualified, check if it is an abbreviation
(match H.try_find env.module_abbrevs m with
| None -> i
| Some m -> { i with v = { i.v with modul_name = Some m } }))
let rec collect_ifdef_guards (env:qenv) (e:expr) | false | false | Desugar.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val collect_ifdef_guards (env: qenv) (e: expr) : ML unit | [
"recursion"
] | Desugar.collect_ifdef_guards | {
"file_name": "src/3d/Desugar.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | env: Desugar.qenv -> e: Ast.expr -> FStar.All.ML Prims.unit | {
"end_col": 9,
"end_line": 228,
"start_col": 3,
"start_line": 207
} |
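
collect_ifdef_guards in this row accepts only guards built from compile-time flags, constants, and the connectives And/Or/Not, and reports an error for anything else. A standalone miniature of that shape check over an invented toy guard type — nothing below comes from the 3d sources; it only makes the accepted grammar concrete:

module SketchGuard

(* A toy guard language: constants, named flags, and the three connectives
   that the checker above accepts. *)
type guard =
  | GBool : bool -> guard
  | GFlag : string -> guard
  | GAnd  : guard -> guard -> guard
  | GOr   : guard -> guard -> guard
  | GNot  : guard -> guard

(* A guard is admissible iff every flag it mentions is a registered
   compile-time flag; connectives are checked recursively, like the
   And/Or/Not case above. *)
let rec guard_ok (flags:list string) (g:guard) : Tot bool (decreases g) =
  match g with
  | GBool _ -> true
  | GFlag f -> FStar.List.Tot.mem f flags
  | GAnd g1 g2 -> guard_ok flags g1 && guard_ok flags g2
  | GOr g1 g2 -> guard_ok flags g1 && guard_ok flags g2
  | GNot g1 -> guard_ok flags g1
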
FStar.All.ML | val desugar_enum_cases (ityp: integer_type) (cases: list enum_case) (export: bool)
: ML (list enum_case & list decl) | [
{
"abbrev": true,
"full_module": "Hashtable",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "Ast",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let desugar_enum_cases (ityp:integer_type) (cases:list enum_case) (export:bool)
: ML (list enum_case & list decl) =
let find_definition (i:ident) (d:decl) =
match d.d_decl.v with
| Define j _ (Int _ _) ->
i.v = j.v
| _ ->
false
in
let _, cases_rev, ds_rev =
List.fold_left
(fun (next, cases_rev, ds_rev) (i, jopt) ->
let next =
match jopt with
| Some (Inl j) -> j
| Some (Inr j) ->
begin
match List.Tot.find (find_definition j) ds_rev with
| Some ({d_decl={v=Define _ _ (Int _ k)}}) -> k
| _ -> error (Printf.sprintf "Enum identifier %s not found" (print_ident j)) j.range
end
| None -> next
in
let case = (i, None) in
let d = mk_decl
(Define i None (Int ityp next))
i.range
["Enum constant"]
export
in
(next + 1,
case :: cases_rev,
d :: ds_rev))
(0, [], [])
cases
in
List.rev cases_rev,
List.rev ds_rev | val desugar_enum_cases (ityp: integer_type) (cases: list enum_case) (export: bool)
: ML (list enum_case & list decl)
let desugar_enum_cases (ityp: integer_type) (cases: list enum_case) (export: bool)
: ML (list enum_case & list decl) = | true | null | false | let find_definition (i: ident) (d: decl) =
match d.d_decl.v with
| Define j _ (Int _ _) -> i.v = j.v
| _ -> false
in
let _, cases_rev, ds_rev =
List.fold_left (fun (next, cases_rev, ds_rev) (i, jopt) ->
let next =
match jopt with
| Some (Inl j) -> j
| Some (Inr j) ->
(match List.Tot.find (find_definition j) ds_rev with
| Some { d_decl = { v = Define _ _ (Int _ k) } } -> k
| _ -> error (Printf.sprintf "Enum identifier %s not found" (print_ident j)) j.range)
| None -> next
in
let case = (i, None) in
let d = mk_decl (Define i None (Int ityp next)) i.range ["Enum constant"] export in
(next + 1, case :: cases_rev, d :: ds_rev))
(0, [], [])
cases
in
List.rev cases_rev, List.rev ds_rev | {
"checked_file": "Desugar.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hashtable.fsti.checked",
"GlobalEnv.fst.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.All.fst.checked",
"Ast.fst.checked"
],
"interface_file": false,
"source_file": "Desugar.fst"
} | [
"ml"
] | [
"Ast.integer_type",
"Prims.list",
"Ast.enum_case",
"Prims.bool",
"Prims.int",
"FStar.Pervasives.Native.tuple2",
"Ast.with_meta_t",
"Ast.ident'",
"FStar.Pervasives.Native.option",
"Ast.either",
"Ast.ident",
"Ast.decl",
"FStar.Pervasives.Native.Mktuple2",
"FStar.List.Tot.Base.rev",
"FStar.Pervasives.Native.tuple3",
"FStar.List.fold_left",
"FStar.Pervasives.Native.Mktuple3",
"Prims.op_Addition",
"Prims.Cons",
"Ast.mk_decl",
"Ast.Define",
"FStar.Pervasives.Native.None",
"Ast.typ",
"Ast.Int",
"Ast.__proj__Mkwith_meta_t__item__range",
"Prims.string",
"Prims.Nil",
"FStar.List.Tot.Base.find",
"Ast.range",
"Ast.comments",
"Prims.b2t",
"Ast.error",
"FStar.Printf.sprintf",
"Ast.print_ident",
"Ast.__proj__Mkwith_meta_t__item__v",
"Ast.decl'",
"Ast.__proj__Mkdecl__item__d_decl",
"Prims.op_Equality"
] | [] | (*
Copyright 2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Desugar
open FStar.List.Tot
open FStar.Mul
open Ast
open FStar.All
module H = Hashtable
(* This module implements a pass over the source AST,
implementing various simple desugarings
* Desugar enums with newly defined idents and explicit constant
assignments to enums where all the tags are previously defined
constants.
* Resolve module-qualified names, including the use of module
abbreviations
* Set the kind (Spec/Output/Extern) in the type nodes
* Finds variables in Static expressions and hoists them as assumptions
and removes the Static
*)
let check_desugared_enum_cases (cases:list enum_case) : ML (list ident) =
List.map
(function
| (i, None) -> i
| (i, _) -> failwith "Enum should already have been desugared")
cases | false | false | Desugar.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val desugar_enum_cases (ityp: integer_type) (cases: list enum_case) (export: bool)
: ML (list enum_case & list decl) | [] | Desugar.desugar_enum_cases | {
"file_name": "src/3d/Desugar.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | ityp: Ast.integer_type -> cases: Prims.list Ast.enum_case -> export: Prims.bool
-> FStar.All.ML (Prims.list Ast.enum_case * Prims.list Ast.decl) | {
"end_col": 17,
"end_line": 83,
"start_col": 37,
"start_line": 47
} |
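
The fold in desugar_enum_cases walks the tags left to right with a `next` counter that an explicit assignment overrides and resets. The same counting scheme in a self-contained form over plain strings and integers (invented names; the real code additionally emits a Define declaration per tag and resolves symbolic assignments against earlier definitions, which this sketch drops):

module SketchEnum

(* Assign a value to every tag: an explicit assignment wins and also resets
   the counter; an unassigned tag takes the current counter value. *)
let rec number_tags (next:int) (cases:list (string & option int))
  : Tot (list (string & int)) (decreases cases) =
  match cases with
  | [] -> []
  | (tag, Some j) :: tl -> (tag, j) :: number_tags (j + 1) tl
  | (tag, None) :: tl -> (tag, next) :: number_tags (next + 1) tl

(* e.g. number_tags 0 [("A", None); ("B", Some 10); ("C", None)]
   evaluates to [("A", 0); ("B", 10); ("C", 11)]. *)
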
FStar.All.ML | val desugar (genv: GlobalEnv.global_env) (mname: string) (p: prog) : ML prog | [
{
"abbrev": true,
"full_module": "Hashtable",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "Ast",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let desugar (genv:GlobalEnv.global_env) (mname:string) (p:prog) : ML prog =
let decls, refinement = p in
let decls = List.collect desugar_one_enum decls in
let env = {
mname=mname;
module_abbrevs=H.create 10;
output_types=H.create 10;
extern_types=H.create 10;
local_names=[];
global_env=genv
} in
H.insert env.extern_types (Ast.to_ident' "void") ();
let decls = List.map (resolve_decl env) decls in
decls,
(match refinement with
| None -> None
| Some tr ->
Some ({ tr with
type_map =
tr.type_map
|> List.map (fun (i, jopt) -> match jopt with
| None -> i, Some (resolve_ident env i)
| Some j -> i, Some (resolve_ident env j))})) | val desugar (genv: GlobalEnv.global_env) (mname: string) (p: prog) : ML prog
let desugar (genv: GlobalEnv.global_env) (mname: string) (p: prog) : ML prog = | true | null | false | let decls, refinement = p in
let decls = List.collect desugar_one_enum decls in
let env =
{
mname = mname;
module_abbrevs = H.create 10;
output_types = H.create 10;
extern_types = H.create 10;
local_names = [];
global_env = genv
}
in
H.insert env.extern_types (Ast.to_ident' "void") ();
let decls = List.map (resolve_decl env) decls in
decls,
(match refinement with
| None -> None
| Some tr ->
Some
({
tr with
type_map
=
tr.type_map |>
List.map (fun (i, jopt) ->
match jopt with
| None -> i, Some (resolve_ident env i)
| Some j -> i, Some (resolve_ident env j))
})) | {
"checked_file": "Desugar.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hashtable.fsti.checked",
"GlobalEnv.fst.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.All.fst.checked",
"Ast.fst.checked"
],
"interface_file": false,
"source_file": "Desugar.fst"
} | [
"ml"
] | [
"GlobalEnv.global_env",
"Prims.string",
"Ast.prog",
"Prims.list",
"Ast.decl",
"FStar.Pervasives.Native.option",
"Ast.type_refinement",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.Some",
"Ast.Mktype_refinement",
"Ast.__proj__Mktype_refinement__item__includes",
"FStar.Pervasives.Native.tuple2",
"Ast.ident",
"FStar.All.op_Bar_Greater",
"Ast.__proj__Mktype_refinement__item__type_map",
"FStar.List.map",
"Desugar.resolve_ident",
"Desugar.resolve_decl",
"Prims.unit",
"Hashtable.insert",
"Ast.ident'",
"Desugar.__proj__Mkqenv__item__extern_types",
"Ast.to_ident'",
"Desugar.qenv",
"Desugar.Mkqenv",
"Prims.Nil",
"Hashtable.t",
"Hashtable.create",
"FStar.List.collect",
"Desugar.desugar_one_enum"
] | [] | (*
Copyright 2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Desugar
open FStar.List.Tot
open FStar.Mul
open Ast
open FStar.All
module H = Hashtable
(* This module implements a pass over the source AST,
implementing various simple desugarings
* Desugar enums with newly defined idents and explicit constant
assignments to enums where all the tags are previously defined
constants.
* Resolve module-qualified names, including the use of module
abbreviations
* Set the kind (Spec/Output/Extern) in the type nodes
* Finds variables in Static expressions and hoists them as assumptions
and removes the Static
*)
let check_desugared_enum_cases (cases:list enum_case) : ML (list ident) =
List.map
(function
| (i, None) -> i
| (i, _) -> failwith "Enum should already have been desugared")
cases
let desugar_enum_cases (ityp:integer_type) (cases:list enum_case) (export:bool)
: ML (list enum_case & list decl) =
let find_definition (i:ident) (d:decl) =
match d.d_decl.v with
| Define j _ (Int _ _) ->
i.v = j.v
| _ ->
false
in
let _, cases_rev, ds_rev =
List.fold_left
(fun (next, cases_rev, ds_rev) (i, jopt) ->
let next =
match jopt with
| Some (Inl j) -> j
| Some (Inr j) ->
begin
match List.Tot.find (find_definition j) ds_rev with
| Some ({d_decl={v=Define _ _ (Int _ k)}}) -> k
| _ -> error (Printf.sprintf "Enum identifier %s not found" (print_ident j)) j.range
end
| None -> next
in
let case = (i, None) in
let d = mk_decl
(Define i None (Int ityp next))
i.range
["Enum constant"]
export
in
(next + 1,
case :: cases_rev,
d :: ds_rev))
(0, [], [])
cases
in
List.rev cases_rev,
List.rev ds_rev
let desugar_one_enum (d:decl) : ML (list decl) =
match d.d_decl.v with
| Enum t i cases ->
if List.for_all (fun (_, jopt) -> None? jopt) cases
then [d] //no enum value assignments; no desugaring to do
else //if we have any assignments at all, then we treat all the
//tags as fresh constants and assign them values in sequence
//with respect to the assigned values of preceding tags
let cases, ds = desugar_enum_cases (typ_as_integer_type t) cases d.d_exported in
let enum = decl_with_v d (Enum t i cases) in
ds@[enum]
| _ -> [d]
(* This code is currently not used
It desugars an Enum to a record with a single refined field *)
// let eliminate_enum (d:decl) : ML decl =
// match d.v with
// | Enum t i cases ->
// let names = {
// typedef_name = { i with v = { i.v with name=Ast.reserved_prefix ^ Ast.reserved_prefix ^ i.v.name }};
// typedef_abbrev = i;
// typedef_ptr_abbrev = { i with v = {i.v with
// name = Ast.reserved_prefix ^ Ast.reserved_prefix ^ "P" ^ i.v.name }};
// typedef_attributes = [];
// } in
// let params = [] in
// let where = None in
// let field_ident = with_dummy_range (to_ident' (Ast.reserved_prefix ^ "enum_field")) in
// let field_ident_expr = with_dummy_range (Identifier field_ident) in
// let field_constraint =
// List.fold_right
// (fun (case, _) out ->
// let eq = with_range (App Eq [field_ident_expr; with_range (Identifier case) case.range]) case.range in
// with_dummy_range (App Or [eq; out]))
// cases
// (with_dummy_range (Constant (Bool false)))
// in
// let field = {
// field_dependence = false;
// field_ident = field_ident;
// field_type = t;
// field_array_opt = FieldScalar;
// field_constraint = Some field_constraint;
// field_number = None;
// field_bitwidth = None;
// field_action = None
// } in
// let d' = Record names params where [with_dummy_range field] in
// {d with v = d'}
// | _ -> d
(*
* output_types and extern_types tables to set the kind in the Typ_app nodes
*)
noeq
type qenv = {
mname : string;
module_abbrevs : H.t string string;
output_types : H.t ident' unit;
extern_types : H.t ident' unit;
local_names : list string;
global_env: GlobalEnv.global_env;
}
let push_module_abbrev (env:qenv) (i m:string) : ML unit =
H.insert env.module_abbrevs i m
let push_output_type (env:qenv) (out_t:out_typ) : ML unit =
H.insert env.output_types out_t.out_typ_names.typedef_name.v ();
H.insert env.output_types out_t.out_typ_names.typedef_abbrev.v ()
let push_extern_type (env:qenv) (td:typedef_names) : ML unit =
H.insert env.extern_types td.typedef_name.v ();
H.insert env.extern_types td.typedef_abbrev.v ()
let push_name (env:qenv) (name:string) : qenv =
{ env with local_names = name::env.local_names }
let prim_consts = [
"unit"; "Bool"; "UINT8"; "UINT16"; "UINT32"; "UINT64";
"UINT8BE"; "UINT16BE"; "UINT32BE"; "UINT64BE";
"field_id"; "PUINT8";
"all_bytes"; "all_zeros";
"is_range_okay";
"void" ]
let resolve_ident (env:qenv) (i:ident) : ML ident =
let resolve_to_current_module i =
{ i with v = { i.v with modul_name = Some env.mname } }
in
let maybe_resolve_as_ifdef i
: ML ident
= match env.global_env.ge_cfg with
| None -> resolve_to_current_module i
| Some (cfg, cfg_module_name) ->
if List.mem i.v.name cfg.compile_time_flags.flags
then { i with v = { i.v with modul_name = Some cfg_module_name } }
else resolve_to_current_module i
in
if List.mem i.v.name prim_consts //it's a primitive constant, e.g. UINT8, leave as is
then i
else if List.mem i.v.name env.local_names //it's a local name (e.g. a parameter name)
then (if Some? i.v.modul_name //must have module name set to None
then error (Printf.sprintf
"Ident %s is a local name but has a qualifying modul %s"
i.v.name
(Some?.v i.v.modul_name))
i.range
else i) //return the local name as is
else (match i.v.modul_name with //it's a top-level name
| None -> maybe_resolve_as_ifdef i
| Some m -> //if already qualified, check if it is an abbreviation
(match H.try_find env.module_abbrevs m with
| None -> i
| Some m -> { i with v = { i.v with modul_name = Some m } }))
let rec collect_ifdef_guards (env:qenv) (e:expr)
: ML unit
= let check_resolved_to_ifdef i =
match env.global_env.ge_cfg with
| None -> false
| Some (cfg, cfg_module_name) ->
List.mem i.v.name cfg.compile_time_flags.flags
&& i.v.modul_name = Some cfg_module_name
in
match e.v with
| This -> error "'this' is not allowed in the guard of an #if" e.range
| Static _ -> failwith "static should have been eliminated already"
| Constant _ -> ()
| Identifier i ->
if not (check_resolved_to_ifdef i)
then error (Printf.sprintf "Identifier %s is not a compile-time macro but is used in a #if" i.v.name) e.range
| App op args ->
begin
match op with
| And
| Or
| Not -> List.iter (collect_ifdef_guards env) args
| _ -> error "Only boolean expressions over identifiers are supported in #if guards" e.range
end
let rec resolve_expr' (env:qenv) (e:expr') r : ML expr' =
match e with
| Constant _ -> e
| Identifier i -> Identifier (resolve_ident env i)
| This -> e
| Static e' ->
let e' = resolve_expr env e' in
collect_ifdef_guards env e';//mark any variables as top-level IfDef symbols
e'.v
| App op args ->
let args = List.map (resolve_expr env) args in
App op args
and resolve_expr (env:qenv) (e:expr) : ML expr = { e with v = resolve_expr' env e.v e.range }
let resolve_typ_param (env:qenv) (p:typ_param) : ML typ_param =
match p with
| Inl e -> resolve_expr env e |> Inl
| _ -> p //Currently not going inside output expressions, should we?
let kind_of_ident (env:qenv) (i:ident)
: ML t_kind
= let _or_ (b0 b1:bool) = b0 || b1 in
if Some? (H.try_find env.output_types i.v) `_or_`
Some? (H.try_find env.global_env.ge_out_t i.v)
then KindOutput
else if
Some? (H.try_find env.extern_types i.v) `_or_`
Some? (H.try_find env.global_env.ge_extern_t i.v)
then KindExtern
else KindSpec
let rec resolve_typ' (env:qenv) (t:typ') : ML typ' =
match t with
| Type_app hd _ args ->
let hd = resolve_ident env hd in
//Set is_out argument to the Type_app appropriately
let k = kind_of_ident env hd in
Type_app hd k (List.map (resolve_typ_param env) args)
| Pointer t -> Pointer (resolve_typ env t)
and resolve_typ (env:qenv) (t:typ) : ML typ = { t with v = resolve_typ' env t.v }
let resolve_atomic_action (env:qenv) (ac:atomic_action) : ML atomic_action =
match ac with
| Action_return e -> Action_return (resolve_expr env e)
| Action_abort
| Action_field_pos_64
| Action_field_pos_32
| Action_field_ptr -> ac
| Action_field_ptr_after e write_to -> Action_field_ptr_after (resolve_expr env e) write_to
| Action_deref i -> Action_deref i //most certainly a type parameter
| Action_assignment lhs rhs ->
Action_assignment lhs (resolve_expr env rhs) //lhs is an action-local variable
| Action_call f args ->
Action_call (resolve_ident env f) (List.map (resolve_expr env) args)
let rec resolve_action' (env:qenv) (act:action') : ML action' =
match act with
| Atomic_action ac -> Atomic_action (resolve_atomic_action env ac)
| Action_seq hd tl ->
Action_seq (resolve_atomic_action env hd) (resolve_action env tl)
| Action_ite hd then_ else_ ->
Action_ite (resolve_expr env hd) (resolve_action env then_) (map_opt (resolve_action env) else_)
| Action_let i a k ->
Action_let i (resolve_atomic_action env a) (resolve_action (push_name env i.v.name) k)
| Action_act a ->
Action_act (resolve_action env a)
and resolve_action (env:qenv) (act:action) : ML action =
{ act with v = resolve_action' env act.v }
let resolve_param (env:qenv) (p:param) : ML (param & qenv) =
let t, i, q = p in
(resolve_typ env t, i, q),
push_name env i.v.name
let resolve_params (env:qenv) (params:list param) : ML (list param & qenv) =
List.fold_left (fun (params, env) p ->
let p, env = resolve_param env p in
params@[p], env) ([], env) params
let resolve_field_bitwidth_t (env:qenv) (fb:field_bitwidth_t) : ML field_bitwidth_t =
let resolve_bitfield_attr' (env:qenv) (b:bitfield_attr') : ML bitfield_attr' =
{ b with bitfield_type = resolve_typ env b.bitfield_type } in
let resolve_bitfield_attr (env:qenv) (b:bitfield_attr) : ML bitfield_attr =
{ b with v = resolve_bitfield_attr' env b.v } in
match fb with
| Inl _ -> fb
| Inr b -> Inr (resolve_bitfield_attr env b)
let resolve_field_array_t (env:qenv) (farr:field_array_t) : ML field_array_t =
match farr with
| FieldScalar -> farr
| FieldArrayQualified (e, aq) ->
FieldArrayQualified (resolve_expr env e, aq)
| FieldString None -> farr
| FieldString (Some e) -> FieldString (Some (resolve_expr env e))
let rec resolve_field (env:qenv) (ff:field) : ML (field & qenv) =
match ff.v with
| AtomicField f -> let f, e = resolve_atomic_field env f in {ff with v = AtomicField f}, e
| RecordField f i -> let fs, _ = resolve_fields env f in {ff with v = RecordField fs i}, env //record fields are not in scope outside the record
| SwitchCaseField f i -> let f = resolve_switch_case env f in {ff with v = SwitchCaseField f i}, env
and resolve_atomic_field (env:qenv) (f:atomic_field) : ML (atomic_field & qenv) =
let resolve_atomic_field' (env:qenv) (sf:atomic_field') : ML atomic_field' =
{ sf with
field_type = resolve_typ env sf.field_type;
field_array_opt = resolve_field_array_t env sf.field_array_opt;
field_constraint = map_opt (resolve_expr env) sf.field_constraint;
field_bitwidth = map_opt (resolve_field_bitwidth_t env) sf.field_bitwidth;
field_action = map_opt (fun (a, b) -> resolve_action env a, b) sf.field_action } in
let env = push_name env f.v.field_ident.v.name in
{ f with v = resolve_atomic_field' env f.v }, env
and resolve_fields (env:qenv) (flds:list field) : ML (list field & qenv) =
List.fold_left (fun (flds, env) f ->
let f, env = resolve_field env f in
flds@[f], env) ([], env) flds
and resolve_switch_case (env:qenv) (sc:switch_case) : ML switch_case = //case fields do not escape their scope
let resolve_case (env:qenv) (c:case) : ML case =
match c with
| Case e f -> Case (resolve_expr env e) (fst (resolve_field env f))
| DefaultCase f -> DefaultCase (fst (resolve_field env f)) in
let e, l = sc in
resolve_expr env e, List.map (resolve_case env) l
let resolve_typedef_names (env:qenv) (td_names:typedef_names) : ML typedef_names =
{ td_names with
typedef_name = resolve_ident env td_names.typedef_name;
typedef_abbrev = resolve_ident env td_names.typedef_abbrev;
typedef_ptr_abbrev = resolve_ident env td_names.typedef_ptr_abbrev }
let resolve_enum_case (env:qenv) (ec:enum_case) : ML enum_case =
match ec with
| i, None -> resolve_ident env i, None
| _ -> error "Unexpected enum_case in resolve_enum_case" (fst ec).range
let rec resolve_out_field (env:qenv) (fld:out_field) : ML out_field =
match fld with
| Out_field_named i t n -> Out_field_named i (resolve_typ env t) n
| Out_field_anon l u -> Out_field_anon (resolve_out_fields env l) u
and resolve_out_fields (env:qenv) (flds:list out_field) : ML (list out_field) =
List.map (resolve_out_field env) flds
let resolve_out_type (env:qenv) (out_t:out_typ) : ML out_typ =
{ out_t with
out_typ_names = resolve_typedef_names env out_t.out_typ_names;
out_typ_fields = List.map (resolve_out_field env) out_t.out_typ_fields }
let resolve_decl' (env:qenv) (d:decl') : ML decl' =
match d with
| ModuleAbbrev i m -> push_module_abbrev env i.v.name m.v.name; d
| Define i topt c ->
Define (resolve_ident env i) (map_opt (resolve_typ env) topt) c
| TypeAbbrev t i ->
TypeAbbrev (resolve_typ env t) (resolve_ident env i)
| Enum t i ecs ->
Enum (resolve_typ env t) (resolve_ident env i) (List.map (resolve_enum_case env) ecs)
| Record td_names params where flds ->
let td_names = resolve_typedef_names env td_names in
let params, env = resolve_params env params in
let where = map_opt (resolve_expr env) where in
let flds, _ = resolve_fields env flds in
Record td_names params where flds
| CaseType td_names params sc ->
let td_names = resolve_typedef_names env td_names in
let params, env = resolve_params env params in
let sc = resolve_switch_case env sc in
CaseType td_names params sc
| OutputType out_t ->
let out_t = resolve_out_type env out_t in
push_output_type env out_t;
OutputType out_t
| ExternType td_names ->
let td_names = resolve_typedef_names env td_names in
push_extern_type env td_names;
ExternType td_names
| ExternFn id ret params ->
let id = resolve_ident env id in
let ret = resolve_typ env ret in
let params, _ = resolve_params env params in
ExternFn id ret params
let resolve_decl (env:qenv) (d:decl) : ML decl = decl_with_v d (resolve_decl' env d.d_decl.v) | false | false | Desugar.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val desugar (genv: GlobalEnv.global_env) (mname: string) (p: prog) : ML prog | [] | Desugar.desugar | {
"file_name": "src/3d/Desugar.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | genv: GlobalEnv.global_env -> mname: Prims.string -> p: Ast.prog -> FStar.All.ML Ast.prog | {
"end_col": 86,
"end_line": 447,
"start_col": 75,
"start_line": 425
} |
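
The closing type_map rewrite in desugar pairs every source name with a resolved target, defaulting to the resolved source name when no explicit target was given. Reduced to a two-case helper over strings, with invented names standing in for resolve_ident and the ident pairs:

module SketchTypeMap

(* Fill in the target of a (source, target) pair: default to the resolved
   source when no target was given, otherwise resolve the given target. *)
let default_target (resolve:string -> string) (entry:string & option string)
  : Tot (string & option string) =
  match entry with
  | (i, None) -> (i, Some (resolve i))
  | (i, Some j) -> (i, Some (resolve j))
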
FStar.All.ML | val resolve_ident (env: qenv) (i: ident) : ML ident | [
{
"abbrev": true,
"full_module": "Hashtable",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "Ast",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let resolve_ident (env:qenv) (i:ident) : ML ident =
let resolve_to_current_module i =
{ i with v = { i.v with modul_name = Some env.mname } }
in
let maybe_resolve_as_ifdef i
: ML ident
= match env.global_env.ge_cfg with
| None -> resolve_to_current_module i
| Some (cfg, cfg_module_name) ->
if List.mem i.v.name cfg.compile_time_flags.flags
then { i with v = { i.v with modul_name = Some cfg_module_name } }
else resolve_to_current_module i
in
if List.mem i.v.name prim_consts //it's a primitive constant, e.g. UINT8, leave as is
then i
else if List.mem i.v.name env.local_names //it's a local name (e.g. a parameter name)
then (if Some? i.v.modul_name //must have module name set to None
then error (Printf.sprintf
"Ident %s is a local name but has a qualifying modul %s"
i.v.name
(Some?.v i.v.modul_name))
i.range
else i) //return the local name as is
else (match i.v.modul_name with //it's a top-level name
| None -> maybe_resolve_as_ifdef i
| Some m -> //if already qualified, check if it is an abbreviation
(match H.try_find env.module_abbrevs m with
| None -> i
| Some m -> { i with v = { i.v with modul_name = Some m } })) | val resolve_ident (env: qenv) (i: ident) : ML ident
let resolve_ident (env: qenv) (i: ident) : ML ident = | true | null | false | let resolve_to_current_module i = { i with v = { i.v with modul_name = Some env.mname } } in
let maybe_resolve_as_ifdef i : ML ident =
match env.global_env.ge_cfg with
| None -> resolve_to_current_module i
| Some (cfg, cfg_module_name) ->
if List.mem i.v.name cfg.compile_time_flags.flags
then { i with v = { i.v with modul_name = Some cfg_module_name } }
else resolve_to_current_module i
in
if List.mem i.v.name prim_consts
then i
else
if List.mem i.v.name env.local_names
then
(if Some? i.v.modul_name
then
error (Printf.sprintf "Ident %s is a local name but has a qualifying modul %s"
i.v.name
(Some?.v i.v.modul_name))
i.range
else i)
else
(match i.v.modul_name with
| None -> maybe_resolve_as_ifdef i
| Some m ->
(match H.try_find env.module_abbrevs m with
| None -> i
| Some m -> { i with v = { i.v with modul_name = Some m } })) | {
"checked_file": "Desugar.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hashtable.fsti.checked",
"GlobalEnv.fst.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.All.fst.checked",
"Ast.fst.checked"
],
"interface_file": false,
"source_file": "Desugar.fst"
} | [
"ml"
] | [
"Desugar.qenv",
"Ast.ident",
"FStar.List.Tot.Base.mem",
"Prims.string",
"Ast.__proj__Mkident'__item__name",
"Ast.__proj__Mkwith_meta_t__item__v",
"Ast.ident'",
"Desugar.prim_consts",
"Prims.bool",
"Desugar.__proj__Mkqenv__item__local_names",
"FStar.Pervasives.Native.uu___is_Some",
"Ast.__proj__Mkident'__item__modul_name",
"Ast.error",
"FStar.Printf.sprintf",
"FStar.Pervasives.Native.__proj__Some__item__v",
"Ast.__proj__Mkwith_meta_t__item__range",
"Ast.Mkwith_meta_t",
"Ast.Mkident'",
"FStar.Pervasives.Native.Some",
"Ast.__proj__Mkwith_meta_t__item__comments",
"FStar.Pervasives.Native.option",
"Hashtable.try_find",
"Desugar.__proj__Mkqenv__item__module_abbrevs",
"Ast.with_meta_t",
"GlobalEnv.__proj__Mkglobal_env__item__ge_cfg",
"Desugar.__proj__Mkqenv__item__global_env",
"Config.config",
"Config.__proj__Mkcompile_time_flags__item__flags",
"Config.__proj__Mkconfig__item__compile_time_flags",
"Desugar.__proj__Mkqenv__item__mname"
] | [] | (*
Copyright 2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Desugar
open FStar.List.Tot
open FStar.Mul
open Ast
open FStar.All
module H = Hashtable
(* This module implements a pass over the source AST,
implementing various simple desugarings
* Desugar enums with newly defined idents and explicit constant
assignments to enums where all the tags are previously defined
constants.
* Resolve module-qualified names, including the use of module
abbreviations
* Set the kind (Spec/Output/Extern) in the type nodes
* Finds variables in Static expressions and hoists them as assumptions
and removes the Static
*)
let check_desugared_enum_cases (cases:list enum_case) : ML (list ident) =
List.map
(function
| (i, None) -> i
| (i, _) -> failwith "Enum should already have been desugared")
cases
let desugar_enum_cases (ityp:integer_type) (cases:list enum_case) (export:bool)
: ML (list enum_case & list decl) =
let find_definition (i:ident) (d:decl) =
match d.d_decl.v with
| Define j _ (Int _ _) ->
i.v = j.v
| _ ->
false
in
let _, cases_rev, ds_rev =
List.fold_left
(fun (next, cases_rev, ds_rev) (i, jopt) ->
let next =
match jopt with
| Some (Inl j) -> j
| Some (Inr j) ->
begin
match List.Tot.find (find_definition j) ds_rev with
| Some ({d_decl={v=Define _ _ (Int _ k)}}) -> k
| _ -> error (Printf.sprintf "Enum identifier %s not found" (print_ident j)) j.range
end
| None -> next
in
let case = (i, None) in
let d = mk_decl
(Define i None (Int ityp next))
i.range
["Enum constant"]
export
in
(next + 1,
case :: cases_rev,
d :: ds_rev))
(0, [], [])
cases
in
List.rev cases_rev,
List.rev ds_rev
let desugar_one_enum (d:decl) : ML (list decl) =
match d.d_decl.v with
| Enum t i cases ->
if List.for_all (fun (_, jopt) -> None? jopt) cases
then [d] //no enum value assignments; no desugaring to do
else //if we have any assignments at all, then we treat all the
//tags as fresh constants and assign them values in sequence
//with respect to the assigned values of preceding tags
let cases, ds = desugar_enum_cases (typ_as_integer_type t) cases d.d_exported in
let enum = decl_with_v d (Enum t i cases) in
ds@[enum]
| _ -> [d]
(* This code is currently not used
It desugars an Enum to a record with a single refined field *)
// let eliminate_enum (d:decl) : ML decl =
// match d.v with
// | Enum t i cases ->
// let names = {
// typedef_name = { i with v = { i.v with name=Ast.reserved_prefix ^ Ast.reserved_prefix ^ i.v.name }};
// typedef_abbrev = i;
// typedef_ptr_abbrev = { i with v = {i.v with
// name = Ast.reserved_prefix ^ Ast.reserved_prefix ^ "P" ^ i.v.name }};
// typedef_attributes = [];
// } in
// let params = [] in
// let where = None in
// let field_ident = with_dummy_range (to_ident' (Ast.reserved_prefix ^ "enum_field")) in
// let field_ident_expr = with_dummy_range (Identifier field_ident) in
// let field_constraint =
// List.fold_right
// (fun (case, _) out ->
// let eq = with_range (App Eq [field_ident_expr; with_range (Identifier case) case.range]) case.range in
// with_dummy_range (App Or [eq; out]))
// cases
// (with_dummy_range (Constant (Bool false)))
// in
// let field = {
// field_dependence = false;
// field_ident = field_ident;
// field_type = t;
// field_array_opt = FieldScalar;
// field_constraint = Some field_constraint;
// field_number = None;
// field_bitwidth = None;
// field_action = None
// } in
// let d' = Record names params where [with_dummy_range field] in
// {d with v = d'}
// | _ -> d
(*
* output_types and extern_types tables to set the kind in the Typ_app nodes
*)
noeq
type qenv = {
mname : string;
module_abbrevs : H.t string string;
output_types : H.t ident' unit;
extern_types : H.t ident' unit;
local_names : list string;
global_env: GlobalEnv.global_env;
}
let push_module_abbrev (env:qenv) (i m:string) : ML unit =
H.insert env.module_abbrevs i m
let push_output_type (env:qenv) (out_t:out_typ) : ML unit =
H.insert env.output_types out_t.out_typ_names.typedef_name.v ();
H.insert env.output_types out_t.out_typ_names.typedef_abbrev.v ()
let push_extern_type (env:qenv) (td:typedef_names) : ML unit =
H.insert env.extern_types td.typedef_name.v ();
H.insert env.extern_types td.typedef_abbrev.v ()
let push_name (env:qenv) (name:string) : qenv =
{ env with local_names = name::env.local_names }
let prim_consts = [
"unit"; "Bool"; "UINT8"; "UINT16"; "UINT32"; "UINT64";
"UINT8BE"; "UINT16BE"; "UINT32BE"; "UINT64BE";
"field_id"; "PUINT8";
"all_bytes"; "all_zeros";
"is_range_okay";
"void" ] | false | false | Desugar.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val resolve_ident (env: qenv) (i: ident) : ML ident | [] | Desugar.resolve_ident | {
"file_name": "src/3d/Desugar.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | env: Desugar.qenv -> i: Ast.ident -> FStar.All.ML Ast.ident | {
"end_col": 77,
"end_line": 202,
"start_col": 51,
"start_line": 174
} |
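
resolve_ident tries, in order: primitive constants (left untouched), local names (which must stay unqualified), and only then top-level resolution against the current module, a compile-time-flag module, or a module abbreviation. A stripped-down sketch of that precedence over bare strings — everything below is invented for illustration and drops the error reporting, #if, and abbreviation cases:

module SketchResolve

(* Qualify a bare name: primitives and locals stay as written, anything else
   is attributed to the current module. *)
let qualify (prims locals:list string) (current_module name:string) : Tot string =
  if FStar.List.Tot.mem name prims then name
  else if FStar.List.Tot.mem name locals then name
  else current_module ^ "." ^ name

(* e.g. qualify ["UINT32"] ["x"] "MyModule" "len" evaluates to "MyModule.len",
   while "UINT32" and "x" are returned unchanged. *)
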
Prims.Tot | [
{
"abbrev": false,
"full_module": "Hacl.Meta.Chacha20.Vec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Chacha20",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Chacha20",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let chacha20_encrypt_32 = vec_chacha20_encrypt_higher #1 True chacha20_init_32 chacha20_core_32 | let chacha20_encrypt_32 = | false | null | false | vec_chacha20_encrypt_higher #1 True chacha20_init_32 chacha20_core_32 | {
"checked_file": "Hacl.Chacha20.Vec32.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hacl.Meta.Chacha20.Vec.fst.checked",
"Hacl.Impl.Chacha20.Vec.fst.checked",
"Hacl.Impl.Chacha20.Core32xN.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Chacha20.Vec32.fst"
} | [
"total"
] | [
"Hacl.Meta.Chacha20.Vec.vec_chacha20_encrypt_higher",
"Prims.l_True",
"Hacl.Chacha20.Vec32.chacha20_init_32",
"Hacl.Chacha20.Vec32.chacha20_core_32"
] | [] | module Hacl.Chacha20.Vec32
open Hacl.Meta.Chacha20.Vec
[@CInline]
private
let double_round_32 = Hacl.Impl.Chacha20.Core32xN.double_round #1
[@CInline]
private
let chacha20_core_32 = vec_chacha20_core_higher #1 True double_round_32
[@CInline]
private
let chacha20_init_32 = Hacl.Impl.Chacha20.Vec.chacha20_init #1 | false | false | Hacl.Chacha20.Vec32.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val chacha20_encrypt_32 : Hacl.Meta.Chacha20.Vec.vec_chacha20_encrypt_higher_t Prims.l_True | [] | Hacl.Chacha20.Vec32.chacha20_encrypt_32 | {
"file_name": "code/chacha20/Hacl.Chacha20.Vec32.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Hacl.Meta.Chacha20.Vec.vec_chacha20_encrypt_higher_t Prims.l_True | {
"end_col": 95,
"end_line": 15,
"start_col": 26,
"start_line": 15
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Hacl.Meta.Chacha20.Vec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Chacha20",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Chacha20",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let chacha20_decrypt_32 = vec_chacha20_decrypt_higher #1 True chacha20_init_32 chacha20_core_32 | let chacha20_decrypt_32 = | false | null | false | vec_chacha20_decrypt_higher #1 True chacha20_init_32 chacha20_core_32 | {
"checked_file": "Hacl.Chacha20.Vec32.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hacl.Meta.Chacha20.Vec.fst.checked",
"Hacl.Impl.Chacha20.Vec.fst.checked",
"Hacl.Impl.Chacha20.Core32xN.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Chacha20.Vec32.fst"
} | [
"total"
] | [
"Hacl.Meta.Chacha20.Vec.vec_chacha20_decrypt_higher",
"Prims.l_True",
"Hacl.Chacha20.Vec32.chacha20_init_32",
"Hacl.Chacha20.Vec32.chacha20_core_32"
] | [] | module Hacl.Chacha20.Vec32
open Hacl.Meta.Chacha20.Vec
[@CInline]
private
let double_round_32 = Hacl.Impl.Chacha20.Core32xN.double_round #1
[@CInline]
private
let chacha20_core_32 = vec_chacha20_core_higher #1 True double_round_32
[@CInline]
private
let chacha20_init_32 = Hacl.Impl.Chacha20.Vec.chacha20_init #1 | false | false | Hacl.Chacha20.Vec32.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val chacha20_decrypt_32 : Hacl.Meta.Chacha20.Vec.vec_chacha20_decrypt_higher_t Prims.l_True | [] | Hacl.Chacha20.Vec32.chacha20_decrypt_32 | {
"file_name": "code/chacha20/Hacl.Chacha20.Vec32.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Hacl.Meta.Chacha20.Vec.vec_chacha20_decrypt_higher_t Prims.l_True | {
"end_col": 95,
"end_line": 16,
"start_col": 26,
"start_line": 16
} |
|
FStar.HyperStack.ST.Stack | [
{
"abbrev": false,
"full_module": "Hacl.Meta.Chacha20.Vec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Chacha20",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Chacha20",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let chacha20_init_32 = Hacl.Impl.Chacha20.Vec.chacha20_init #1 | let chacha20_init_32 = | true | null | false | Hacl.Impl.Chacha20.Vec.chacha20_init #1 | {
"checked_file": "Hacl.Chacha20.Vec32.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hacl.Meta.Chacha20.Vec.fst.checked",
"Hacl.Impl.Chacha20.Vec.fst.checked",
"Hacl.Impl.Chacha20.Core32xN.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Chacha20.Vec32.fst"
} | [] | [
"Hacl.Impl.Chacha20.Vec.chacha20_init"
] | [] | module Hacl.Chacha20.Vec32
open Hacl.Meta.Chacha20.Vec
[@CInline]
private
let double_round_32 = Hacl.Impl.Chacha20.Core32xN.double_round #1
[@CInline]
private
let chacha20_core_32 = vec_chacha20_core_higher #1 True double_round_32
[@CInline] | false | false | Hacl.Chacha20.Vec32.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val chacha20_init_32 : ctx: Hacl.Impl.Chacha20.Core32xN.state 1 ->
k: Lib.Buffer.lbuffer Lib.IntTypes.uint8 32ul ->
n: Lib.Buffer.lbuffer Lib.IntTypes.uint8 12ul ->
ctr0: Lib.IntTypes.size_t
-> FStar.HyperStack.ST.Stack Prims.unit | [] | Hacl.Chacha20.Vec32.chacha20_init_32 | {
"file_name": "code/chacha20/Hacl.Chacha20.Vec32.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
ctx: Hacl.Impl.Chacha20.Core32xN.state 1 ->
k: Lib.Buffer.lbuffer Lib.IntTypes.uint8 32ul ->
n: Lib.Buffer.lbuffer Lib.IntTypes.uint8 12ul ->
ctr0: Lib.IntTypes.size_t
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 62,
"end_line": 13,
"start_col": 23,
"start_line": 13
} |
|
FStar.HyperStack.ST.Stack | [
{
"abbrev": false,
"full_module": "Hacl.Meta.Chacha20.Vec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Chacha20",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Chacha20",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let double_round_32 = Hacl.Impl.Chacha20.Core32xN.double_round #1 | let double_round_32 = | true | null | false | Hacl.Impl.Chacha20.Core32xN.double_round #1 | {
"checked_file": "Hacl.Chacha20.Vec32.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hacl.Meta.Chacha20.Vec.fst.checked",
"Hacl.Impl.Chacha20.Vec.fst.checked",
"Hacl.Impl.Chacha20.Core32xN.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Chacha20.Vec32.fst"
} | [] | [
"Hacl.Impl.Chacha20.Core32xN.double_round"
] | [] | module Hacl.Chacha20.Vec32
open Hacl.Meta.Chacha20.Vec
[@CInline] | false | false | Hacl.Chacha20.Vec32.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val double_round_32 : st: Hacl.Impl.Chacha20.Core32xN.state 1 -> FStar.HyperStack.ST.Stack Prims.unit | [] | Hacl.Chacha20.Vec32.double_round_32 | {
"file_name": "code/chacha20/Hacl.Chacha20.Vec32.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | st: Hacl.Impl.Chacha20.Core32xN.state 1 -> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 65,
"end_line": 7,
"start_col": 22,
"start_line": 7
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Hacl.Meta.Chacha20.Vec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Chacha20",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Chacha20",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let chacha20_core_32 = vec_chacha20_core_higher #1 True double_round_32 | let chacha20_core_32 = | false | null | false | vec_chacha20_core_higher #1 True double_round_32 | {
"checked_file": "Hacl.Chacha20.Vec32.fst.checked",
"dependencies": [
"prims.fst.checked",
"Hacl.Meta.Chacha20.Vec.fst.checked",
"Hacl.Impl.Chacha20.Vec.fst.checked",
"Hacl.Impl.Chacha20.Core32xN.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Chacha20.Vec32.fst"
} | [
"total"
] | [
"Hacl.Meta.Chacha20.Vec.vec_chacha20_core_higher",
"Prims.l_True",
"Hacl.Chacha20.Vec32.double_round_32"
] | [] | module Hacl.Chacha20.Vec32
open Hacl.Meta.Chacha20.Vec
[@CInline]
private
let double_round_32 = Hacl.Impl.Chacha20.Core32xN.double_round #1
[@CInline] | false | false | Hacl.Chacha20.Vec32.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val chacha20_core_32 : Hacl.Meta.Chacha20.Vec.vec_chacha20_core_higher_t Prims.l_True | [] | Hacl.Chacha20.Vec32.chacha20_core_32 | {
"file_name": "code/chacha20/Hacl.Chacha20.Vec32.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Hacl.Meta.Chacha20.Vec.vec_chacha20_core_higher_t Prims.l_True | {
"end_col": 71,
"end_line": 10,
"start_col": 23,
"start_line": 10
} |
|
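Note on the two ChaCha20 rows above: they only record thin aliases into `Hacl.Impl.Chacha20.Core32xN` at vector width 1 (`double_round #1`, `vec_chacha20_core_higher #1`), so the algorithm itself never appears in them. As a point of reference, the sketch below is a plain Python model of one ChaCha20 double round on the 16-word state, following RFC 8439 (four column quarter rounds, then four diagonal ones). It is an illustration of the primitive only; the names and structure are ad hoc, not the extracted HACL* code.

```python
MASK32 = 0xffffffff

def rotl32(x, n):
    # 32-bit left rotation
    return ((x << n) | (x >> (32 - n))) & MASK32

def quarter_round(st, a, b, c, d):
    # one ChaCha20 quarter round on four words of the 16-word state
    st[a] = (st[a] + st[b]) & MASK32; st[d] = rotl32(st[d] ^ st[a], 16)
    st[c] = (st[c] + st[d]) & MASK32; st[b] = rotl32(st[b] ^ st[c], 12)
    st[a] = (st[a] + st[b]) & MASK32; st[d] = rotl32(st[d] ^ st[a], 8)
    st[c] = (st[c] + st[d]) & MASK32; st[b] = rotl32(st[b] ^ st[c], 7)

def double_round(st):
    # column rounds then diagonal rounds (RFC 8439); ChaCha20 runs ten of these
    for idx in ((0, 4, 8, 12), (1, 5, 9, 13), (2, 6, 10, 14), (3, 7, 11, 15),
                (0, 5, 10, 15), (1, 6, 11, 12), (2, 7, 8, 13), (3, 4, 9, 14)):
        quarter_round(st, *idx)
```

The `#1` in both rows instantiates the vectorized code at a single lane, i.e. the scalar variant of the same schedule.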
Prims.Tot | [
{
"abbrev": false,
"full_module": "Hacl.Impl.Exponentiation",
"short_module": null
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lexp_double_fw_st
(a_t:inttype_a)
(len:size_t{v len > 0})
(ctx_len:size_t)
(k:concrete_ops a_t len ctx_len)
(l:size_window_t a_t len) =
ctx:lbuffer (uint_t a_t SEC) ctx_len
-> a1:lbuffer (uint_t a_t SEC) len
-> bLen:size_t
-> bBits:size_t{(v bBits - 1) / bits a_t < v bLen}
-> b1:lbuffer (uint_t a_t SEC) bLen
-> a2:lbuffer (uint_t a_t SEC) len
-> b2:lbuffer (uint_t a_t SEC) bLen
-> res:lbuffer (uint_t a_t SEC) len ->
Stack unit
(requires fun h ->
live h a1 /\ live h b1 /\ live h a2 /\ live h b2 /\ live h res /\ live h ctx /\
eq_or_disjoint a1 a2 /\ disjoint a1 res /\ disjoint a1 ctx /\
disjoint a2 res /\ disjoint a2 ctx /\
disjoint res b1 /\ disjoint res b2 /\ disjoint res ctx /\
BD.bn_v h b1 < pow2 (v bBits) /\
BD.bn_v h b2 < pow2 (v bBits) /\
k.to.linv_ctx (as_seq h ctx) /\
k.to.linv (as_seq h a1) /\ k.to.linv (as_seq h a2))
(ensures fun h0 _ h1 -> modifies (loc res) h0 h1 /\
k.to.linv (as_seq h1 res) /\
k.to.refl (as_seq h1 res) ==
S.exp_double_fw #k.to.a_spec k.to.comm_monoid
(k.to.refl (as_seq h0 a1)) (v bBits) (BD.bn_v h0 b1)
(k.to.refl (as_seq h0 a2)) (BD.bn_v h0 b2) (v l)) | let lexp_double_fw_st
(a_t: inttype_a)
(len: size_t{v len > 0})
(ctx_len: size_t)
(k: concrete_ops a_t len ctx_len)
(l: size_window_t a_t len)
= | false | null | false |
ctx: lbuffer (uint_t a_t SEC) ctx_len ->
a1: lbuffer (uint_t a_t SEC) len ->
bLen: size_t ->
bBits: size_t{(v bBits - 1) / bits a_t < v bLen} ->
b1: lbuffer (uint_t a_t SEC) bLen ->
a2: lbuffer (uint_t a_t SEC) len ->
b2: lbuffer (uint_t a_t SEC) bLen ->
res: lbuffer (uint_t a_t SEC) len
-> Stack unit
(requires
fun h ->
live h a1 /\ live h b1 /\ live h a2 /\ live h b2 /\ live h res /\ live h ctx /\
eq_or_disjoint a1 a2 /\ disjoint a1 res /\ disjoint a1 ctx /\ disjoint a2 res /\
disjoint a2 ctx /\ disjoint res b1 /\ disjoint res b2 /\ disjoint res ctx /\
BD.bn_v h b1 < pow2 (v bBits) /\ BD.bn_v h b2 < pow2 (v bBits) /\
k.to.linv_ctx (as_seq h ctx) /\ k.to.linv (as_seq h a1) /\ k.to.linv (as_seq h a2))
(ensures
fun h0 _ h1 ->
modifies (loc res) h0 h1 /\ k.to.linv (as_seq h1 res) /\
k.to.refl (as_seq h1 res) ==
S.exp_double_fw #k.to.a_spec
k.to.comm_monoid
(k.to.refl (as_seq h0 a1))
(v bBits)
(BD.bn_v h0 b1)
(k.to.refl (as_seq h0 a2))
(BD.bn_v h0 b2)
(v l)) | {
"checked_file": "Hacl.Impl.MultiExponentiation.fsti.checked",
"dependencies": [
"prims.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Exponentiation.fsti.checked",
"Hacl.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.MultiExponentiation.fsti"
} | [
"total"
] | [
"Hacl.Impl.Exponentiation.Definitions.inttype_a",
"Lib.IntTypes.size_t",
"Prims.b2t",
"Prims.op_GreaterThan",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Hacl.Impl.Exponentiation.Definitions.concrete_ops",
"Hacl.Impl.Exponentiation.size_window_t",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint_t",
"Lib.IntTypes.SEC",
"Prims.op_LessThan",
"Prims.op_Division",
"Prims.op_Subtraction",
"Lib.IntTypes.bits",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Lib.Buffer.live",
"Lib.Buffer.MUT",
"Lib.Buffer.eq_or_disjoint",
"Lib.Buffer.disjoint",
"Hacl.Bignum.Definitions.bn_v",
"Prims.pow2",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__linv_ctx",
"FStar.Ghost.reveal",
"Hacl.Impl.Exponentiation.Definitions.to_comm_monoid",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkconcrete_ops__item__to",
"Lib.Buffer.as_seq",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__linv",
"Lib.Buffer.modifies",
"Lib.Buffer.loc",
"Prims.eq2",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__a_spec",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__refl",
"Lib.Exponentiation.exp_double_fw",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__comm_monoid"
] | [] | module Hacl.Impl.MultiExponentiation
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
module ST = FStar.HyperStack.ST
module S = Lib.Exponentiation
module BD = Hacl.Bignum.Definitions
open Hacl.Impl.Exponentiation
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
// Double Fixed-window method using two precomputed tables
//---------------------------------------------------------
inline_for_extraction noextract
let lexp_double_fw_tables_st
(a_t:inttype_a)
(len:size_t{v len > 0})
(ctx_len:size_t)
(k:concrete_ops a_t len ctx_len)
(l:size_window_t a_t len)
(table_len:table_len_t len)
(table_inv1:table_inv_t a_t len table_len)
(table_inv2:table_inv_t a_t len table_len)
=
ctx:lbuffer (uint_t a_t SEC) ctx_len
-> a1:lbuffer (uint_t a_t SEC) len
-> bLen:size_t
-> bBits:size_t{(v bBits - 1) / bits a_t < v bLen}
-> b1:lbuffer (uint_t a_t SEC) bLen
-> a2:lbuffer (uint_t a_t SEC) len
-> b2:lbuffer (uint_t a_t SEC) bLen
-> table1:clbuffer (uint_t a_t SEC) (table_len *! len)
-> table2:clbuffer (uint_t a_t SEC) (table_len *! len)
-> res:lbuffer (uint_t a_t SEC) len ->
Stack unit
(requires fun h ->
live h a1 /\ live h b1 /\ live h a2 /\ live h b2 /\
live h res /\ live h ctx /\ live h table1 /\ live h table2 /\
eq_or_disjoint a1 a2 /\ disjoint a1 res /\ disjoint a1 ctx /\
disjoint b1 res /\ disjoint a2 res /\ disjoint a2 ctx /\
disjoint b2 res /\ disjoint res ctx /\ disjoint res table1 /\ disjoint res table2 /\
BD.bn_v h b1 < pow2 (v bBits) /\
BD.bn_v h b2 < pow2 (v bBits) /\
k.to.linv_ctx (as_seq h ctx) /\
k.to.linv (as_seq h a1) /\ k.to.linv (as_seq h a2) /\
table_inv1 (as_seq h a1) (as_seq h table1) /\
table_inv2 (as_seq h a2) (as_seq h table2))
(ensures fun h0 _ h1 -> modifies (loc res) h0 h1 /\
k.to.linv (as_seq h1 res) /\
k.to.refl (as_seq h1 res) ==
S.exp_double_fw k.to.comm_monoid
(k.to.refl (as_seq h0 a1)) (v bBits) (BD.bn_v h0 b1)
(k.to.refl (as_seq h0 a2)) (BD.bn_v h0 b2) (v l))
inline_for_extraction noextract
val mk_lexp_double_fw_tables:
#a_t:inttype_a
-> len:size_t{v len > 0}
-> ctx_len:size_t
-> k:concrete_ops a_t len ctx_len
-> l:size_window_t a_t len
-> table_len:table_len_t len
-> table_inv1:table_inv_t a_t len table_len
-> table_inv2:table_inv_t a_t len table_len
-> pow_a_to_small_b1:pow_a_to_small_b_st a_t len ctx_len k l table_len table_inv1
-> pow_a_to_small_b2:pow_a_to_small_b_st a_t len ctx_len k l table_len table_inv2 ->
lexp_double_fw_tables_st a_t len ctx_len k l table_len table_inv1 table_inv2
// Double Fixed-window method with two precomputed tables
// table1 = [a1^0 = one; a1^1; a1^2; ..; a1^(table_len - 1)]
// table2 = [a2^0 = one; a2^1; a2^2; ..; a2^(table_len - 1)]
//-----------------------------------------------------------
inline_for_extraction noextract
let lexp_double_fw_st
(a_t:inttype_a)
(len:size_t{v len > 0})
(ctx_len:size_t)
(k:concrete_ops a_t len ctx_len) | false | false | Hacl.Impl.MultiExponentiation.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lexp_double_fw_st : a_t: Hacl.Impl.Exponentiation.Definitions.inttype_a ->
len: Lib.IntTypes.size_t{Lib.IntTypes.v len > 0} ->
ctx_len: Lib.IntTypes.size_t ->
k: Hacl.Impl.Exponentiation.Definitions.concrete_ops a_t len ctx_len ->
l: Hacl.Impl.Exponentiation.size_window_t a_t len
-> Type0 | [] | Hacl.Impl.MultiExponentiation.lexp_double_fw_st | {
"file_name": "code/bignum/Hacl.Impl.MultiExponentiation.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
a_t: Hacl.Impl.Exponentiation.Definitions.inttype_a ->
len: Lib.IntTypes.size_t{Lib.IntTypes.v len > 0} ->
ctx_len: Lib.IntTypes.size_t ->
k: Hacl.Impl.Exponentiation.Definitions.concrete_ops a_t len ctx_len ->
l: Hacl.Impl.Exponentiation.size_window_t a_t len
-> Type0 | {
"end_col": 55,
"end_line": 115,
"start_col": 4,
"start_line": 92
} |
|
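Note on the `lexp_double_fw_st` row above: its postcondition is stated against `Lib.Exponentiation.exp_double_fw`, i.e. computing `a1^b1 * a2^b2` with a single shared squaring chain (Shamir's trick) and a fixed window of `l` bits. The Python sketch below models that idea over an abstract monoid passed in as a `mul` function and unit `one`; it is a rough illustration under those assumptions, not the F* specification (which may handle the leading partial window differently), and it ignores the side-channel concerns the real implementations address. The example values at the bottom are arbitrary.

```python
def exp_double_fw(mul, one, a1, b1, a2, b2, bBits, l):
    """Compute a1^b1 * a2^b2 with one shared squaring chain and l-bit windows.

    mul/one describe the monoid; b1 and b2 must be < 2**bBits.  Rough model only.
    """
    mask = (1 << l) - 1
    # per-base tables of small powers: t[i] = a^i for 0 <= i < 2^l
    t1, t2 = [one], [one]
    for _ in range(mask):
        t1.append(mul(t1[-1], a1))
        t2.append(mul(t2[-1], a2))
    acc = one
    windows = (bBits + l - 1) // l
    for i in reversed(range(windows)):
        for _ in range(l):              # l squarings, shared by both bases
            acc = mul(acc, acc)
        acc = mul(acc, t1[(b1 >> (i * l)) & mask])   # digit of b1
        acc = mul(acc, t2[(b2 >> (i * l)) & mask])   # digit of b2
    return acc

# Sanity check over integers modulo an arbitrary example modulus:
p = 1000003
mulp = lambda x, y: (x * y) % p
expected = (pow(5, 12345, p) * pow(7, 67890, p)) % p
assert exp_double_fw(mulp, 1, 5, 12345, 7, 67890, 17, 4) == expected
```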
Prims.Tot | [
{
"abbrev": false,
"full_module": "Hacl.Impl.Exponentiation",
"short_module": null
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lexp_double_fw_tables_st
(a_t:inttype_a)
(len:size_t{v len > 0})
(ctx_len:size_t)
(k:concrete_ops a_t len ctx_len)
(l:size_window_t a_t len)
(table_len:table_len_t len)
(table_inv1:table_inv_t a_t len table_len)
(table_inv2:table_inv_t a_t len table_len)
=
ctx:lbuffer (uint_t a_t SEC) ctx_len
-> a1:lbuffer (uint_t a_t SEC) len
-> bLen:size_t
-> bBits:size_t{(v bBits - 1) / bits a_t < v bLen}
-> b1:lbuffer (uint_t a_t SEC) bLen
-> a2:lbuffer (uint_t a_t SEC) len
-> b2:lbuffer (uint_t a_t SEC) bLen
-> table1:clbuffer (uint_t a_t SEC) (table_len *! len)
-> table2:clbuffer (uint_t a_t SEC) (table_len *! len)
-> res:lbuffer (uint_t a_t SEC) len ->
Stack unit
(requires fun h ->
live h a1 /\ live h b1 /\ live h a2 /\ live h b2 /\
live h res /\ live h ctx /\ live h table1 /\ live h table2 /\
eq_or_disjoint a1 a2 /\ disjoint a1 res /\ disjoint a1 ctx /\
disjoint b1 res /\ disjoint a2 res /\ disjoint a2 ctx /\
disjoint b2 res /\ disjoint res ctx /\ disjoint res table1 /\ disjoint res table2 /\
BD.bn_v h b1 < pow2 (v bBits) /\
BD.bn_v h b2 < pow2 (v bBits) /\
k.to.linv_ctx (as_seq h ctx) /\
k.to.linv (as_seq h a1) /\ k.to.linv (as_seq h a2) /\
table_inv1 (as_seq h a1) (as_seq h table1) /\
table_inv2 (as_seq h a2) (as_seq h table2))
(ensures fun h0 _ h1 -> modifies (loc res) h0 h1 /\
k.to.linv (as_seq h1 res) /\
k.to.refl (as_seq h1 res) ==
S.exp_double_fw k.to.comm_monoid
(k.to.refl (as_seq h0 a1)) (v bBits) (BD.bn_v h0 b1)
(k.to.refl (as_seq h0 a2)) (BD.bn_v h0 b2) (v l)) | let lexp_double_fw_tables_st
(a_t: inttype_a)
(len: size_t{v len > 0})
(ctx_len: size_t)
(k: concrete_ops a_t len ctx_len)
(l: size_window_t a_t len)
(table_len: table_len_t len)
(table_inv1 table_inv2: table_inv_t a_t len table_len)
= | false | null | false |
ctx: lbuffer (uint_t a_t SEC) ctx_len ->
a1: lbuffer (uint_t a_t SEC) len ->
bLen: size_t ->
bBits: size_t{(v bBits - 1) / bits a_t < v bLen} ->
b1: lbuffer (uint_t a_t SEC) bLen ->
a2: lbuffer (uint_t a_t SEC) len ->
b2: lbuffer (uint_t a_t SEC) bLen ->
table1: clbuffer (uint_t a_t SEC) (table_len *! len) ->
table2: clbuffer (uint_t a_t SEC) (table_len *! len) ->
res: lbuffer (uint_t a_t SEC) len
-> Stack unit
(requires
fun h ->
live h a1 /\ live h b1 /\ live h a2 /\ live h b2 /\ live h res /\ live h ctx /\
live h table1 /\ live h table2 /\ eq_or_disjoint a1 a2 /\ disjoint a1 res /\
disjoint a1 ctx /\ disjoint b1 res /\ disjoint a2 res /\ disjoint a2 ctx /\
disjoint b2 res /\ disjoint res ctx /\ disjoint res table1 /\ disjoint res table2 /\
BD.bn_v h b1 < pow2 (v bBits) /\ BD.bn_v h b2 < pow2 (v bBits) /\
k.to.linv_ctx (as_seq h ctx) /\ k.to.linv (as_seq h a1) /\ k.to.linv (as_seq h a2) /\
table_inv1 (as_seq h a1) (as_seq h table1) /\ table_inv2 (as_seq h a2) (as_seq h table2))
(ensures
fun h0 _ h1 ->
modifies (loc res) h0 h1 /\ k.to.linv (as_seq h1 res) /\
k.to.refl (as_seq h1 res) ==
S.exp_double_fw k.to.comm_monoid
(k.to.refl (as_seq h0 a1))
(v bBits)
(BD.bn_v h0 b1)
(k.to.refl (as_seq h0 a2))
(BD.bn_v h0 b2)
(v l)) | {
"checked_file": "Hacl.Impl.MultiExponentiation.fsti.checked",
"dependencies": [
"prims.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Exponentiation.fsti.checked",
"Hacl.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.MultiExponentiation.fsti"
} | [
"total"
] | [
"Hacl.Impl.Exponentiation.Definitions.inttype_a",
"Lib.IntTypes.size_t",
"Prims.b2t",
"Prims.op_GreaterThan",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Hacl.Impl.Exponentiation.Definitions.concrete_ops",
"Hacl.Impl.Exponentiation.size_window_t",
"Hacl.Impl.Exponentiation.table_len_t",
"Hacl.Impl.Exponentiation.table_inv_t",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint_t",
"Lib.IntTypes.SEC",
"Prims.op_LessThan",
"Prims.op_Division",
"Prims.op_Subtraction",
"Lib.IntTypes.bits",
"Lib.Buffer.clbuffer",
"Lib.IntTypes.op_Star_Bang",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Lib.Buffer.live",
"Lib.Buffer.MUT",
"Lib.Buffer.CONST",
"Lib.Buffer.eq_or_disjoint",
"Lib.Buffer.disjoint",
"Hacl.Bignum.Definitions.bn_v",
"Prims.pow2",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__linv_ctx",
"FStar.Ghost.reveal",
"Hacl.Impl.Exponentiation.Definitions.to_comm_monoid",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkconcrete_ops__item__to",
"Lib.Buffer.as_seq",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__linv",
"Lib.Buffer.modifies",
"Lib.Buffer.loc",
"Prims.eq2",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__a_spec",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__refl",
"Lib.Exponentiation.exp_double_fw",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__comm_monoid"
] | [] | module Hacl.Impl.MultiExponentiation
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
module ST = FStar.HyperStack.ST
module S = Lib.Exponentiation
module BD = Hacl.Bignum.Definitions
open Hacl.Impl.Exponentiation
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
// Double Fixed-window method using two precomputed tables
//---------------------------------------------------------
inline_for_extraction noextract
let lexp_double_fw_tables_st
(a_t:inttype_a)
(len:size_t{v len > 0})
(ctx_len:size_t)
(k:concrete_ops a_t len ctx_len)
(l:size_window_t a_t len)
(table_len:table_len_t len)
(table_inv1:table_inv_t a_t len table_len)
(table_inv2:table_inv_t a_t len table_len) | false | false | Hacl.Impl.MultiExponentiation.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lexp_double_fw_tables_st : a_t: Hacl.Impl.Exponentiation.Definitions.inttype_a ->
len: Lib.IntTypes.size_t{Lib.IntTypes.v len > 0} ->
ctx_len: Lib.IntTypes.size_t ->
k: Hacl.Impl.Exponentiation.Definitions.concrete_ops a_t len ctx_len ->
l: Hacl.Impl.Exponentiation.size_window_t a_t len ->
table_len: Hacl.Impl.Exponentiation.table_len_t len ->
table_inv1: Hacl.Impl.Exponentiation.table_inv_t a_t len table_len ->
table_inv2: Hacl.Impl.Exponentiation.table_inv_t a_t len table_len
-> Type0 | [] | Hacl.Impl.MultiExponentiation.lexp_double_fw_tables_st | {
"file_name": "code/bignum/Hacl.Impl.MultiExponentiation.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
a_t: Hacl.Impl.Exponentiation.Definitions.inttype_a ->
len: Lib.IntTypes.size_t{Lib.IntTypes.v len > 0} ->
ctx_len: Lib.IntTypes.size_t ->
k: Hacl.Impl.Exponentiation.Definitions.concrete_ops a_t len ctx_len ->
l: Hacl.Impl.Exponentiation.size_window_t a_t len ->
table_len: Hacl.Impl.Exponentiation.table_len_t len ->
table_inv1: Hacl.Impl.Exponentiation.table_inv_t a_t len table_len ->
table_inv2: Hacl.Impl.Exponentiation.table_inv_t a_t len table_len
-> Type0 | {
"end_col": 55,
"end_line": 62,
"start_col": 4,
"start_line": 32
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Hacl.Impl.Exponentiation",
"short_module": null
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lexp_four_fw_tables_st
(a_t:inttype_a)
(len:size_t{v len > 0})
(ctx_len:size_t)
(k:concrete_ops a_t len ctx_len)
(l:size_window_t a_t len)
(table_len:table_len_t len)
(table_inv1:table_inv_t a_t len table_len)
(table_inv2:table_inv_t a_t len table_len)
(table_inv3:table_inv_t a_t len table_len)
(table_inv4:table_inv_t a_t len table_len)
=
ctx:lbuffer (uint_t a_t SEC) ctx_len
-> a1:lbuffer (uint_t a_t SEC) len
-> bLen:size_t
-> bBits:size_t{(v bBits - 1) / bits a_t < v bLen}
-> b1:lbuffer (uint_t a_t SEC) bLen
-> a2:lbuffer (uint_t a_t SEC) len
-> b2:lbuffer (uint_t a_t SEC) bLen
-> a3:lbuffer (uint_t a_t SEC) len
-> b3:lbuffer (uint_t a_t SEC) bLen
-> a4:lbuffer (uint_t a_t SEC) len
-> b4:lbuffer (uint_t a_t SEC) bLen
-> table1:clbuffer (uint_t a_t SEC) (table_len *! len)
-> table2:clbuffer (uint_t a_t SEC) (table_len *! len)
-> table3:clbuffer (uint_t a_t SEC) (table_len *! len)
-> table4:clbuffer (uint_t a_t SEC) (table_len *! len)
-> res:lbuffer (uint_t a_t SEC) len ->
Stack unit
(requires fun h ->
live h a1 /\ live h b1 /\ live h a2 /\ live h b2 /\
live h a3 /\ live h b3 /\ live h a4 /\ live h b4 /\
live h res /\ live h ctx /\
live h table1 /\ live h table2 /\ live h table3 /\ live h table4 /\
eq_or_disjoint a1 a2 /\ eq_or_disjoint a1 a3 /\ eq_or_disjoint a1 a4 /\
eq_or_disjoint a2 a3 /\ eq_or_disjoint a2 a4 /\ eq_or_disjoint a3 a4 /\
disjoint a1 res /\ disjoint a1 ctx /\ disjoint a2 res /\ disjoint a2 ctx /\
disjoint a3 res /\ disjoint a3 ctx /\ disjoint a4 res /\ disjoint a4 ctx /\
disjoint b1 res /\ disjoint b2 res /\ disjoint b3 res /\ disjoint b4 res /\
disjoint res ctx /\ disjoint res table1 /\ disjoint res table2 /\
disjoint res table3 /\ disjoint res table4 /\
BD.bn_v h b1 < pow2 (v bBits) /\
BD.bn_v h b2 < pow2 (v bBits) /\
BD.bn_v h b3 < pow2 (v bBits) /\
BD.bn_v h b4 < pow2 (v bBits) /\
k.to.linv_ctx (as_seq h ctx) /\
k.to.linv (as_seq h a1) /\ k.to.linv (as_seq h a2) /\
k.to.linv (as_seq h a3) /\ k.to.linv (as_seq h a4) /\
table_inv1 (as_seq h a1) (as_seq h table1) /\
table_inv2 (as_seq h a2) (as_seq h table2) /\
table_inv3 (as_seq h a3) (as_seq h table3) /\
table_inv4 (as_seq h a4) (as_seq h table4))
(ensures fun h0 _ h1 -> modifies (loc res) h0 h1 /\
k.to.linv (as_seq h1 res) /\
k.to.refl (as_seq h1 res) ==
S.exp_four_fw k.to.comm_monoid
(k.to.refl (as_seq h0 a1)) (v bBits) (BD.bn_v h0 b1)
(k.to.refl (as_seq h0 a2)) (BD.bn_v h0 b2)
(k.to.refl (as_seq h0 a3)) (BD.bn_v h0 b3)
(k.to.refl (as_seq h0 a4)) (BD.bn_v h0 b4) (v l)) | let lexp_four_fw_tables_st
(a_t: inttype_a)
(len: size_t{v len > 0})
(ctx_len: size_t)
(k: concrete_ops a_t len ctx_len)
(l: size_window_t a_t len)
(table_len: table_len_t len)
(table_inv1 table_inv2 table_inv3 table_inv4: table_inv_t a_t len table_len)
= | false | null | false |
ctx: lbuffer (uint_t a_t SEC) ctx_len ->
a1: lbuffer (uint_t a_t SEC) len ->
bLen: size_t ->
bBits: size_t{(v bBits - 1) / bits a_t < v bLen} ->
b1: lbuffer (uint_t a_t SEC) bLen ->
a2: lbuffer (uint_t a_t SEC) len ->
b2: lbuffer (uint_t a_t SEC) bLen ->
a3: lbuffer (uint_t a_t SEC) len ->
b3: lbuffer (uint_t a_t SEC) bLen ->
a4: lbuffer (uint_t a_t SEC) len ->
b4: lbuffer (uint_t a_t SEC) bLen ->
table1: clbuffer (uint_t a_t SEC) (table_len *! len) ->
table2: clbuffer (uint_t a_t SEC) (table_len *! len) ->
table3: clbuffer (uint_t a_t SEC) (table_len *! len) ->
table4: clbuffer (uint_t a_t SEC) (table_len *! len) ->
res: lbuffer (uint_t a_t SEC) len
-> Stack unit
(requires
fun h ->
live h a1 /\ live h b1 /\ live h a2 /\ live h b2 /\ live h a3 /\ live h b3 /\ live h a4 /\
live h b4 /\ live h res /\ live h ctx /\ live h table1 /\ live h table2 /\ live h table3 /\
live h table4 /\ eq_or_disjoint a1 a2 /\ eq_or_disjoint a1 a3 /\ eq_or_disjoint a1 a4 /\
eq_or_disjoint a2 a3 /\ eq_or_disjoint a2 a4 /\ eq_or_disjoint a3 a4 /\ disjoint a1 res /\
disjoint a1 ctx /\ disjoint a2 res /\ disjoint a2 ctx /\ disjoint a3 res /\
disjoint a3 ctx /\ disjoint a4 res /\ disjoint a4 ctx /\ disjoint b1 res /\
disjoint b2 res /\ disjoint b3 res /\ disjoint b4 res /\ disjoint res ctx /\
disjoint res table1 /\ disjoint res table2 /\ disjoint res table3 /\ disjoint res table4 /\
BD.bn_v h b1 < pow2 (v bBits) /\ BD.bn_v h b2 < pow2 (v bBits) /\
BD.bn_v h b3 < pow2 (v bBits) /\ BD.bn_v h b4 < pow2 (v bBits) /\
k.to.linv_ctx (as_seq h ctx) /\ k.to.linv (as_seq h a1) /\ k.to.linv (as_seq h a2) /\
k.to.linv (as_seq h a3) /\ k.to.linv (as_seq h a4) /\
table_inv1 (as_seq h a1) (as_seq h table1) /\ table_inv2 (as_seq h a2) (as_seq h table2) /\
table_inv3 (as_seq h a3) (as_seq h table3) /\ table_inv4 (as_seq h a4) (as_seq h table4))
(ensures
fun h0 _ h1 ->
modifies (loc res) h0 h1 /\ k.to.linv (as_seq h1 res) /\
k.to.refl (as_seq h1 res) ==
S.exp_four_fw k.to.comm_monoid (k.to.refl (as_seq h0 a1)) (v bBits) (BD.bn_v h0 b1)
(k.to.refl (as_seq h0 a2)) (BD.bn_v h0 b2) (k.to.refl (as_seq h0 a3)) (BD.bn_v h0 b3)
(k.to.refl (as_seq h0 a4)) (BD.bn_v h0 b4) (v l)) | {
"checked_file": "Hacl.Impl.MultiExponentiation.fsti.checked",
"dependencies": [
"prims.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Exponentiation.fsti.checked",
"Hacl.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.MultiExponentiation.fsti"
} | [
"total"
] | [
"Hacl.Impl.Exponentiation.Definitions.inttype_a",
"Lib.IntTypes.size_t",
"Prims.b2t",
"Prims.op_GreaterThan",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Hacl.Impl.Exponentiation.Definitions.concrete_ops",
"Hacl.Impl.Exponentiation.size_window_t",
"Hacl.Impl.Exponentiation.table_len_t",
"Hacl.Impl.Exponentiation.table_inv_t",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint_t",
"Lib.IntTypes.SEC",
"Prims.op_LessThan",
"Prims.op_Division",
"Prims.op_Subtraction",
"Lib.IntTypes.bits",
"Lib.Buffer.clbuffer",
"Lib.IntTypes.op_Star_Bang",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Lib.Buffer.live",
"Lib.Buffer.MUT",
"Lib.Buffer.CONST",
"Lib.Buffer.eq_or_disjoint",
"Lib.Buffer.disjoint",
"Hacl.Bignum.Definitions.bn_v",
"Prims.pow2",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__linv_ctx",
"FStar.Ghost.reveal",
"Hacl.Impl.Exponentiation.Definitions.to_comm_monoid",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkconcrete_ops__item__to",
"Lib.Buffer.as_seq",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__linv",
"Lib.Buffer.modifies",
"Lib.Buffer.loc",
"Prims.eq2",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__a_spec",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__refl",
"Lib.Exponentiation.exp_four_fw",
"Hacl.Impl.Exponentiation.Definitions.__proj__Mkto_comm_monoid__item__comm_monoid"
] | [] | module Hacl.Impl.MultiExponentiation
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
module ST = FStar.HyperStack.ST
module S = Lib.Exponentiation
module BD = Hacl.Bignum.Definitions
open Hacl.Impl.Exponentiation
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
// Double Fixed-window method using two precomputed tables
//---------------------------------------------------------
inline_for_extraction noextract
let lexp_double_fw_tables_st
(a_t:inttype_a)
(len:size_t{v len > 0})
(ctx_len:size_t)
(k:concrete_ops a_t len ctx_len)
(l:size_window_t a_t len)
(table_len:table_len_t len)
(table_inv1:table_inv_t a_t len table_len)
(table_inv2:table_inv_t a_t len table_len)
=
ctx:lbuffer (uint_t a_t SEC) ctx_len
-> a1:lbuffer (uint_t a_t SEC) len
-> bLen:size_t
-> bBits:size_t{(v bBits - 1) / bits a_t < v bLen}
-> b1:lbuffer (uint_t a_t SEC) bLen
-> a2:lbuffer (uint_t a_t SEC) len
-> b2:lbuffer (uint_t a_t SEC) bLen
-> table1:clbuffer (uint_t a_t SEC) (table_len *! len)
-> table2:clbuffer (uint_t a_t SEC) (table_len *! len)
-> res:lbuffer (uint_t a_t SEC) len ->
Stack unit
(requires fun h ->
live h a1 /\ live h b1 /\ live h a2 /\ live h b2 /\
live h res /\ live h ctx /\ live h table1 /\ live h table2 /\
eq_or_disjoint a1 a2 /\ disjoint a1 res /\ disjoint a1 ctx /\
disjoint b1 res /\ disjoint a2 res /\ disjoint a2 ctx /\
disjoint b2 res /\ disjoint res ctx /\ disjoint res table1 /\ disjoint res table2 /\
BD.bn_v h b1 < pow2 (v bBits) /\
BD.bn_v h b2 < pow2 (v bBits) /\
k.to.linv_ctx (as_seq h ctx) /\
k.to.linv (as_seq h a1) /\ k.to.linv (as_seq h a2) /\
table_inv1 (as_seq h a1) (as_seq h table1) /\
table_inv2 (as_seq h a2) (as_seq h table2))
(ensures fun h0 _ h1 -> modifies (loc res) h0 h1 /\
k.to.linv (as_seq h1 res) /\
k.to.refl (as_seq h1 res) ==
S.exp_double_fw k.to.comm_monoid
(k.to.refl (as_seq h0 a1)) (v bBits) (BD.bn_v h0 b1)
(k.to.refl (as_seq h0 a2)) (BD.bn_v h0 b2) (v l))
inline_for_extraction noextract
val mk_lexp_double_fw_tables:
#a_t:inttype_a
-> len:size_t{v len > 0}
-> ctx_len:size_t
-> k:concrete_ops a_t len ctx_len
-> l:size_window_t a_t len
-> table_len:table_len_t len
-> table_inv1:table_inv_t a_t len table_len
-> table_inv2:table_inv_t a_t len table_len
-> pow_a_to_small_b1:pow_a_to_small_b_st a_t len ctx_len k l table_len table_inv1
-> pow_a_to_small_b2:pow_a_to_small_b_st a_t len ctx_len k l table_len table_inv2 ->
lexp_double_fw_tables_st a_t len ctx_len k l table_len table_inv1 table_inv2
// Double Fixed-window method with two precomputed tables
// table1 = [a1^0 = one; a1^1; a1^2; ..; a1^(table_len - 1)]
// table2 = [a2^0 = one; a2^1; a2^2; ..; a2^(table_len - 1)]
//-----------------------------------------------------------
inline_for_extraction noextract
let lexp_double_fw_st
(a_t:inttype_a)
(len:size_t{v len > 0})
(ctx_len:size_t)
(k:concrete_ops a_t len ctx_len)
(l:size_window_t a_t len) =
ctx:lbuffer (uint_t a_t SEC) ctx_len
-> a1:lbuffer (uint_t a_t SEC) len
-> bLen:size_t
-> bBits:size_t{(v bBits - 1) / bits a_t < v bLen}
-> b1:lbuffer (uint_t a_t SEC) bLen
-> a2:lbuffer (uint_t a_t SEC) len
-> b2:lbuffer (uint_t a_t SEC) bLen
-> res:lbuffer (uint_t a_t SEC) len ->
Stack unit
(requires fun h ->
live h a1 /\ live h b1 /\ live h a2 /\ live h b2 /\ live h res /\ live h ctx /\
eq_or_disjoint a1 a2 /\ disjoint a1 res /\ disjoint a1 ctx /\
disjoint a2 res /\ disjoint a2 ctx /\
disjoint res b1 /\ disjoint res b2 /\ disjoint res ctx /\
BD.bn_v h b1 < pow2 (v bBits) /\
BD.bn_v h b2 < pow2 (v bBits) /\
k.to.linv_ctx (as_seq h ctx) /\
k.to.linv (as_seq h a1) /\ k.to.linv (as_seq h a2))
(ensures fun h0 _ h1 -> modifies (loc res) h0 h1 /\
k.to.linv (as_seq h1 res) /\
k.to.refl (as_seq h1 res) ==
S.exp_double_fw #k.to.a_spec k.to.comm_monoid
(k.to.refl (as_seq h0 a1)) (v bBits) (BD.bn_v h0 b1)
(k.to.refl (as_seq h0 a2)) (BD.bn_v h0 b2) (v l))
// This function computes `a1^b1 `mul` a2^b2` using a fixed-window method
// It takes variable time to compute the result
inline_for_extraction noextract
val lexp_double_fw_vartime:
#a_t:inttype_a
-> len:size_t{v len > 0}
-> ctx_len:size_t
-> k:concrete_ops a_t len ctx_len
-> l:size_window_t a_t len ->
lexp_double_fw_st a_t len ctx_len k l
// This function computes `a1^b1 `mul` a2^b2` using a fixed-window method
// It takes constant time to compute the result
inline_for_extraction noextract
val lexp_double_fw_consttime:
#a_t:inttype_a
-> len:size_t{v len > 0}
-> ctx_len:size_t
-> k:concrete_ops a_t len ctx_len
-> l:size_window_t a_t len ->
lexp_double_fw_st a_t len ctx_len k l
//--------------------------------------------------
inline_for_extraction noextract
let lexp_four_fw_tables_st
(a_t:inttype_a)
(len:size_t{v len > 0})
(ctx_len:size_t)
(k:concrete_ops a_t len ctx_len)
(l:size_window_t a_t len)
(table_len:table_len_t len)
(table_inv1:table_inv_t a_t len table_len)
(table_inv2:table_inv_t a_t len table_len)
(table_inv3:table_inv_t a_t len table_len)
(table_inv4:table_inv_t a_t len table_len) | false | false | Hacl.Impl.MultiExponentiation.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lexp_four_fw_tables_st : a_t: Hacl.Impl.Exponentiation.Definitions.inttype_a ->
len: Lib.IntTypes.size_t{Lib.IntTypes.v len > 0} ->
ctx_len: Lib.IntTypes.size_t ->
k: Hacl.Impl.Exponentiation.Definitions.concrete_ops a_t len ctx_len ->
l: Hacl.Impl.Exponentiation.size_window_t a_t len ->
table_len: Hacl.Impl.Exponentiation.table_len_t len ->
table_inv1: Hacl.Impl.Exponentiation.table_inv_t a_t len table_len ->
table_inv2: Hacl.Impl.Exponentiation.table_inv_t a_t len table_len ->
table_inv3: Hacl.Impl.Exponentiation.table_inv_t a_t len table_len ->
table_inv4: Hacl.Impl.Exponentiation.table_inv_t a_t len table_len
-> Type0 | [] | Hacl.Impl.MultiExponentiation.lexp_four_fw_tables_st | {
"file_name": "code/bignum/Hacl.Impl.MultiExponentiation.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
a_t: Hacl.Impl.Exponentiation.Definitions.inttype_a ->
len: Lib.IntTypes.size_t{Lib.IntTypes.v len > 0} ->
ctx_len: Lib.IntTypes.size_t ->
k: Hacl.Impl.Exponentiation.Definitions.concrete_ops a_t len ctx_len ->
l: Hacl.Impl.Exponentiation.size_window_t a_t len ->
table_len: Hacl.Impl.Exponentiation.table_len_t len ->
table_inv1: Hacl.Impl.Exponentiation.table_inv_t a_t len table_len ->
table_inv2: Hacl.Impl.Exponentiation.table_inv_t a_t len table_len ->
table_inv3: Hacl.Impl.Exponentiation.table_inv_t a_t len table_len ->
table_inv4: Hacl.Impl.Exponentiation.table_inv_t a_t len table_len
-> Type0 | {
"end_col": 55,
"end_line": 205,
"start_col": 4,
"start_line": 156
} |
|
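Note on the `_tables_st` rows above: relative to the plain variant, they take the tables of small powers as extra read-only arguments, with `table_inv1 .. table_inv4` recording that each `tablek` holds `[ak^0 = one; ak^1; ..; ak^(table_len - 1)]`, as the comments in the embedded F* source say. Below is a minimal sketch of that precomputation, reusing the hypothetical `mul`/`one` convention from the previous sketch; it is illustrative only, not the HACL* code.

```python
def precompute_table(mul, one, a, table_len):
    # table[i] = a^i for 0 <= i < table_len (table_len is typically 2^l)
    table = [one]
    for _ in range(table_len - 1):
        table.append(mul(table[-1], a))
    return table
```

The four-operand routine then runs one shared squaring chain per window and multiplies in one digit from each of the four tables, which matches the shape of the `table1..table4` / `b1..b4` parameters in the signature above.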
Prims.Tot | val norm_list (p: prop) : prop | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p | val norm_list (p: prop) : prop
let norm_list (p: prop) : prop = | false | null | false | norm [zeta; iota; delta_only [`%list_to_seq_post]] p | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Prims.prop",
"FStar.Pervasives.norm",
"Prims.Cons",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.zeta",
"FStar.Pervasives.iota",
"FStar.Pervasives.delta_only",
"Prims.string",
"Prims.Nil"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val norm_list (p: prop) : prop | [] | Vale.X64.InsMem.norm_list | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Prims.prop -> Prims.prop | {
"end_col": 54,
"end_line": 69,
"start_col": 2,
"start_line": 69
} |
Prims.Tot | val declare_buffer128 (b: buffer TUInt128) (hid: heaplet_id) (t: taint) (mut: mutability)
: buffer_info | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut | val declare_buffer128 (b: buffer TUInt128) (hid: heaplet_id) (t: taint) (mut: mutability)
: buffer_info
let declare_buffer128 (b: buffer TUInt128) (hid: heaplet_id) (t: taint) (mut: mutability)
: buffer_info = | false | null | false | Mkbuffer_info TUInt128 b hid t mut | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Memory.buffer",
"Vale.Arch.HeapTypes_s.TUInt128",
"Vale.X64.Decls.heaplet_id",
"Vale.Arch.HeapTypes_s.taint",
"Vale.Arch.HeapImpl.mutability",
"Vale.Arch.HeapImpl.Mkbuffer_info",
"Vale.Arch.HeapImpl.buffer_info"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr] | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val declare_buffer128 (b: buffer TUInt128) (hid: heaplet_id) (t: taint) (mut: mutability)
: buffer_info | [] | Vale.X64.InsMem.declare_buffer128 | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b: Vale.X64.Memory.buffer Vale.Arch.HeapTypes_s.TUInt128 ->
hid: Vale.X64.Decls.heaplet_id ->
t: Vale.Arch.HeapTypes_s.taint ->
mut: Vale.Arch.HeapImpl.mutability
-> Vale.Arch.HeapImpl.buffer_info | {
"end_col": 36,
"end_line": 84,
"start_col": 2,
"start_line": 84
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let trigger_create_heaplet (h:heaplet_id) = True | let trigger_create_heaplet (h: heaplet_id) = | false | null | false | True | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.heaplet_id",
"Prims.l_True",
"Prims.logical"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val trigger_create_heaplet : h: Vale.X64.Decls.heaplet_id -> Prims.logical | [] | Vale.X64.InsMem.trigger_create_heaplet | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | h: Vale.X64.Decls.heaplet_id -> Prims.logical | {
"end_col": 48,
"end_line": 76,
"start_col": 44,
"start_line": 76
} |
|
Prims.Tot | val norm_loc (l: loc) : loc | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l | val norm_loc (l: loc) : loc
let norm_loc (l: loc) : loc = | false | null | false | norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Memory.loc",
"FStar.Pervasives.norm",
"Prims.Cons",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.zeta",
"FStar.Pervasives.iota",
"FStar.Pervasives.delta_only",
"Prims.string",
"Prims.Nil",
"FStar.Pervasives.delta_attr"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = () | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val norm_loc (l: loc) : loc | [] | Vale.X64.InsMem.norm_loc | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | l: Vale.X64.Memory.loc -> Vale.X64.Memory.loc | {
"end_col": 87,
"end_line": 74,
"start_col": 2,
"start_line": 74
} |
Prims.Tot | val declare_buffer64 (b: buffer TUInt64) (hid: heaplet_id) (t: taint) (mut: mutability)
: buffer_info | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut | val declare_buffer64 (b: buffer TUInt64) (hid: heaplet_id) (t: taint) (mut: mutability)
: buffer_info
let declare_buffer64 (b: buffer TUInt64) (hid: heaplet_id) (t: taint) (mut: mutability)
: buffer_info = | false | null | false | Mkbuffer_info TUInt64 b hid t mut | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Memory.buffer",
"Vale.Arch.HeapTypes_s.TUInt64",
"Vale.X64.Decls.heaplet_id",
"Vale.Arch.HeapTypes_s.taint",
"Vale.Arch.HeapImpl.mutability",
"Vale.Arch.HeapImpl.Mkbuffer_info",
"Vale.Arch.HeapImpl.buffer_info"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr] | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val declare_buffer64 (b: buffer TUInt64) (hid: heaplet_id) (t: taint) (mut: mutability)
: buffer_info | [] | Vale.X64.InsMem.declare_buffer64 | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b: Vale.X64.Memory.buffer Vale.Arch.HeapTypes_s.TUInt64 ->
hid: Vale.X64.Decls.heaplet_id ->
t: Vale.Arch.HeapTypes_s.taint ->
mut: Vale.Arch.HeapImpl.mutability
-> Vale.Arch.HeapImpl.buffer_info | {
"end_col": 35,
"end_line": 80,
"start_col": 2,
"start_line": 80
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i | let heaplet_id_is_some (h: vale_heap) (i: heaplet_id) = | false | null | false | get_heaplet_id h == Some i | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.InsBasic.vale_heap",
"Vale.X64.Decls.heaplet_id",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"Vale.X64.Memory.heaplet_id",
"Vale.X64.Memory.get_heaplet_id",
"FStar.Pervasives.Native.Some",
"Prims.logical"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val heaplet_id_is_some : h: Vale.X64.InsBasic.vale_heap -> i: Vale.X64.Decls.heaplet_id -> Prims.logical | [] | Vale.X64.InsMem.heaplet_id_is_some | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | h: Vale.X64.InsBasic.vale_heap -> i: Vale.X64.Decls.heaplet_id -> Prims.logical | {
"end_col": 28,
"end_line": 66,
"start_col": 2,
"start_line": 66
} |
|
Prims.Tot | val va_wp_DestroyHeaplets (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_DestroyHeaplets (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout
va_s0).vl_inner) /\ (forall (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_s0).vl_inner))
(Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner)) (va_get_mem va_sM) /\
(va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint /\ heaplet_id_is_none
(va_get_mem va_sM) /\ (forall (h:heaplet_id) . {:pattern(trigger_create_heaplet
h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match (Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM)
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) ==> va_k va_sM (()))) | val va_wp_DestroyHeaplets (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0
let va_wp_DestroyHeaplets (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 = | false | null | false | (va_get_ok va_s0 /\ Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout va_s0).vl_inner) /\
(forall (va_x_memLayout: vale_heap_layout).
let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\
Vale.X64.Decls.modifies_mem (Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_s0)
.vl_inner))
(Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner))
(va_get_mem va_sM) /\
(va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint /\
heaplet_id_is_none (va_get_mem va_sM) /\
(forall (h: heaplet_id). {:pattern (trigger_create_heaplet h)}
trigger_create_heaplet h ==>
Vale.X64.Memory.heaps_match (Vale.X64.Memory.layout_buffers ((va_get_mem_layout va_s0)
.vl_inner))
((va_get_mem_layout va_sM).vl_taint)
(va_get_mem va_sM)
(Vale.X64.Decls.va_get_mem_heaplet h va_sM)
h) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.Memory.layout_heaplets_initialized",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_inner",
"Vale.X64.Decls.va_get_mem_layout",
"Prims.l_Forall",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Prims.l_imp",
"Vale.X64.Decls.modifies_mem",
"Vale.X64.Memory.layout_modifies_loc",
"Vale.X64.Memory.layout_old_heap",
"Vale.X64.Decls.va_get_mem",
"Prims.eq2",
"Vale.Arch.HeapTypes_s.memTaint_t",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_taint",
"Vale.X64.InsMem.heaplet_id_is_none",
"Vale.X64.Decls.heaplet_id",
"Vale.X64.InsMem.trigger_create_heaplet",
"Vale.X64.Memory.heaps_match",
"Vale.X64.Memory.layout_buffers",
"Vale.X64.Decls.va_get_mem_heaplet",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_mem_layout"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut
let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable))
//-- CreateHeaplets
val va_code_CreateHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_CreateHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_CreateHeaplets : va_b0:va_code -> va_s0:va_state -> buffers:(list buffer_info)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_CreateHeaplets ()) va_s0 /\ va_get_ok va_s0 /\ (let
(bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (norm_list
(Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) /\ va_state_eq va_sM (va_update_mem_layout
va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit ->
Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\ (forall
(va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==> va_k va_sM (())))
val va_wpProof_CreateHeaplets : buffers:(list buffer_info) -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_CreateHeaplets buffers va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_CreateHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_CreateHeaplets (buffers:(list buffer_info)) : (va_quickCode unit
(va_code_CreateHeaplets ())) =
(va_QProc (va_code_CreateHeaplets ()) ([va_Mod_mem_layout]) (va_wp_CreateHeaplets buffers)
(va_wpProof_CreateHeaplets buffers))
//--
//-- DestroyHeaplets
val va_code_DestroyHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_DestroyHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_DestroyHeaplets : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_DestroyHeaplets ()) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout va_s0).vl_inner)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_mem (Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout
va_s0).vl_inner)) (Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner))
(va_get_mem va_sM) /\ (va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint
/\ heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match
(Vale.X64.Memory.layout_buffers ((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout
va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) /\
va_state_eq va_sM (va_update_mem_layout va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr] | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_DestroyHeaplets (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 | [] | Vale.X64.InsMem.va_wp_DestroyHeaplets | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | va_s0: Vale.X64.Decls.va_state -> va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 72,
"end_line": 188,
"start_col": 2,
"start_line": 179
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None | let heaplet_id_is_none (h: vale_heap) = | false | null | false | get_heaplet_id h == None | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.InsBasic.vale_heap",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"Vale.X64.Memory.heaplet_id",
"Vale.X64.Memory.get_heaplet_id",
"FStar.Pervasives.Native.None",
"Prims.logical"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val heaplet_id_is_none : h: Vale.X64.InsBasic.vale_heap -> Prims.logical | [] | Vale.X64.InsMem.heaplet_id_is_none | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | h: Vale.X64.InsBasic.vale_heap -> Prims.logical | {
"end_col": 26,
"end_line": 63,
"start_col": 2,
"start_line": 63
} |
|
Prims.Tot | val va_wp_LoadBe64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_LoadBe64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64
(va_eval_heaplet va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer
#Vale.X64.Memory.vuint64 b (va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\
Vale.X64.Memory.valid_taint_buf64 b (va_eval_heaplet va_s0 h) ((va_get_mem_layout
va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 src + offset == Vale.X64.Memory.buffer_addr
#Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) + 8 `op_Multiply` index /\ (forall
(va_x_dst:va_value_dst_opr64) . let va_sM = va_upd_operand_dst_opr64 dst va_x_dst va_s0 in
va_get_ok va_sM /\ va_eval_dst_opr64 va_sM dst == Vale.Def.Types_s.reverse_bytes_nat64
(Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)) ==> va_k va_sM (()))) | val va_wp_LoadBe64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_LoadBe64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\
Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet va_s0 h) b index /\
Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64
b
(va_get_mem_layout va_s0)
(va_eval_heaplet va_s0 h)
false /\
Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h)
((va_get_mem_layout va_s0).vl_taint)
t /\
va_eval_reg_opr64 va_s0 src + offset ==
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8
`op_Multiply`
index /\
(forall (va_x_dst: va_value_dst_opr64).
let va_sM = va_upd_operand_dst_opr64 dst va_x_dst va_s0 in
va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst ==
Vale.Def.Types_s.reverse_bytes_nat64 (Vale.X64.Decls.buffer64_read b
index
(va_eval_heaplet va_sM h)) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_operand_heaplet",
"Vale.X64.Decls.va_operand_dst_opr64",
"Vale.X64.Decls.va_operand_reg_opr64",
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Memory.buffer64",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Vale.X64.Decls.va_is_src_heaplet",
"Prims.b2t",
"Vale.X64.Decls.va_is_dst_dst_opr64",
"Vale.X64.Decls.va_is_src_reg_opr64",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.movbe_enabled",
"Vale.X64.Decls.valid_src_addr",
"Vale.X64.Memory.vuint64",
"Vale.X64.Decls.va_eval_heaplet",
"Vale.X64.Memory.valid_layout_buffer",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Memory.valid_taint_buf64",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_taint",
"Prims.eq2",
"Prims.op_Addition",
"Vale.X64.Decls.va_eval_reg_opr64",
"Vale.X64.Memory.buffer_addr",
"Prims.op_Multiply",
"Prims.l_Forall",
"Vale.X64.Decls.va_value_dst_opr64",
"Prims.l_imp",
"Vale.Def.Types_s.nat64",
"Vale.X64.Decls.va_eval_dst_opr64",
"Vale.Def.Types_s.reverse_bytes_nat64",
"Vale.X64.Decls.buffer64_read",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_operand_dst_opr64"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut
let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable))
//-- CreateHeaplets
val va_code_CreateHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_CreateHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_CreateHeaplets : va_b0:va_code -> va_s0:va_state -> buffers:(list buffer_info)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_CreateHeaplets ()) va_s0 /\ va_get_ok va_s0 /\ (let
(bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (norm_list
(Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) /\ va_state_eq va_sM (va_update_mem_layout
va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit ->
Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\ (forall
(va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==> va_k va_sM (())))
val va_wpProof_CreateHeaplets : buffers:(list buffer_info) -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_CreateHeaplets buffers va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_CreateHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_CreateHeaplets (buffers:(list buffer_info)) : (va_quickCode unit
(va_code_CreateHeaplets ())) =
(va_QProc (va_code_CreateHeaplets ()) ([va_Mod_mem_layout]) (va_wp_CreateHeaplets buffers)
(va_wpProof_CreateHeaplets buffers))
//--
//-- DestroyHeaplets
val va_code_DestroyHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_DestroyHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_DestroyHeaplets : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_DestroyHeaplets ()) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout va_s0).vl_inner)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_mem (Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout
va_s0).vl_inner)) (Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner))
(va_get_mem va_sM) /\ (va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint
/\ heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match
(Vale.X64.Memory.layout_buffers ((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout
va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) /\
va_state_eq va_sM (va_update_mem_layout va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_DestroyHeaplets (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout
va_s0).vl_inner) /\ (forall (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_s0).vl_inner))
(Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner)) (va_get_mem va_sM) /\
(va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint /\ heaplet_id_is_none
(va_get_mem va_sM) /\ (forall (h:heaplet_id) . {:pattern(trigger_create_heaplet
h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match (Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM)
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) ==> va_k va_sM (())))
val va_wpProof_DestroyHeaplets : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_DestroyHeaplets va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_DestroyHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_DestroyHeaplets () : (va_quickCode unit (va_code_DestroyHeaplets ())) =
(va_QProc (va_code_DestroyHeaplets ()) ([va_Mod_mem_layout]) va_wp_DestroyHeaplets
va_wpProof_DestroyHeaplets)
//--
//-- Load64_buffer
val va_code_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Load64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_dst_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Load64_buffer h dst src offset t) va_s0 /\
va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)
/\ va_state_eq va_sM (va_update_ok va_sM (va_update_operand_dst_opr64 dst va_sM va_s0))))
[@ va_qattr]
let va_wp_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index /\ (forall (va_x_dst:va_value_dst_opr64) . let va_sM =
va_upd_operand_dst_opr64 dst va_x_dst va_s0 in va_get_ok va_sM /\ va_eval_dst_opr64 va_sM dst
== Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h) ==> va_k va_sM (())))
val va_wpProof_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Load64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Load64_buffer h dst src offset t)
([va_mod_dst_opr64 dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_Load64_buffer h dst src offset t)) =
(va_QProc (va_code_Load64_buffer h dst src offset t) ([va_mod_dst_opr64 dst])
(va_wp_Load64_buffer h dst src offset t b index) (va_wpProof_Load64_buffer h dst src offset t b
index))
//--
//-- Store64_buffer
val va_code_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Store64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_reg_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Store64_buffer h dst src offset t) va_s0 /\
va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\ Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst
+ offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (va_eval_reg_opr64 va_s0 src)
(va_eval_heaplet va_s0 h) /\ va_state_eq va_sM (va_update_mem va_sM (va_update_ok va_sM
(va_update_operand_heaplet h va_sM va_s0)))))
[@ va_qattr]
let va_wp_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\ Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst
+ offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8 `op_Multiply` index /\ (forall (va_x_h:va_value_heaplet) (va_x_mem:vale_heap) . let va_sM =
va_upd_mem va_x_mem (va_upd_operand_heaplet h va_x_h va_s0) in va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (va_eval_reg_opr64 va_s0 src)
(va_eval_heaplet va_s0 h) ==> va_k va_sM (())))
val va_wpProof_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Store64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Store64_buffer h dst src offset t)
([va_Mod_mem; va_mod_heaplet h]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_Store64_buffer h dst src offset t)) =
(va_QProc (va_code_Store64_buffer h dst src offset t) ([va_Mod_mem; va_mod_heaplet h])
(va_wp_Store64_buffer h dst src offset t b index) (va_wpProof_Store64_buffer h dst src offset t
b index))
//--
//-- LoadBe64_buffer
val va_code_LoadBe64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_LoadBe64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_LoadBe64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_dst_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_LoadBe64_buffer h dst src offset t) va_s0 /\
va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64
(va_eval_heaplet va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer
#Vale.X64.Memory.vuint64 b (va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\
Vale.X64.Memory.valid_taint_buf64 b (va_eval_heaplet va_s0 h) ((va_get_mem_layout
va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 src + offset == Vale.X64.Memory.buffer_addr
#Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.Def.Types_s.reverse_bytes_nat64
(Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)) /\ va_state_eq va_sM
(va_update_ok va_sM (va_update_operand_dst_opr64 dst va_sM va_s0))))
[@ va_qattr]
let va_wp_LoadBe64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state) | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_LoadBe64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.X64.InsMem.va_wp_LoadBe64_buffer | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Vale.X64.Decls.va_operand_heaplet ->
dst: Vale.X64.Decls.va_operand_dst_opr64 ->
src: Vale.X64.Decls.va_operand_reg_opr64 ->
offset: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
b: Vale.X64.Memory.buffer64 ->
index: Prims.int ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 90,
"end_line": 343,
"start_col": 2,
"start_line": 334
} |
Prims.Tot | val va_wp_Load64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index /\ (forall (va_x_dst:va_value_dst_opr64) . let va_sM =
va_upd_operand_dst_opr64 dst va_x_dst va_s0 in va_get_ok va_sM /\ va_eval_dst_opr64 va_sM dst
== Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h) ==> va_k va_sM (()))) | val va_wp_Load64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Load64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\
Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet va_s0 h) b index /\
Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64
b
(va_get_mem_layout va_s0)
(va_eval_heaplet va_s0 h)
false /\
Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h)
((va_get_mem_layout va_s0).vl_taint)
t /\
va_eval_reg_opr64 va_s0 src + offset ==
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8
`op_Multiply`
index /\
(forall (va_x_dst: va_value_dst_opr64).
let va_sM = va_upd_operand_dst_opr64 dst va_x_dst va_s0 in
va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_operand_heaplet",
"Vale.X64.Decls.va_operand_dst_opr64",
"Vale.X64.Decls.va_operand_reg_opr64",
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Memory.buffer64",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Vale.X64.Decls.va_is_src_heaplet",
"Prims.b2t",
"Vale.X64.Decls.va_is_dst_dst_opr64",
"Vale.X64.Decls.va_is_src_reg_opr64",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.Decls.valid_src_addr",
"Vale.X64.Memory.vuint64",
"Vale.X64.Decls.va_eval_heaplet",
"Vale.X64.Memory.valid_layout_buffer",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Memory.valid_taint_buf64",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_taint",
"Prims.eq2",
"Prims.op_Addition",
"Vale.X64.Decls.va_eval_reg_opr64",
"Vale.X64.Memory.buffer_addr",
"Prims.op_Multiply",
"Prims.l_Forall",
"Vale.X64.Decls.va_value_dst_opr64",
"Prims.l_imp",
"Vale.Def.Types_s.nat64",
"Vale.X64.Decls.va_eval_dst_opr64",
"Vale.X64.Decls.buffer64_read",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_operand_dst_opr64"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut
let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable))
//-- CreateHeaplets
val va_code_CreateHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_CreateHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_CreateHeaplets : va_b0:va_code -> va_s0:va_state -> buffers:(list buffer_info)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_CreateHeaplets ()) va_s0 /\ va_get_ok va_s0 /\ (let
(bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (norm_list
(Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) /\ va_state_eq va_sM (va_update_mem_layout
va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit ->
Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\ (forall
(va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==> va_k va_sM (())))
val va_wpProof_CreateHeaplets : buffers:(list buffer_info) -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_CreateHeaplets buffers va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_CreateHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_CreateHeaplets (buffers:(list buffer_info)) : (va_quickCode unit
(va_code_CreateHeaplets ())) =
(va_QProc (va_code_CreateHeaplets ()) ([va_Mod_mem_layout]) (va_wp_CreateHeaplets buffers)
(va_wpProof_CreateHeaplets buffers))
//--
//-- DestroyHeaplets
val va_code_DestroyHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_DestroyHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_DestroyHeaplets : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_DestroyHeaplets ()) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout va_s0).vl_inner)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_mem (Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout
va_s0).vl_inner)) (Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner))
(va_get_mem va_sM) /\ (va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint
/\ heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match
(Vale.X64.Memory.layout_buffers ((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout
va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) /\
va_state_eq va_sM (va_update_mem_layout va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_DestroyHeaplets (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout
va_s0).vl_inner) /\ (forall (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_s0).vl_inner))
(Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner)) (va_get_mem va_sM) /\
(va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint /\ heaplet_id_is_none
(va_get_mem va_sM) /\ (forall (h:heaplet_id) . {:pattern(trigger_create_heaplet
h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match (Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM)
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) ==> va_k va_sM (())))
val va_wpProof_DestroyHeaplets : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_DestroyHeaplets va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_DestroyHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_DestroyHeaplets () : (va_quickCode unit (va_code_DestroyHeaplets ())) =
(va_QProc (va_code_DestroyHeaplets ()) ([va_Mod_mem_layout]) va_wp_DestroyHeaplets
va_wpProof_DestroyHeaplets)
//--
//-- Load64_buffer
val va_code_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Load64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_dst_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Load64_buffer h dst src offset t) va_s0 /\
va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)
/\ va_state_eq va_sM (va_update_ok va_sM (va_update_operand_dst_opr64 dst va_sM va_s0))))
[@ va_qattr]
let va_wp_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state) | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Load64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.X64.InsMem.va_wp_Load64_buffer | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Vale.X64.Decls.va_operand_heaplet ->
dst: Vale.X64.Decls.va_operand_dst_opr64 ->
src: Vale.X64.Decls.va_operand_reg_opr64 ->
offset: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
b: Vale.X64.Memory.buffer64 ->
index: Prims.int ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 91,
"end_line": 235,
"start_col": 2,
"start_line": 227
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable)) | let create_post (layout: vale_heap_layout) (bs: Seq.seq buffer_info) = | false | null | false | forall (i: nat). {:pattern Seq.index bs i}
i < Seq.length bs ==>
(let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\ valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable)) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.Arch.HeapImpl.vale_heap_layout",
"FStar.Seq.Base.seq",
"Vale.Arch.HeapImpl.buffer_info",
"Prims.l_Forall",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.Arch.HeapImpl.buffer",
"Vale.Arch.HeapImpl.heaplet_id",
"Vale.Arch.HeapTypes_s.taint",
"Vale.Arch.HeapImpl.mutability",
"Prims.l_and",
"Vale.X64.InsMem.trigger_create_heaplet",
"Vale.X64.Memory.valid_layout_buffer_id",
"FStar.Pervasives.Native.Some",
"Vale.X64.Memory.heaplet_id",
"Prims.op_Equality",
"Vale.Arch.HeapImpl.Mutable",
"Prims.logical",
"FStar.Seq.Base.index"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val create_post : layout: Vale.Arch.HeapImpl.vale_heap_layout -> bs: FStar.Seq.Base.seq Vale.Arch.HeapImpl.buffer_info
-> Prims.logical | [] | Vale.X64.InsMem.create_post | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | layout: Vale.Arch.HeapImpl.vale_heap_layout -> bs: FStar.Seq.Base.seq Vale.Arch.HeapImpl.buffer_info
-> Prims.logical | {
"end_col": 65,
"end_line": 91,
"start_col": 2,
"start_line": 87
} |
|
Prims.Ghost | val buffer64_write (b: buffer64) (i: int) (v: nat64) (h: vale_heap)
: Ghost vale_heap (requires buffer_readable h b /\ buffer_writeable b) (ensures fun _ -> True) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h | val buffer64_write (b: buffer64) (i: int) (v: nat64) (h: vale_heap)
: Ghost vale_heap (requires buffer_readable h b /\ buffer_writeable b) (ensures fun _ -> True)
let buffer64_write (b: buffer64) (i: int) (v: nat64) (h: vale_heap)
: Ghost vale_heap (requires buffer_readable h b /\ buffer_writeable b) (ensures fun _ -> True) = | false | null | false | buffer_write b i v h | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [] | [
"Vale.X64.Memory.buffer64",
"Prims.int",
"Vale.X64.Memory.nat64",
"Vale.X64.InsBasic.vale_heap",
"Vale.X64.Memory.buffer_write",
"Vale.X64.Memory.vuint64",
"Prims.l_and",
"Vale.X64.Decls.buffer_readable",
"Vale.X64.Decls.buffer_writeable",
"Prims.l_True"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True) | false | false | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffer64_write (b: buffer64) (i: int) (v: nat64) (h: vale_heap)
: Ghost vale_heap (requires buffer_readable h b /\ buffer_writeable b) (ensures fun _ -> True) | [] | Vale.X64.InsMem.buffer64_write | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b: Vale.X64.Memory.buffer64 ->
i: Prims.int ->
v: Vale.X64.Memory.nat64 ->
h: Vale.X64.InsBasic.vale_heap
-> Prims.Ghost Vale.X64.InsBasic.vale_heap | {
"end_col": 22,
"end_line": 60,
"start_col": 2,
"start_line": 60
} |
Prims.Tot | val va_quick_Mem64_lemma
(h: heaplet_id)
(base: operand64)
(offset: int)
(b: buffer64)
(index: int)
(t: taint)
: (va_quickCode unit (va_code_Mem64_lemma ())) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t)) | val va_quick_Mem64_lemma
(h: heaplet_id)
(base: operand64)
(offset: int)
(b: buffer64)
(index: int)
(t: taint)
: (va_quickCode unit (va_code_Mem64_lemma ()))
let va_quick_Mem64_lemma
(h: heaplet_id)
(base: operand64)
(offset: int)
(b: buffer64)
(index: int)
(t: taint)
: (va_quickCode unit (va_code_Mem64_lemma ())) = | false | null | false | (va_QProc (va_code_Mem64_lemma ())
([])
(va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t)) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.heaplet_id",
"Vale.X64.Machine_s.operand64",
"Prims.int",
"Vale.X64.Memory.buffer64",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.X64.InsMem.va_code_Mem64_lemma",
"Prims.Nil",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.InsMem.va_wp_Mem64_lemma",
"Vale.X64.InsMem.va_wpProof_Mem64_lemma",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int) | false | false | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Mem64_lemma
(h: heaplet_id)
(base: operand64)
(offset: int)
(b: buffer64)
(index: int)
(t: taint)
: (va_quickCode unit (va_code_Mem64_lemma ())) | [] | Vale.X64.InsMem.va_quick_Mem64_lemma | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Vale.X64.Decls.heaplet_id ->
base: Vale.X64.Machine_s.operand64 ->
offset: Prims.int ->
b: Vale.X64.Memory.buffer64 ->
index: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint
-> Vale.X64.QuickCode.va_quickCode Prims.unit (Vale.X64.InsMem.va_code_Mem64_lemma ()) | {
"end_col": 53,
"end_line": 54,
"start_col": 2,
"start_line": 53
} |
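Note: the completed definition in this record is the standard Vale quick-code wrapper. va_QProc packages four ingredients into a va_quickCode value: the concrete code va_code_Mem64_lemma (), a modifies list (empty here, since the lemma changes no machine state), the weakest-precondition predicate va_wp_Mem64_lemma, and its soundness proof va_wpProof_Mem64_lemma. A minimal sketch of the same shape, where my_code, my_wp, and my_wpProof are hypothetical placeholders rather than library names:

// Illustrative only: the generic wrapper shape used throughout this interface.
// my_code, my_wp, and my_wpProof are placeholders, not real Vale definitions.
[@ "opaque_to_smt" va_qattr]
let va_quick_MyProc () : (va_quickCode unit (my_code ())) =
  (va_QProc (my_code ()) ([]) my_wp my_wpProof)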
Prims.Tot | val va_wp_Mem64_lemma
(h: heaplet_id)
(base: operand64)
(offset: int)
(b: buffer64)
(index: int)
(t: taint)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (()))) | val va_wp_Mem64_lemma
(h: heaplet_id)
(base: operand64)
(offset: int)
(b: buffer64)
(index: int)
(t: taint)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Mem64_lemma
(h: heaplet_id)
(base: operand64)
(offset: int)
(b: buffer64)
(index: int)
(t: taint)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(let heap_h = va_get_mem_heaplet h va_s0 in
(OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\
eval_operand base va_s0 + offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\
(let va_sM = va_s0 in
va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in
valid_operand (va_opr_code_Mem64 h base offset t) va_sM /\
load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.heaplet_id",
"Vale.X64.Machine_s.operand64",
"Prims.int",
"Vale.X64.Memory.buffer64",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.Machine_s.uu___is_OReg",
"Vale.X64.Machine_s.nat64",
"Vale.X64.Machine_s.reg_64",
"Vale.X64.Decls.valid_src_addr",
"Vale.X64.Memory.vuint64",
"Vale.X64.Memory.valid_layout_buffer",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Memory.valid_taint_buf64",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_taint",
"Prims.eq2",
"Prims.op_Addition",
"Vale.X64.State.eval_operand",
"Vale.X64.Memory.buffer_addr",
"Prims.op_Multiply",
"Vale.Arch.HeapImpl.vale_heap",
"Vale.X64.Decls.va_get_mem_heaplet",
"Prims.l_imp",
"Vale.X64.Decls.valid_operand",
"Vale.X64.Decls.va_opr_code_Mem64",
"Prims.nat",
"Prims.l_or",
"Prims.op_LessThan",
"Vale.Def.Words_s.pow2_64",
"Vale.X64.Memory.load_mem64",
"Vale.X64.Decls.va_get_mem",
"Vale.X64.Memory.buffer_read",
"Vale.X64.State.vale_state"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int) | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Mem64_lemma
(h: heaplet_id)
(base: operand64)
(offset: int)
(b: buffer64)
(index: int)
(t: taint)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.X64.InsMem.va_wp_Mem64_lemma | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Vale.X64.Decls.heaplet_id ->
base: Vale.X64.Machine_s.operand64 ->
offset: Prims.int ->
b: Vale.X64.Memory.buffer64 ->
index: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 56,
"end_line": 42,
"start_col": 2,
"start_line": 36
} |
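Note: the wp predicate in this record follows the continuation-passing convention used by all Vale procedures: a precondition on the entry state va_s0, conjoined with an implication that hands every admissible exit state va_sM to the continuation va_k. Because Mem64_lemma modifies nothing, the exit state is simply let va_sM = va_s0; state-changing procedures quantify over the updated components instead (see the CreateHeaplets and DestroyHeaplets records below). A hedged sketch of the no-update case, with my_pre and my_post as hypothetical predicates:

// Sketch of the wp shape for a procedure that leaves the state unchanged.
// my_pre and my_post are illustrative placeholders.
let va_wp_MySketch (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
  (va_get_ok va_s0 /\ my_pre va_s0 /\
    (let va_sM = va_s0 in my_post va_s0 va_sM ==> va_k va_sM (())))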
Prims.Tot | val va_quick_DestroyHeaplets: Prims.unit -> (va_quickCode unit (va_code_DestroyHeaplets ())) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_DestroyHeaplets () : (va_quickCode unit (va_code_DestroyHeaplets ())) =
(va_QProc (va_code_DestroyHeaplets ()) ([va_Mod_mem_layout]) va_wp_DestroyHeaplets
va_wpProof_DestroyHeaplets) | val va_quick_DestroyHeaplets: Prims.unit -> (va_quickCode unit (va_code_DestroyHeaplets ()))
let va_quick_DestroyHeaplets () : (va_quickCode unit (va_code_DestroyHeaplets ())) = | false | null | false | (va_QProc (va_code_DestroyHeaplets ())
([va_Mod_mem_layout])
va_wp_DestroyHeaplets
va_wpProof_DestroyHeaplets) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Prims.unit",
"Vale.X64.QuickCode.va_QProc",
"Vale.X64.InsMem.va_code_DestroyHeaplets",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_mem_layout",
"Prims.Nil",
"Vale.X64.InsMem.va_wp_DestroyHeaplets",
"Vale.X64.InsMem.va_wpProof_DestroyHeaplets",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut
let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable))
//-- CreateHeaplets
val va_code_CreateHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_CreateHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_CreateHeaplets : va_b0:va_code -> va_s0:va_state -> buffers:(list buffer_info)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_CreateHeaplets ()) va_s0 /\ va_get_ok va_s0 /\ (let
(bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (norm_list
(Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) /\ va_state_eq va_sM (va_update_mem_layout
va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit ->
Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\ (forall
(va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==> va_k va_sM (())))
val va_wpProof_CreateHeaplets : buffers:(list buffer_info) -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_CreateHeaplets buffers va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_CreateHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_CreateHeaplets (buffers:(list buffer_info)) : (va_quickCode unit
(va_code_CreateHeaplets ())) =
(va_QProc (va_code_CreateHeaplets ()) ([va_Mod_mem_layout]) (va_wp_CreateHeaplets buffers)
(va_wpProof_CreateHeaplets buffers))
//--
//-- DestroyHeaplets
val va_code_DestroyHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_DestroyHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_DestroyHeaplets : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_DestroyHeaplets ()) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout va_s0).vl_inner)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_mem (Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout
va_s0).vl_inner)) (Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner))
(va_get_mem va_sM) /\ (va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint
/\ heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match
(Vale.X64.Memory.layout_buffers ((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout
va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) /\
va_state_eq va_sM (va_update_mem_layout va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_DestroyHeaplets (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout
va_s0).vl_inner) /\ (forall (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_s0).vl_inner))
(Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner)) (va_get_mem va_sM) /\
(va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint /\ heaplet_id_is_none
(va_get_mem va_sM) /\ (forall (h:heaplet_id) . {:pattern(trigger_create_heaplet
h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match (Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM)
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) ==> va_k va_sM (())))
val va_wpProof_DestroyHeaplets : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_DestroyHeaplets va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_DestroyHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr] | false | false | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_DestroyHeaplets: Prims.unit -> (va_quickCode unit (va_code_DestroyHeaplets ())) | [] | Vale.X64.InsMem.va_quick_DestroyHeaplets | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | _: Prims.unit
-> Vale.X64.QuickCode.va_quickCode Prims.unit (Vale.X64.InsMem.va_code_DestroyHeaplets ()) | {
"end_col": 31,
"end_line": 198,
"start_col": 2,
"start_line": 197
} |
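Note: unlike the Mem64_lemma wrapper, this record declares a non-empty frame: DestroyHeaplets may change the heap layout, so the modifies list is [va_Mod_mem_layout]. The same list appears in the ensures clause of va_wpProof_DestroyHeaplets (va_t_ensure (va_code_DestroyHeaplets ()) ([va_Mod_mem_layout]) ...), which is what lets va_QProc accept the proof. A hedged sketch of a wrapper with such a frame, again using my_code, my_wp, and my_wpProof as placeholders:

// Same wrapper shape as before, but declaring that the procedure may update
// the memory layout; placeholder names, not library definitions.
[@ "opaque_to_smt" va_qattr]
let va_quick_MyLayoutProc () : (va_quickCode unit (my_code ())) =
  (va_QProc (my_code ()) ([va_Mod_mem_layout]) my_wp my_wpProof)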
Prims.Tot | val va_quick_CreateHeaplets (buffers: (list buffer_info))
: (va_quickCode unit (va_code_CreateHeaplets ())) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_CreateHeaplets (buffers:(list buffer_info)) : (va_quickCode unit
(va_code_CreateHeaplets ())) =
(va_QProc (va_code_CreateHeaplets ()) ([va_Mod_mem_layout]) (va_wp_CreateHeaplets buffers)
(va_wpProof_CreateHeaplets buffers)) | val va_quick_CreateHeaplets (buffers: (list buffer_info))
: (va_quickCode unit (va_code_CreateHeaplets ()))
let va_quick_CreateHeaplets (buffers: (list buffer_info))
: (va_quickCode unit (va_code_CreateHeaplets ())) = | false | null | false | (va_QProc (va_code_CreateHeaplets ())
([va_Mod_mem_layout])
(va_wp_CreateHeaplets buffers)
(va_wpProof_CreateHeaplets buffers)) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Prims.list",
"Vale.Arch.HeapImpl.buffer_info",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.X64.InsMem.va_code_CreateHeaplets",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_mem_layout",
"Prims.Nil",
"Vale.X64.InsMem.va_wp_CreateHeaplets",
"Vale.X64.InsMem.va_wpProof_CreateHeaplets",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut
let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable))
//-- CreateHeaplets
val va_code_CreateHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_CreateHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_CreateHeaplets : va_b0:va_code -> va_s0:va_state -> buffers:(list buffer_info)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_CreateHeaplets ()) va_s0 /\ va_get_ok va_s0 /\ (let
(bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (norm_list
(Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) /\ va_state_eq va_sM (va_update_mem_layout
va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit ->
Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\ (forall
(va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==> va_k va_sM (())))
val va_wpProof_CreateHeaplets : buffers:(list buffer_info) -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_CreateHeaplets buffers va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_CreateHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_CreateHeaplets (buffers:(list buffer_info)) : (va_quickCode unit | false | false | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_CreateHeaplets (buffers: (list buffer_info))
: (va_quickCode unit (va_code_CreateHeaplets ())) | [] | Vale.X64.InsMem.va_quick_CreateHeaplets | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | buffers: Prims.list Vale.Arch.HeapImpl.buffer_info
-> Vale.X64.QuickCode.va_quickCode Prims.unit (Vale.X64.InsMem.va_code_CreateHeaplets ()) | {
"end_col": 40,
"end_line": 156,
"start_col": 2,
"start_line": 155
} |
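Note: va_quick_CreateHeaplets takes the list of buffer_info values that assigns buffers to heaplets. In the file_context above that list is built with the declare_buffer64 and declare_buffer128 helpers. A hedged usage sketch follows; in_b and out_b are hypothetical buffers, and the heaplet ids, taints, and mutabilities are arbitrary illustrative choices:

// Hypothetical call site: an immutable input buffer in heaplet 0 and a
// mutable output buffer in heaplet 1, packaged into the quick-code value.
let my_heaplets (in_b:buffer TUInt64) (out_b:buffer TUInt128) =
  va_quick_CreateHeaplets ([
    declare_buffer64 in_b 0 Secret Immutable;
    declare_buffer128 out_b 1 Secret Mutable
  ])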
Prims.Tot | val va_wp_CreateHeaplets
(buffers: (list buffer_info))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit ->
Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\ (forall
(va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==> va_k va_sM (()))) | val va_wp_CreateHeaplets
(buffers: (list buffer_info))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_CreateHeaplets
(buffers: (list buffer_info))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(let bs:(FStar.Seq.Base.seq buffer_info) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\
(forall (va_x_memLayout: vale_heap_layout).
let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\
(let bs:(FStar.Seq.Base.seq buffer_info) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) ==
norm_loc (Vale.X64.Memory.loc_mutable_buffers buffers) /\
Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\
Vale.X64.Memory.layout_buffers ((va_get_mem_layout va_sM).vl_inner) == bs /\
Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout va_sM).vl_inner) /\
(va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint /\
create_post (va_get_mem_layout va_sM) bs /\ heaplet_id_is_none (va_get_mem va_sM) /\
(forall (h: heaplet_id). {:pattern (trigger_create_heaplet h)}
trigger_create_heaplet h ==>
heaplet_id_is_some (Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\
Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint)
(va_get_mem va_sM)
(Vale.X64.Decls.va_get_mem_heaplet h va_sM)
h) /\
(forall (i: nat). {:pattern (Seq.index bs i)}
i < FStar.Seq.Base.length #buffer_info bs ==>
Vale.X64.Memory.buffer_info_has_id bs
i
((FStar.Seq.Base.index #Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Prims.list",
"Vale.Arch.HeapImpl.buffer_info",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.l_imp",
"Vale.X64.InsMem.norm_list",
"Vale.Lib.Seqs.list_to_seq_post",
"Vale.X64.Memory.init_heaplets_req",
"FStar.Seq.Base.seq",
"Vale.Lib.Seqs.list_to_seq",
"Prims.l_Forall",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Prims.eq2",
"Vale.X64.Memory.loc",
"Vale.X64.Memory.layout_modifies_loc",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_inner",
"Vale.X64.InsMem.norm_loc",
"Vale.X64.Memory.loc_mutable_buffers",
"Vale.X64.Memory.vale_heap",
"Vale.X64.Memory.layout_old_heap",
"Vale.X64.Memory.layout_buffers",
"Vale.X64.Memory.layout_heaplets_initialized",
"Vale.Arch.HeapTypes_s.memTaint_t",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_taint",
"Vale.X64.InsMem.create_post",
"Vale.X64.InsMem.heaplet_id_is_none",
"Vale.X64.Decls.heaplet_id",
"Vale.X64.InsMem.trigger_create_heaplet",
"Vale.X64.InsMem.heaplet_id_is_some",
"Vale.X64.Decls.va_get_mem_heaplet",
"Vale.X64.Memory.heaps_match",
"Prims.nat",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"Vale.X64.Memory.buffer_info_has_id",
"Vale.Arch.HeapImpl.__proj__Mkbuffer_info__item__bi_heaplet",
"FStar.Seq.Base.index",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_mem_layout"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut
let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable))
//-- CreateHeaplets
val va_code_CreateHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_CreateHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_CreateHeaplets : va_b0:va_code -> va_s0:va_state -> buffers:(list buffer_info)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_CreateHeaplets ()) va_s0 /\ va_get_ok va_s0 /\ (let
(bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (norm_list
(Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) /\ va_state_eq va_sM (va_update_mem_layout
va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit -> | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_CreateHeaplets
(buffers: (list buffer_info))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.X64.InsMem.va_wp_CreateHeaplets | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
buffers: Prims.list Vale.Arch.HeapImpl.buffer_info ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 77,
"end_line": 144,
"start_col": 2,
"start_line": 125
} |
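Note: in contrast to the Mem64_lemma wp earlier, this wp quantifies over the one state component CreateHeaplets may change: the forall over va_x_memLayout, with va_sM = va_upd_mem_layout va_x_memLayout va_s0, requires the continuation va_k to accept every layout that satisfies the stated postconditions. A stripped-down hedged sketch of that quantified shape, with my_layout_post as a hypothetical postcondition:

// Sketch of the quantified wp shape: the continuation must hold for every
// final state whose layout update satisfies the postcondition.
// my_layout_post is a placeholder predicate.
let va_wp_MyLayoutSketch (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
  (va_get_ok va_s0 /\
    (forall (va_x_memLayout:vale_heap_layout) .
      let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
      va_get_ok va_sM /\ my_layout_post va_s0 va_sM ==> va_k va_sM (())))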
Prims.Tot | val va_quick_Load64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_Load64_buffer h dst src offset t)) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_Load64_buffer h dst src offset t)) =
(va_QProc (va_code_Load64_buffer h dst src offset t) ([va_mod_dst_opr64 dst])
(va_wp_Load64_buffer h dst src offset t b index) (va_wpProof_Load64_buffer h dst src offset t b
index)) | val va_quick_Load64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_Load64_buffer h dst src offset t))
let va_quick_Load64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_Load64_buffer h dst src offset t)) = | false | null | false | (va_QProc (va_code_Load64_buffer h dst src offset t)
([va_mod_dst_opr64 dst])
(va_wp_Load64_buffer h dst src offset t b index)
(va_wpProof_Load64_buffer h dst src offset t b index)) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_operand_heaplet",
"Vale.X64.Decls.va_operand_dst_opr64",
"Vale.X64.Decls.va_operand_reg_opr64",
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Memory.buffer64",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.X64.InsMem.va_code_Load64_buffer",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_mod_dst_opr64",
"Prims.Nil",
"Vale.X64.InsMem.va_wp_Load64_buffer",
"Vale.X64.InsMem.va_wpProof_Load64_buffer",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut
let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable))
//-- CreateHeaplets
val va_code_CreateHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_CreateHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_CreateHeaplets : va_b0:va_code -> va_s0:va_state -> buffers:(list buffer_info)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_CreateHeaplets ()) va_s0 /\ va_get_ok va_s0 /\ (let
(bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (norm_list
(Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) /\ va_state_eq va_sM (va_update_mem_layout
va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit ->
Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\ (forall
(va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==> va_k va_sM (())))
val va_wpProof_CreateHeaplets : buffers:(list buffer_info) -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_CreateHeaplets buffers va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_CreateHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_CreateHeaplets (buffers:(list buffer_info)) : (va_quickCode unit
(va_code_CreateHeaplets ())) =
(va_QProc (va_code_CreateHeaplets ()) ([va_Mod_mem_layout]) (va_wp_CreateHeaplets buffers)
(va_wpProof_CreateHeaplets buffers))
//--
//-- DestroyHeaplets
val va_code_DestroyHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_DestroyHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_DestroyHeaplets : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_DestroyHeaplets ()) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout va_s0).vl_inner)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_mem (Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout
va_s0).vl_inner)) (Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner))
(va_get_mem va_sM) /\ (va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint
/\ heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match
(Vale.X64.Memory.layout_buffers ((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout
va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) /\
va_state_eq va_sM (va_update_mem_layout va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_DestroyHeaplets (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout
va_s0).vl_inner) /\ (forall (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_s0).vl_inner))
(Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner)) (va_get_mem va_sM) /\
(va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint /\ heaplet_id_is_none
(va_get_mem va_sM) /\ (forall (h:heaplet_id) . {:pattern(trigger_create_heaplet
h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match (Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM)
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) ==> va_k va_sM (())))
val va_wpProof_DestroyHeaplets : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_DestroyHeaplets va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_DestroyHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_DestroyHeaplets () : (va_quickCode unit (va_code_DestroyHeaplets ())) =
(va_QProc (va_code_DestroyHeaplets ()) ([va_Mod_mem_layout]) va_wp_DestroyHeaplets
va_wpProof_DestroyHeaplets)
//--
//-- Load64_buffer
val va_code_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Load64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_dst_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Load64_buffer h dst src offset t) va_s0 /\
va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)
/\ va_state_eq va_sM (va_update_ok va_sM (va_update_operand_dst_opr64 dst va_sM va_s0))))
[@ va_qattr]
let va_wp_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index /\ (forall (va_x_dst:va_value_dst_opr64) . let va_sM =
va_upd_operand_dst_opr64 dst va_x_dst va_s0 in va_get_ok va_sM /\ va_eval_dst_opr64 va_sM dst
== Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h) ==> va_k va_sM (())))
val va_wpProof_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Load64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Load64_buffer h dst src offset t)
([va_mod_dst_opr64 dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit | false | false | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Load64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_Load64_buffer h dst src offset t)) | [] | Vale.X64.InsMem.va_quick_Load64_buffer | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Vale.X64.Decls.va_operand_heaplet ->
dst: Vale.X64.Decls.va_operand_dst_opr64 ->
src: Vale.X64.Decls.va_operand_reg_opr64 ->
offset: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
b: Vale.X64.Memory.buffer64 ->
index: Prims.int
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.X64.InsMem.va_code_Load64_buffer h dst src offset t) | {
"end_col": 11,
"end_line": 250,
"start_col": 2,
"start_line": 248
} |
Prims.Tot | val va_quick_LoadBe64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_LoadBe64_buffer h dst src offset t)) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_LoadBe64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_LoadBe64_buffer h dst src offset t)) =
(va_QProc (va_code_LoadBe64_buffer h dst src offset t) ([va_mod_dst_opr64 dst])
(va_wp_LoadBe64_buffer h dst src offset t b index) (va_wpProof_LoadBe64_buffer h dst src offset
t b index)) | val va_quick_LoadBe64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_LoadBe64_buffer h dst src offset t))
let va_quick_LoadBe64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_LoadBe64_buffer h dst src offset t)) = | false | null | false | (va_QProc (va_code_LoadBe64_buffer h dst src offset t)
([va_mod_dst_opr64 dst])
(va_wp_LoadBe64_buffer h dst src offset t b index)
(va_wpProof_LoadBe64_buffer h dst src offset t b index)) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_operand_heaplet",
"Vale.X64.Decls.va_operand_dst_opr64",
"Vale.X64.Decls.va_operand_reg_opr64",
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Memory.buffer64",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.X64.InsMem.va_code_LoadBe64_buffer",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_mod_dst_opr64",
"Prims.Nil",
"Vale.X64.InsMem.va_wp_LoadBe64_buffer",
"Vale.X64.InsMem.va_wpProof_LoadBe64_buffer",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut
let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable))
//-- CreateHeaplets
val va_code_CreateHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_CreateHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_CreateHeaplets : va_b0:va_code -> va_s0:va_state -> buffers:(list buffer_info)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_CreateHeaplets ()) va_s0 /\ va_get_ok va_s0 /\ (let
(bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (norm_list
(Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) /\ va_state_eq va_sM (va_update_mem_layout
va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit ->
Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\ (forall
(va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==> va_k va_sM (())))
val va_wpProof_CreateHeaplets : buffers:(list buffer_info) -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_CreateHeaplets buffers va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_CreateHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_CreateHeaplets (buffers:(list buffer_info)) : (va_quickCode unit
(va_code_CreateHeaplets ())) =
(va_QProc (va_code_CreateHeaplets ()) ([va_Mod_mem_layout]) (va_wp_CreateHeaplets buffers)
(va_wpProof_CreateHeaplets buffers))
//--
//-- DestroyHeaplets
val va_code_DestroyHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_DestroyHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_DestroyHeaplets : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_DestroyHeaplets ()) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout va_s0).vl_inner)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_mem (Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout
va_s0).vl_inner)) (Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner))
(va_get_mem va_sM) /\ (va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint
/\ heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match
(Vale.X64.Memory.layout_buffers ((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout
va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) /\
va_state_eq va_sM (va_update_mem_layout va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_DestroyHeaplets (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout
va_s0).vl_inner) /\ (forall (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_s0).vl_inner))
(Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner)) (va_get_mem va_sM) /\
(va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint /\ heaplet_id_is_none
(va_get_mem va_sM) /\ (forall (h:heaplet_id) . {:pattern(trigger_create_heaplet
h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match (Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM)
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) ==> va_k va_sM (())))
val va_wpProof_DestroyHeaplets : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_DestroyHeaplets va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_DestroyHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_DestroyHeaplets () : (va_quickCode unit (va_code_DestroyHeaplets ())) =
(va_QProc (va_code_DestroyHeaplets ()) ([va_Mod_mem_layout]) va_wp_DestroyHeaplets
va_wpProof_DestroyHeaplets)
//--
//-- Load64_buffer
val va_code_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Load64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_dst_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Load64_buffer h dst src offset t) va_s0 /\
va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)
/\ va_state_eq va_sM (va_update_ok va_sM (va_update_operand_dst_opr64 dst va_sM va_s0))))
[@ va_qattr]
let va_wp_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index /\ (forall (va_x_dst:va_value_dst_opr64) . let va_sM =
va_upd_operand_dst_opr64 dst va_x_dst va_s0 in va_get_ok va_sM /\ va_eval_dst_opr64 va_sM dst
== Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h) ==> va_k va_sM (())))
val va_wpProof_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Load64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Load64_buffer h dst src offset t)
([va_mod_dst_opr64 dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_Load64_buffer h dst src offset t)) =
(va_QProc (va_code_Load64_buffer h dst src offset t) ([va_mod_dst_opr64 dst])
(va_wp_Load64_buffer h dst src offset t b index) (va_wpProof_Load64_buffer h dst src offset t b
index))
//--
//-- Store64_buffer
val va_code_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Store64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_reg_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Store64_buffer h dst src offset t) va_s0 /\
va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\ Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst
+ offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (va_eval_reg_opr64 va_s0 src)
(va_eval_heaplet va_s0 h) /\ va_state_eq va_sM (va_update_mem va_sM (va_update_ok va_sM
(va_update_operand_heaplet h va_sM va_s0)))))
[@ va_qattr]
let va_wp_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\ Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst
+ offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8 `op_Multiply` index /\ (forall (va_x_h:va_value_heaplet) (va_x_mem:vale_heap) . let va_sM =
va_upd_mem va_x_mem (va_upd_operand_heaplet h va_x_h va_s0) in va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (va_eval_reg_opr64 va_s0 src)
(va_eval_heaplet va_s0 h) ==> va_k va_sM (())))
val va_wpProof_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Store64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Store64_buffer h dst src offset t)
([va_Mod_mem; va_mod_heaplet h]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_Store64_buffer h dst src offset t)) =
(va_QProc (va_code_Store64_buffer h dst src offset t) ([va_Mod_mem; va_mod_heaplet h])
(va_wp_Store64_buffer h dst src offset t b index) (va_wpProof_Store64_buffer h dst src offset t
b index))
//--
//-- LoadBe64_buffer
val va_code_LoadBe64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_LoadBe64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_LoadBe64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_dst_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_LoadBe64_buffer h dst src offset t) va_s0 /\
va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64
(va_eval_heaplet va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer
#Vale.X64.Memory.vuint64 b (va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\
Vale.X64.Memory.valid_taint_buf64 b (va_eval_heaplet va_s0 h) ((va_get_mem_layout
va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 src + offset == Vale.X64.Memory.buffer_addr
#Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.Def.Types_s.reverse_bytes_nat64
(Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)) /\ va_state_eq va_sM
(va_update_ok va_sM (va_update_operand_dst_opr64 dst va_sM va_s0))))
[@ va_qattr]
let va_wp_LoadBe64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64
(va_eval_heaplet va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer
#Vale.X64.Memory.vuint64 b (va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\
Vale.X64.Memory.valid_taint_buf64 b (va_eval_heaplet va_s0 h) ((va_get_mem_layout
va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 src + offset == Vale.X64.Memory.buffer_addr
#Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) + 8 `op_Multiply` index /\ (forall
(va_x_dst:va_value_dst_opr64) . let va_sM = va_upd_operand_dst_opr64 dst va_x_dst va_s0 in
va_get_ok va_sM /\ va_eval_dst_opr64 va_sM dst == Vale.Def.Types_s.reverse_bytes_nat64
(Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)) ==> va_k va_sM (())))
val va_wpProof_LoadBe64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_LoadBe64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_LoadBe64_buffer h dst src offset t)
([va_mod_dst_opr64 dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_LoadBe64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit | false | false | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_LoadBe64_buffer
(h: va_operand_heaplet)
(dst: va_operand_dst_opr64)
(src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_LoadBe64_buffer h dst src offset t)) | [] | Vale.X64.InsMem.va_quick_LoadBe64_buffer | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Vale.X64.Decls.va_operand_heaplet ->
dst: Vale.X64.Decls.va_operand_dst_opr64 ->
src: Vale.X64.Decls.va_operand_reg_opr64 ->
offset: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
b: Vale.X64.Memory.buffer64 ->
index: Prims.int
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.X64.InsMem.va_code_LoadBe64_buffer h dst src offset t) | {
"end_col": 15,
"end_line": 358,
"start_col": 2,
"start_line": 356
} |
Prims.Tot | val va_wp_Store64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\ Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst
+ offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8 `op_Multiply` index /\ (forall (va_x_h:va_value_heaplet) (va_x_mem:vale_heap) . let va_sM =
va_upd_mem va_x_mem (va_upd_operand_heaplet h va_x_h va_s0) in va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (va_eval_reg_opr64 va_s0 src)
(va_eval_heaplet va_s0 h) ==> va_k va_sM (()))) | val va_wp_Store64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Store64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\
Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet va_s0 h) b index /\
Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64
b
(va_get_mem_layout va_s0)
(va_eval_heaplet va_s0 h)
true /\
Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h)
((va_get_mem_layout va_s0).vl_taint)
t /\
va_eval_reg_opr64 va_s0 dst + offset ==
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8
`op_Multiply`
index /\
(forall (va_x_h: va_value_heaplet) (va_x_mem: vale_heap).
let va_sM = va_upd_mem va_x_mem (va_upd_operand_heaplet h va_x_h va_s0) in
va_get_ok va_sM /\
va_eval_heaplet va_sM h ==
buffer64_write b index (va_eval_reg_opr64 va_s0 src) (va_eval_heaplet va_s0 h) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_operand_heaplet",
"Vale.X64.Decls.va_operand_reg_opr64",
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Memory.buffer64",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Vale.X64.Decls.va_is_dst_heaplet",
"Prims.b2t",
"Vale.X64.Decls.va_is_src_reg_opr64",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.Decls.valid_dst_addr",
"Vale.X64.Memory.vuint64",
"Vale.X64.Decls.va_eval_heaplet",
"Vale.X64.Memory.valid_layout_buffer",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Memory.valid_taint_buf64",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_taint",
"Prims.eq2",
"Prims.op_Addition",
"Vale.X64.Decls.va_eval_reg_opr64",
"Vale.X64.Memory.buffer_addr",
"Prims.op_Multiply",
"Prims.l_Forall",
"Vale.X64.Decls.va_value_heaplet",
"Vale.X64.InsBasic.vale_heap",
"Prims.l_imp",
"Vale.X64.Memory.vale_heap",
"Vale.X64.InsMem.buffer64_write",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_mem",
"Vale.X64.Decls.va_upd_operand_heaplet"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut
let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable))
//-- CreateHeaplets
val va_code_CreateHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_CreateHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_CreateHeaplets : va_b0:va_code -> va_s0:va_state -> buffers:(list buffer_info)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_CreateHeaplets ()) va_s0 /\ va_get_ok va_s0 /\ (let
(bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (norm_list
(Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) /\ va_state_eq va_sM (va_update_mem_layout
va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit ->
Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\ (forall
(va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==> va_k va_sM (())))
val va_wpProof_CreateHeaplets : buffers:(list buffer_info) -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_CreateHeaplets buffers va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_CreateHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_CreateHeaplets (buffers:(list buffer_info)) : (va_quickCode unit
(va_code_CreateHeaplets ())) =
(va_QProc (va_code_CreateHeaplets ()) ([va_Mod_mem_layout]) (va_wp_CreateHeaplets buffers)
(va_wpProof_CreateHeaplets buffers))
//--
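// Illustrative only (buffer names are hypothetical): a caller typically invokes CreateHeaplets
// once, at the start of a procedure, with a literal list of declarations such as
//   [ declare_buffer64 b_key 0 Secret Immutable; declare_buffer128 b_data 1 Secret Mutable ]
// so that norm_list / norm_loc can reduce the list and its modifies-location by computation.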
//-- DestroyHeaplets
val va_code_DestroyHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_DestroyHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_DestroyHeaplets : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_DestroyHeaplets ()) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout va_s0).vl_inner)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_mem (Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout
va_s0).vl_inner)) (Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner))
(va_get_mem va_sM) /\ (va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint
/\ heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match
(Vale.X64.Memory.layout_buffers ((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout
va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) /\
va_state_eq va_sM (va_update_mem_layout va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_DestroyHeaplets (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout
va_s0).vl_inner) /\ (forall (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_s0).vl_inner))
(Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner)) (va_get_mem va_sM) /\
(va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint /\ heaplet_id_is_none
(va_get_mem va_sM) /\ (forall (h:heaplet_id) . {:pattern(trigger_create_heaplet
h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match (Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM)
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) ==> va_k va_sM (())))
val va_wpProof_DestroyHeaplets : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_DestroyHeaplets va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_DestroyHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_DestroyHeaplets () : (va_quickCode unit (va_code_DestroyHeaplets ())) =
(va_QProc (va_code_DestroyHeaplets ()) ([va_Mod_mem_layout]) va_wp_DestroyHeaplets
va_wpProof_DestroyHeaplets)
//--
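// DestroyHeaplets is the dual of CreateHeaplets: it collapses the heaplet views back into the
// single heap and reports that only layout_modifies_loc (the buffers declared Mutable) may have
// changed since the layout_old_heap snapshot taken at creation time.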
//-- Load64_buffer
val va_code_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Load64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_dst_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Load64_buffer h dst src offset t) va_s0 /\
va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)
/\ va_state_eq va_sM (va_update_ok va_sM (va_update_operand_dst_opr64 dst va_sM va_s0))))
[@ va_qattr]
let va_wp_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index /\ (forall (va_x_dst:va_value_dst_opr64) . let va_sM =
va_upd_operand_dst_opr64 dst va_x_dst va_s0 in va_get_ok va_sM /\ va_eval_dst_opr64 va_sM dst
== Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h) ==> va_k va_sM (())))
val va_wpProof_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Load64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Load64_buffer h dst src offset t)
([va_mod_dst_opr64 dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_Load64_buffer h dst src offset t)) =
(va_QProc (va_code_Load64_buffer h dst src offset t) ([va_mod_dst_opr64 dst])
(va_wp_Load64_buffer h dst src offset t b index) (va_wpProof_Load64_buffer h dst src offset t b
index))
//--
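// Each memory instruction in this interface follows the same generated pattern: va_code_* builds
// the concrete code, va_codegen_success_* signals whether code generation succeeds, va_lemma_*
// is the Hoare-style correctness lemma, va_wp_* / va_wpProof_* give the weakest-precondition
// form, and va_quick_* packages everything as a va_quickCode for the QProc combinators.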
//-- Store64_buffer
val va_code_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Store64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_reg_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Store64_buffer h dst src offset t) va_s0 /\
va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\ Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst
+ offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (va_eval_reg_opr64 va_s0 src)
(va_eval_heaplet va_s0 h) /\ va_state_eq va_sM (va_update_mem va_sM (va_update_ok va_sM
(va_update_operand_heaplet h va_sM va_s0)))))
[@ va_qattr]
let va_wp_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state) | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Store64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.X64.InsMem.va_wp_Store64_buffer | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Vale.X64.Decls.va_operand_heaplet ->
dst: Vale.X64.Decls.va_operand_reg_opr64 ->
src: Vale.X64.Decls.va_operand_reg_opr64 ->
offset: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
b: Vale.X64.Memory.buffer64 ->
index: Prims.int ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 51,
"end_line": 289,
"start_col": 2,
"start_line": 280
} |
Prims.Tot | val va_quick_Store64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_Store64_buffer h dst src offset t)) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_Store64_buffer h dst src offset t)) =
(va_QProc (va_code_Store64_buffer h dst src offset t) ([va_Mod_mem; va_mod_heaplet h])
(va_wp_Store64_buffer h dst src offset t b index) (va_wpProof_Store64_buffer h dst src offset t
b index)) | val va_quick_Store64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_Store64_buffer h dst src offset t))
let va_quick_Store64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_Store64_buffer h dst src offset t)) = | false | null | false | (va_QProc (va_code_Store64_buffer h dst src offset t)
([va_Mod_mem; va_mod_heaplet h])
(va_wp_Store64_buffer h dst src offset t b index)
(va_wpProof_Store64_buffer h dst src offset t b index)) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_operand_heaplet",
"Vale.X64.Decls.va_operand_reg_opr64",
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Memory.buffer64",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.X64.InsMem.va_code_Store64_buffer",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_mem",
"Vale.X64.QuickCode.va_mod_heaplet",
"Prims.Nil",
"Vale.X64.InsMem.va_wp_Store64_buffer",
"Vale.X64.InsMem.va_wpProof_Store64_buffer",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
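// buffer64_write is a specification-level (Ghost) update: it appears in postconditions such as
// Store64_buffer's to describe the new heaplet contents, never in executable code.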
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
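// A vale_heap either carries no heaplet id (the full heap) or the id of the heaplet it views;
// these predicates let contracts state which case applies.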
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
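// norm_list and norm_loc force normalization by computation (zeta/iota/delta reduction) of
// list_to_seq_post and loc_mutable_buffers, so the SMT solver only sees the fully reduced
// sequence and location rather than the recursive definitions.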
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut
let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable))
//-- CreateHeaplets
val va_code_CreateHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_CreateHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_CreateHeaplets : va_b0:va_code -> va_s0:va_state -> buffers:(list buffer_info)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_CreateHeaplets ()) va_s0 /\ va_get_ok va_s0 /\ (let
(bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (norm_list
(Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) /\ va_state_eq va_sM (va_update_mem_layout
va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit ->
Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\ (forall
(va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==> va_k va_sM (())))
val va_wpProof_CreateHeaplets : buffers:(list buffer_info) -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_CreateHeaplets buffers va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_CreateHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_CreateHeaplets (buffers:(list buffer_info)) : (va_quickCode unit
(va_code_CreateHeaplets ())) =
(va_QProc (va_code_CreateHeaplets ()) ([va_Mod_mem_layout]) (va_wp_CreateHeaplets buffers)
(va_wpProof_CreateHeaplets buffers))
//--
//-- DestroyHeaplets
val va_code_DestroyHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_DestroyHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_DestroyHeaplets : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_DestroyHeaplets ()) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout va_s0).vl_inner)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_mem (Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout
va_s0).vl_inner)) (Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner))
(va_get_mem va_sM) /\ (va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint
/\ heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match
(Vale.X64.Memory.layout_buffers ((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout
va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) /\
va_state_eq va_sM (va_update_mem_layout va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_DestroyHeaplets (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout
va_s0).vl_inner) /\ (forall (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_s0).vl_inner))
(Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner)) (va_get_mem va_sM) /\
(va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint /\ heaplet_id_is_none
(va_get_mem va_sM) /\ (forall (h:heaplet_id) . {:pattern(trigger_create_heaplet
h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match (Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM)
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) ==> va_k va_sM (())))
val va_wpProof_DestroyHeaplets : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_DestroyHeaplets va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_DestroyHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_DestroyHeaplets () : (va_quickCode unit (va_code_DestroyHeaplets ())) =
(va_QProc (va_code_DestroyHeaplets ()) ([va_Mod_mem_layout]) va_wp_DestroyHeaplets
va_wpProof_DestroyHeaplets)
//--
//-- Load64_buffer
val va_code_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Load64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_dst_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Load64_buffer h dst src offset t) va_s0 /\
va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)
/\ va_state_eq va_sM (va_update_ok va_sM (va_update_operand_dst_opr64 dst va_sM va_s0))))
[@ va_qattr]
let va_wp_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index /\ (forall (va_x_dst:va_value_dst_opr64) . let va_sM =
va_upd_operand_dst_opr64 dst va_x_dst va_s0 in va_get_ok va_sM /\ va_eval_dst_opr64 va_sM dst
== Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h) ==> va_k va_sM (())))
val va_wpProof_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Load64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Load64_buffer h dst src offset t)
([va_mod_dst_opr64 dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_Load64_buffer h dst src offset t)) =
(va_QProc (va_code_Load64_buffer h dst src offset t) ([va_mod_dst_opr64 dst])
(va_wp_Load64_buffer h dst src offset t b index) (va_wpProof_Load64_buffer h dst src offset t b
index))
//--
//-- Store64_buffer
val va_code_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Store64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_reg_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Store64_buffer h dst src offset t) va_s0 /\
va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\ Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst
+ offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (va_eval_reg_opr64 va_s0 src)
(va_eval_heaplet va_s0 h) /\ va_state_eq va_sM (va_update_mem va_sM (va_update_ok va_sM
(va_update_operand_heaplet h va_sM va_s0)))))
[@ va_qattr]
let va_wp_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\ Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst
+ offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8 `op_Multiply` index /\ (forall (va_x_h:va_value_heaplet) (va_x_mem:vale_heap) . let va_sM =
va_upd_mem va_x_mem (va_upd_operand_heaplet h va_x_h va_s0) in va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (va_eval_reg_opr64 va_s0 src)
(va_eval_heaplet va_s0 h) ==> va_k va_sM (())))
val va_wpProof_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Store64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Store64_buffer h dst src offset t)
([va_Mod_mem; va_mod_heaplet h]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit | false | false | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Store64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_Store64_buffer h dst src offset t)) | [] | Vale.X64.InsMem.va_quick_Store64_buffer | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Vale.X64.Decls.va_operand_heaplet ->
dst: Vale.X64.Decls.va_operand_reg_opr64 ->
src: Vale.X64.Decls.va_operand_reg_opr64 ->
offset: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
b: Vale.X64.Memory.buffer64 ->
index: Prims.int
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.X64.InsMem.va_code_Store64_buffer h dst src offset t) | {
"end_col": 13,
"end_line": 304,
"start_col": 2,
"start_line": 302
} |
Prims.Tot | val va_quick_StoreBe64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_StoreBe64_buffer h dst src offset t)) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_StoreBe64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_StoreBe64_buffer h dst src offset t)) =
(va_QProc (va_code_StoreBe64_buffer h dst src offset t) ([va_Mod_mem; va_mod_heaplet h])
(va_wp_StoreBe64_buffer h dst src offset t b index) (va_wpProof_StoreBe64_buffer h dst src
offset t b index)) | val va_quick_StoreBe64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_StoreBe64_buffer h dst src offset t))
let va_quick_StoreBe64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_StoreBe64_buffer h dst src offset t)) = | false | null | false | (va_QProc (va_code_StoreBe64_buffer h dst src offset t)
([va_Mod_mem; va_mod_heaplet h])
(va_wp_StoreBe64_buffer h dst src offset t b index)
(va_wpProof_StoreBe64_buffer h dst src offset t b index)) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_operand_heaplet",
"Vale.X64.Decls.va_operand_reg_opr64",
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Memory.buffer64",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.X64.InsMem.va_code_StoreBe64_buffer",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_mem",
"Vale.X64.QuickCode.va_mod_heaplet",
"Prims.Nil",
"Vale.X64.InsMem.va_wp_StoreBe64_buffer",
"Vale.X64.InsMem.va_wpProof_StoreBe64_buffer",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
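// Mem64_lemma emits no machine code (its modifies list is empty and the final state equals the
// initial one); it only justifies that a Mem64 heaplet operand is valid and that loading from it
// returns the expected buffer contents.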
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
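// trigger_create_heaplet is trivially True; it exists only as the {:pattern ...} trigger for the
// per-heaplet quantifiers in the CreateHeaplets/DestroyHeaplets contracts.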
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut
let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable))
//-- CreateHeaplets
val va_code_CreateHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_CreateHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_CreateHeaplets : va_b0:va_code -> va_s0:va_state -> buffers:(list buffer_info)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_CreateHeaplets ()) va_s0 /\ va_get_ok va_s0 /\ (let
(bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (norm_list
(Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) /\ va_state_eq va_sM (va_update_mem_layout
va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit ->
Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\ (forall
(va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==> va_k va_sM (())))
val va_wpProof_CreateHeaplets : buffers:(list buffer_info) -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_CreateHeaplets buffers va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_CreateHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_CreateHeaplets (buffers:(list buffer_info)) : (va_quickCode unit
(va_code_CreateHeaplets ())) =
(va_QProc (va_code_CreateHeaplets ()) ([va_Mod_mem_layout]) (va_wp_CreateHeaplets buffers)
(va_wpProof_CreateHeaplets buffers))
//--
//-- DestroyHeaplets
val va_code_DestroyHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_DestroyHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_DestroyHeaplets : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_DestroyHeaplets ()) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout va_s0).vl_inner)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_mem (Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout
va_s0).vl_inner)) (Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner))
(va_get_mem va_sM) /\ (va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint
/\ heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match
(Vale.X64.Memory.layout_buffers ((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout
va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) /\
va_state_eq va_sM (va_update_mem_layout va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_DestroyHeaplets (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout
va_s0).vl_inner) /\ (forall (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_s0).vl_inner))
(Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner)) (va_get_mem va_sM) /\
(va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint /\ heaplet_id_is_none
(va_get_mem va_sM) /\ (forall (h:heaplet_id) . {:pattern(trigger_create_heaplet
h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match (Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM)
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) ==> va_k va_sM (())))
val va_wpProof_DestroyHeaplets : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_DestroyHeaplets va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_DestroyHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_DestroyHeaplets () : (va_quickCode unit (va_code_DestroyHeaplets ())) =
(va_QProc (va_code_DestroyHeaplets ()) ([va_Mod_mem_layout]) va_wp_DestroyHeaplets
va_wpProof_DestroyHeaplets)
//--
//-- Load64_buffer
val va_code_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Load64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_dst_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Load64_buffer h dst src offset t) va_s0 /\
va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)
/\ va_state_eq va_sM (va_update_ok va_sM (va_update_operand_dst_opr64 dst va_sM va_s0))))
[@ va_qattr]
let va_wp_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index /\ (forall (va_x_dst:va_value_dst_opr64) . let va_sM =
va_upd_operand_dst_opr64 dst va_x_dst va_s0 in va_get_ok va_sM /\ va_eval_dst_opr64 va_sM dst
== Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h) ==> va_k va_sM (())))
val va_wpProof_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Load64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Load64_buffer h dst src offset t)
([va_mod_dst_opr64 dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_Load64_buffer h dst src offset t)) =
(va_QProc (va_code_Load64_buffer h dst src offset t) ([va_mod_dst_opr64 dst])
(va_wp_Load64_buffer h dst src offset t b index) (va_wpProof_Load64_buffer h dst src offset t b
index))
//--
//-- Store64_buffer
val va_code_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Store64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_reg_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Store64_buffer h dst src offset t) va_s0 /\
va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\ Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst
+ offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (va_eval_reg_opr64 va_s0 src)
(va_eval_heaplet va_s0 h) /\ va_state_eq va_sM (va_update_mem va_sM (va_update_ok va_sM
(va_update_operand_heaplet h va_sM va_s0)))))
[@ va_qattr]
let va_wp_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\ Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst
+ offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8 `op_Multiply` index /\ (forall (va_x_h:va_value_heaplet) (va_x_mem:vale_heap) . let va_sM =
va_upd_mem va_x_mem (va_upd_operand_heaplet h va_x_h va_s0) in va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (va_eval_reg_opr64 va_s0 src)
(va_eval_heaplet va_s0 h) ==> va_k va_sM (())))
val va_wpProof_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Store64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Store64_buffer h dst src offset t)
([va_Mod_mem; va_mod_heaplet h]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_Store64_buffer h dst src offset t)) =
(va_QProc (va_code_Store64_buffer h dst src offset t) ([va_Mod_mem; va_mod_heaplet h])
(va_wp_Store64_buffer h dst src offset t b index) (va_wpProof_Store64_buffer h dst src offset t
b index))
//--
//-- LoadBe64_buffer
val va_code_LoadBe64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_LoadBe64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_LoadBe64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_dst_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_LoadBe64_buffer h dst src offset t) va_s0 /\
va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64
(va_eval_heaplet va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer
#Vale.X64.Memory.vuint64 b (va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\
Vale.X64.Memory.valid_taint_buf64 b (va_eval_heaplet va_s0 h) ((va_get_mem_layout
va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 src + offset == Vale.X64.Memory.buffer_addr
#Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.Def.Types_s.reverse_bytes_nat64
(Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)) /\ va_state_eq va_sM
(va_update_ok va_sM (va_update_operand_dst_opr64 dst va_sM va_s0))))
[@ va_qattr]
let va_wp_LoadBe64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64
(va_eval_heaplet va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer
#Vale.X64.Memory.vuint64 b (va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\
Vale.X64.Memory.valid_taint_buf64 b (va_eval_heaplet va_s0 h) ((va_get_mem_layout
va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 src + offset == Vale.X64.Memory.buffer_addr
#Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) + 8 `op_Multiply` index /\ (forall
(va_x_dst:va_value_dst_opr64) . let va_sM = va_upd_operand_dst_opr64 dst va_x_dst va_s0 in
va_get_ok va_sM /\ va_eval_dst_opr64 va_sM dst == Vale.Def.Types_s.reverse_bytes_nat64
(Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)) ==> va_k va_sM (())))
val va_wpProof_LoadBe64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_LoadBe64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_LoadBe64_buffer h dst src offset t)
([va_mod_dst_opr64 dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_LoadBe64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_LoadBe64_buffer h dst src offset t)) =
(va_QProc (va_code_LoadBe64_buffer h dst src offset t) ([va_mod_dst_opr64 dst])
(va_wp_LoadBe64_buffer h dst src offset t b index) (va_wpProof_LoadBe64_buffer h dst src offset
t b index))
//--
//-- StoreBe64_buffer
val va_code_StoreBe64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_StoreBe64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_StoreBe64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_reg_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_StoreBe64_buffer h dst src offset t) va_s0 /\
va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64
(va_eval_heaplet va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer
#Vale.X64.Memory.vuint64 b (va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\
Vale.X64.Memory.valid_taint_buf64 b (va_eval_heaplet va_s0 h) ((va_get_mem_layout
va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst + offset == Vale.X64.Memory.buffer_addr
#Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (Vale.Def.Types_s.reverse_bytes_nat64
(va_eval_reg_opr64 va_s0 src)) (va_eval_heaplet va_s0 h) /\ va_state_eq va_sM (va_update_mem
va_sM (va_update_ok va_sM (va_update_operand_heaplet h va_sM va_s0)))))
[@ va_qattr]
let va_wp_StoreBe64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64
(va_eval_heaplet va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer
#Vale.X64.Memory.vuint64 b (va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\
Vale.X64.Memory.valid_taint_buf64 b (va_eval_heaplet va_s0 h) ((va_get_mem_layout
va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst + offset == Vale.X64.Memory.buffer_addr
#Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) + 8 `op_Multiply` index /\ (forall
(va_x_h:va_value_heaplet) (va_x_mem:vale_heap) . let va_sM = va_upd_mem va_x_mem
(va_upd_operand_heaplet h va_x_h va_s0) in va_get_ok va_sM /\ va_eval_heaplet va_sM h ==
buffer64_write b index (Vale.Def.Types_s.reverse_bytes_nat64 (va_eval_reg_opr64 va_s0 src))
(va_eval_heaplet va_s0 h) ==> va_k va_sM (())))
val va_wpProof_StoreBe64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_StoreBe64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_StoreBe64_buffer h dst src offset t)
([va_Mod_mem; va_mod_heaplet h]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_StoreBe64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit | false | false | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_StoreBe64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
: (va_quickCode unit (va_code_StoreBe64_buffer h dst src offset t)) | [] | Vale.X64.InsMem.va_quick_StoreBe64_buffer | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Vale.X64.Decls.va_operand_heaplet ->
dst: Vale.X64.Decls.va_operand_reg_opr64 ->
src: Vale.X64.Decls.va_operand_reg_opr64 ->
offset: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
b: Vale.X64.Memory.buffer64 ->
index: Prims.int
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.X64.InsMem.va_code_StoreBe64_buffer h dst src offset t) | {
"end_col": 22,
"end_line": 413,
"start_col": 2,
"start_line": 411
} |
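The Store64_buffer/StoreBe64_buffer procedures above share one address-side precondition: the value of the destination register plus the immediate offset must equal buffer_addr b (va_eval_heaplet va_s0 h) + 8 * index, because each buffer64 element occupies 8 bytes. A minimal Python sketch of that arithmetic (illustrative only; the names below are not from Vale):

    # Sketch, not Vale/F*: the precondition requires reg(dst) + offset == element_addr(base, index).
    ELEM_BYTES = 8                       # width of one buffer64 element

    def element_addr(base: int, index: int) -> int:
        return base + ELEM_BYTES * index

    # e.g. element 3 of a buffer whose base address is 0x1000 lives at 0x1018
    assert element_addr(0x1000, 3) == 0x1018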
Prims.Tot | val va_wp_StoreBe64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_StoreBe64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64
(va_eval_heaplet va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer
#Vale.X64.Memory.vuint64 b (va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\
Vale.X64.Memory.valid_taint_buf64 b (va_eval_heaplet va_s0 h) ((va_get_mem_layout
va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst + offset == Vale.X64.Memory.buffer_addr
#Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) + 8 `op_Multiply` index /\ (forall
(va_x_h:va_value_heaplet) (va_x_mem:vale_heap) . let va_sM = va_upd_mem va_x_mem
(va_upd_operand_heaplet h va_x_h va_s0) in va_get_ok va_sM /\ va_eval_heaplet va_sM h ==
buffer64_write b index (Vale.Def.Types_s.reverse_bytes_nat64 (va_eval_reg_opr64 va_s0 src))
(va_eval_heaplet va_s0 h) ==> va_k va_sM (()))) | val va_wp_StoreBe64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_StoreBe64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\
Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet va_s0 h) b index /\
Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64
b
(va_get_mem_layout va_s0)
(va_eval_heaplet va_s0 h)
true /\
Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h)
((va_get_mem_layout va_s0).vl_taint)
t /\
va_eval_reg_opr64 va_s0 dst + offset ==
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8
`op_Multiply`
index /\
(forall (va_x_h: va_value_heaplet) (va_x_mem: vale_heap).
let va_sM = va_upd_mem va_x_mem (va_upd_operand_heaplet h va_x_h va_s0) in
va_get_ok va_sM /\
va_eval_heaplet va_sM h ==
buffer64_write b
index
(Vale.Def.Types_s.reverse_bytes_nat64 (va_eval_reg_opr64 va_s0 src))
(va_eval_heaplet va_s0 h) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.X64.InsMem.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.X64.InsMem.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_operand_heaplet",
"Vale.X64.Decls.va_operand_reg_opr64",
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Memory.buffer64",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Vale.X64.Decls.va_is_dst_heaplet",
"Prims.b2t",
"Vale.X64.Decls.va_is_src_reg_opr64",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.movbe_enabled",
"Vale.X64.Decls.valid_dst_addr",
"Vale.X64.Memory.vuint64",
"Vale.X64.Decls.va_eval_heaplet",
"Vale.X64.Memory.valid_layout_buffer",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Memory.valid_taint_buf64",
"Vale.Arch.HeapImpl.__proj__Mkvale_heap_layout__item__vl_taint",
"Prims.eq2",
"Prims.op_Addition",
"Vale.X64.Decls.va_eval_reg_opr64",
"Vale.X64.Memory.buffer_addr",
"Prims.op_Multiply",
"Prims.l_Forall",
"Vale.X64.Decls.va_value_heaplet",
"Vale.X64.InsBasic.vale_heap",
"Prims.l_imp",
"Vale.X64.Memory.vale_heap",
"Vale.X64.InsMem.buffer64_write",
"Vale.Def.Types_s.reverse_bytes_nat64",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_mem",
"Vale.X64.Decls.va_upd_operand_heaplet"
] | [] | module Vale.X64.InsMem
open FStar.Seq
open Vale.Def.Types_s
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.QuickCode
open Vale.X64.InsBasic
open Vale.X64.CPU_Features_s
open Vale.Lib.Seqs
//-- Mem64_lemma
val va_code_Mem64_lemma : va_dummy:unit -> Tot va_code
val va_codegen_success_Mem64_lemma : va_dummy:unit -> Tot va_pbool
val va_lemma_Mem64_lemma : va_b0:va_code -> va_s0:va_state -> h:heaplet_id -> base:operand64 ->
offset:int -> b:buffer64 -> index:int -> t:taint
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Mem64_lemma ()) va_s0 /\ va_get_ok va_s0 /\ (let
heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr heap_h b index /\
valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\ valid_taint_buf64 b heap_h
((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 + offset == buffer_addr b
heap_h + 8 `op_Multiply` index)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base offset t)
va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM) ==
buffer_read b index heap_h) /\ va_state_eq va_sM (va_update_ok va_sM va_s0)))
[@ va_qattr]
let va_wp_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let heap_h = va_get_mem_heaplet h va_s0 in (OReg? base) /\ valid_src_addr
heap_h b index /\ valid_layout_buffer b (va_get_mem_layout va_s0) heap_h false /\
valid_taint_buf64 b heap_h ((va_get_mem_layout va_s0).vl_taint) t /\ eval_operand base va_s0 +
offset == buffer_addr b heap_h + 8 `op_Multiply` index) /\ (let va_sM = va_s0 in va_get_ok
va_sM /\ (let heap_h = va_get_mem_heaplet h va_s0 in valid_operand (va_opr_code_Mem64 h base
offset t) va_sM /\ load_mem64 (buffer_addr b heap_h + 8 `op_Multiply` index) (va_get_mem va_sM)
== buffer_read b index heap_h) ==> va_k va_sM (())))
val va_wpProof_Mem64_lemma : h:heaplet_id -> base:operand64 -> offset:int -> b:buffer64 ->
index:int -> t:taint -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Mem64_lemma h base offset b index t va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Mem64_lemma ()) ([]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Mem64_lemma (h:heaplet_id) (base:operand64) (offset:int) (b:buffer64) (index:int)
(t:taint) : (va_quickCode unit (va_code_Mem64_lemma ())) =
(va_QProc (va_code_Mem64_lemma ()) ([]) (va_wp_Mem64_lemma h base offset b index t)
(va_wpProof_Mem64_lemma h base offset b index t))
//--
let buffer64_write (b:buffer64) (i:int) (v:nat64) (h:vale_heap) : Ghost vale_heap
(requires buffer_readable h b /\ buffer_writeable b)
(ensures fun _ -> True)
=
buffer_write b i v h
let heaplet_id_is_none (h:vale_heap) =
get_heaplet_id h == None
let heaplet_id_is_some (h:vale_heap) (i:heaplet_id) =
get_heaplet_id h == Some i
unfold let norm_list (p:prop) : prop =
norm [zeta; iota; delta_only [`%list_to_seq_post]] p
irreducible let norm_loc_attr = ()
unfold let norm_loc (l:loc) : loc =
norm [zeta; iota; delta_only [`%loc_mutable_buffers]; delta_attr [`%norm_loc_attr]] l
let trigger_create_heaplet (h:heaplet_id) = True
[@norm_loc_attr]
unfold let declare_buffer64 (b:buffer TUInt64) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt64 b hid t mut
[@norm_loc_attr]
unfold let declare_buffer128 (b:buffer TUInt128) (hid:heaplet_id) (t:taint) (mut:mutability) : buffer_info =
Mkbuffer_info TUInt128 b hid t mut
let create_post (layout:vale_heap_layout) (bs:Seq.seq buffer_info) =
forall (i:nat).{:pattern Seq.index bs i} i < Seq.length bs ==> (
let Mkbuffer_info t b hid _ mut = Seq.index bs i in
trigger_create_heaplet hid /\
valid_layout_buffer_id t b layout (Some hid) false /\
valid_layout_buffer_id t b layout (Some hid) (mut = Mutable))
//-- CreateHeaplets
val va_code_CreateHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_CreateHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_CreateHeaplets : va_b0:va_code -> va_s0:va_state -> buffers:(list buffer_info)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_CreateHeaplets ()) va_s0 /\ va_get_ok va_s0 /\ (let
(bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (norm_list
(Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq #buffer_info buffers in
norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) /\ va_state_eq va_sM (va_update_mem_layout
va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_CreateHeaplets (buffers:(list buffer_info)) (va_s0:va_state) (va_k:(va_state -> unit ->
Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem
va_s0) /\ (norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) ==>
Vale.X64.Memory.init_heaplets_req (va_get_mem va_s0) bs)) /\ (forall
(va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout va_x_memLayout va_s0 in
va_get_ok va_sM /\ (let (bs:(FStar.Seq.Base.seq buffer_info)) = Vale.Lib.Seqs.list_to_seq
#buffer_info buffers in norm_list (Vale.Lib.Seqs.list_to_seq_post #buffer_info buffers bs 0) /\
Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_sM).vl_inner) == norm_loc
(Vale.X64.Memory.loc_mutable_buffers buffers) /\ Vale.X64.Memory.layout_old_heap
((va_get_mem_layout va_sM).vl_inner) == va_get_mem va_s0 /\ Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_sM).vl_inner) == bs /\ Vale.X64.Memory.layout_heaplets_initialized
((va_get_mem_layout va_sM).vl_inner) /\ (va_get_mem_layout va_sM).vl_taint ==
(va_get_mem_layout va_s0).vl_taint /\ create_post (va_get_mem_layout va_sM) bs /\
heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> heaplet_id_is_some
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h /\ Vale.X64.Memory.heaps_match bs
((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h
va_sM) h) /\ (forall (i:nat) . {:pattern(Seq.index bs i)}i < FStar.Seq.Base.length #buffer_info
bs ==> Vale.X64.Memory.buffer_info_has_id bs i ((FStar.Seq.Base.index
#Vale.Arch.HeapImpl.buffer_info bs i).bi_heaplet))) ==> va_k va_sM (())))
val va_wpProof_CreateHeaplets : buffers:(list buffer_info) -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_CreateHeaplets buffers va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_CreateHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_CreateHeaplets (buffers:(list buffer_info)) : (va_quickCode unit
(va_code_CreateHeaplets ())) =
(va_QProc (va_code_CreateHeaplets ()) ([va_Mod_mem_layout]) (va_wp_CreateHeaplets buffers)
(va_wpProof_CreateHeaplets buffers))
//--
//-- DestroyHeaplets
val va_code_DestroyHeaplets : va_dummy:unit -> Tot va_code
val va_codegen_success_DestroyHeaplets : va_dummy:unit -> Tot va_pbool
val va_lemma_DestroyHeaplets : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_DestroyHeaplets ()) va_s0 /\ va_get_ok va_s0 /\
Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout va_s0).vl_inner)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
Vale.X64.Decls.modifies_mem (Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout
va_s0).vl_inner)) (Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner))
(va_get_mem va_sM) /\ (va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint
/\ heaplet_id_is_none (va_get_mem va_sM) /\ (forall (h:heaplet_id) .
{:pattern(trigger_create_heaplet h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match
(Vale.X64.Memory.layout_buffers ((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout
va_sM).vl_taint) (va_get_mem va_sM) (Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) /\
va_state_eq va_sM (va_update_mem_layout va_sM (va_update_ok va_sM va_s0))))
[@ va_qattr]
let va_wp_DestroyHeaplets (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ Vale.X64.Memory.layout_heaplets_initialized ((va_get_mem_layout
va_s0).vl_inner) /\ (forall (va_x_memLayout:vale_heap_layout) . let va_sM = va_upd_mem_layout
va_x_memLayout va_s0 in va_get_ok va_sM /\ Vale.X64.Decls.modifies_mem
(Vale.X64.Memory.layout_modifies_loc ((va_get_mem_layout va_s0).vl_inner))
(Vale.X64.Memory.layout_old_heap ((va_get_mem_layout va_s0).vl_inner)) (va_get_mem va_sM) /\
(va_get_mem_layout va_sM).vl_taint == (va_get_mem_layout va_s0).vl_taint /\ heaplet_id_is_none
(va_get_mem va_sM) /\ (forall (h:heaplet_id) . {:pattern(trigger_create_heaplet
h)}trigger_create_heaplet h ==> Vale.X64.Memory.heaps_match (Vale.X64.Memory.layout_buffers
((va_get_mem_layout va_s0).vl_inner)) ((va_get_mem_layout va_sM).vl_taint) (va_get_mem va_sM)
(Vale.X64.Decls.va_get_mem_heaplet h va_sM) h) ==> va_k va_sM (())))
val va_wpProof_DestroyHeaplets : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_DestroyHeaplets va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_DestroyHeaplets ())
([va_Mod_mem_layout]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_DestroyHeaplets () : (va_quickCode unit (va_code_DestroyHeaplets ())) =
(va_QProc (va_code_DestroyHeaplets ()) ([va_Mod_mem_layout]) va_wp_DestroyHeaplets
va_wpProof_DestroyHeaplets)
//--
//-- Load64_buffer
val va_code_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Load64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_dst_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Load64_buffer h dst src offset t) va_s0 /\
va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)
/\ va_state_eq va_sM (va_update_ok va_sM (va_update_operand_dst_opr64 dst va_sM va_s0))))
[@ va_qattr]
let va_wp_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\ Vale.X64.Memory.valid_taint_buf64
b (va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0
src + offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0
h) + 8 `op_Multiply` index /\ (forall (va_x_dst:va_value_dst_opr64) . let va_sM =
va_upd_operand_dst_opr64 dst va_x_dst va_s0 in va_get_ok va_sM /\ va_eval_dst_opr64 va_sM dst
== Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h) ==> va_k va_sM (())))
val va_wpProof_Load64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Load64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Load64_buffer h dst src offset t)
([va_mod_dst_opr64 dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Load64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_Load64_buffer h dst src offset t)) =
(va_QProc (va_code_Load64_buffer h dst src offset t) ([va_mod_dst_opr64 dst])
(va_wp_Load64_buffer h dst src offset t b index) (va_wpProof_Load64_buffer h dst src offset t b
index))
//--
//-- Store64_buffer
val va_code_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_Store64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_reg_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Store64_buffer h dst src offset t) va_s0 /\
va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\ Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst
+ offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (va_eval_reg_opr64 va_s0 src)
(va_eval_heaplet va_s0 h) /\ va_state_eq va_sM (va_update_mem va_sM (va_update_ok va_sM
(va_update_operand_heaplet h va_sM va_s0)))))
[@ va_qattr]
let va_wp_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64 (va_eval_heaplet
va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer #Vale.X64.Memory.vuint64 b
(va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\ Vale.X64.Memory.valid_taint_buf64 b
(va_eval_heaplet va_s0 h) ((va_get_mem_layout va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst
+ offset == Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) +
8 `op_Multiply` index /\ (forall (va_x_h:va_value_heaplet) (va_x_mem:vale_heap) . let va_sM =
va_upd_mem va_x_mem (va_upd_operand_heaplet h va_x_h va_s0) in va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (va_eval_reg_opr64 va_s0 src)
(va_eval_heaplet va_s0 h) ==> va_k va_sM (())))
val va_wpProof_Store64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Store64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Store64_buffer h dst src offset t)
([va_Mod_mem; va_mod_heaplet h]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Store64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_Store64_buffer h dst src offset t)) =
(va_QProc (va_code_Store64_buffer h dst src offset t) ([va_Mod_mem; va_mod_heaplet h])
(va_wp_Store64_buffer h dst src offset t b index) (va_wpProof_Store64_buffer h dst src offset t
b index))
//--
//-- LoadBe64_buffer
val va_code_LoadBe64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_LoadBe64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_LoadBe64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_dst_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_LoadBe64_buffer h dst src offset t) va_s0 /\
va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64
(va_eval_heaplet va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer
#Vale.X64.Memory.vuint64 b (va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\
Vale.X64.Memory.valid_taint_buf64 b (va_eval_heaplet va_s0 h) ((va_get_mem_layout
va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 src + offset == Vale.X64.Memory.buffer_addr
#Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_dst_opr64 va_sM dst == Vale.Def.Types_s.reverse_bytes_nat64
(Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)) /\ va_state_eq va_sM
(va_update_ok va_sM (va_update_operand_dst_opr64 dst va_sM va_s0))))
[@ va_qattr]
let va_wp_LoadBe64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_is_src_heaplet h va_s0 /\ va_is_dst_dst_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\ Vale.X64.Decls.valid_src_addr #Vale.X64.Memory.vuint64
(va_eval_heaplet va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer
#Vale.X64.Memory.vuint64 b (va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) false /\
Vale.X64.Memory.valid_taint_buf64 b (va_eval_heaplet va_s0 h) ((va_get_mem_layout
va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 src + offset == Vale.X64.Memory.buffer_addr
#Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) + 8 `op_Multiply` index /\ (forall
(va_x_dst:va_value_dst_opr64) . let va_sM = va_upd_operand_dst_opr64 dst va_x_dst va_s0 in
va_get_ok va_sM /\ va_eval_dst_opr64 va_sM dst == Vale.Def.Types_s.reverse_bytes_nat64
(Vale.X64.Decls.buffer64_read b index (va_eval_heaplet va_sM h)) ==> va_k va_sM (())))
val va_wpProof_LoadBe64_buffer : h:va_operand_heaplet -> dst:va_operand_dst_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 -> index:int -> va_s0:va_state ->
va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_LoadBe64_buffer h dst src offset t b index va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_LoadBe64_buffer h dst src offset t)
([va_mod_dst_opr64 dst]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_LoadBe64_buffer (h:va_operand_heaplet) (dst:va_operand_dst_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) : (va_quickCode unit
(va_code_LoadBe64_buffer h dst src offset t)) =
(va_QProc (va_code_LoadBe64_buffer h dst src offset t) ([va_mod_dst_opr64 dst])
(va_wp_LoadBe64_buffer h dst src offset t b index) (va_wpProof_LoadBe64_buffer h dst src offset
t b index))
//--
//-- StoreBe64_buffer
val va_code_StoreBe64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_code
val va_codegen_success_StoreBe64_buffer : h:va_operand_heaplet -> dst:va_operand_reg_opr64 ->
src:va_operand_reg_opr64 -> offset:int -> t:taint -> Tot va_pbool
val va_lemma_StoreBe64_buffer : va_b0:va_code -> va_s0:va_state -> h:va_operand_heaplet ->
dst:va_operand_reg_opr64 -> src:va_operand_reg_opr64 -> offset:int -> t:taint -> b:buffer64 ->
index:int
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_StoreBe64_buffer h dst src offset t) va_s0 /\
va_is_dst_heaplet h va_s0 /\ va_is_src_reg_opr64 dst va_s0 /\ va_is_src_reg_opr64 src va_s0 /\
va_get_ok va_s0 /\ movbe_enabled /\ Vale.X64.Decls.valid_dst_addr #Vale.X64.Memory.vuint64
(va_eval_heaplet va_s0 h) b index /\ Vale.X64.Memory.valid_layout_buffer
#Vale.X64.Memory.vuint64 b (va_get_mem_layout va_s0) (va_eval_heaplet va_s0 h) true /\
Vale.X64.Memory.valid_taint_buf64 b (va_eval_heaplet va_s0 h) ((va_get_mem_layout
va_s0).vl_taint) t /\ va_eval_reg_opr64 va_s0 dst + offset == Vale.X64.Memory.buffer_addr
#Vale.X64.Memory.vuint64 b (va_eval_heaplet va_s0 h) + 8 `op_Multiply` index))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_eval_heaplet va_sM h == buffer64_write b index (Vale.Def.Types_s.reverse_bytes_nat64
(va_eval_reg_opr64 va_s0 src)) (va_eval_heaplet va_s0 h) /\ va_state_eq va_sM (va_update_mem
va_sM (va_update_ok va_sM (va_update_operand_heaplet h va_sM va_s0)))))
[@ va_qattr]
let va_wp_StoreBe64_buffer (h:va_operand_heaplet) (dst:va_operand_reg_opr64)
(src:va_operand_reg_opr64) (offset:int) (t:taint) (b:buffer64) (index:int) (va_s0:va_state) | false | true | Vale.X64.InsMem.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_StoreBe64_buffer
(h: va_operand_heaplet)
(dst src: va_operand_reg_opr64)
(offset: int)
(t: taint)
(b: buffer64)
(index: int)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.X64.InsMem.va_wp_StoreBe64_buffer | {
"file_name": "obj/Vale.X64.InsMem.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Vale.X64.Decls.va_operand_heaplet ->
dst: Vale.X64.Decls.va_operand_reg_opr64 ->
src: Vale.X64.Decls.va_operand_reg_opr64 ->
offset: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint ->
b: Vale.X64.Memory.buffer64 ->
index: Prims.int ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 51,
"end_line": 398,
"start_col": 2,
"start_line": 388
} |
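The va_wp_* definitions above are weakest-precondition transformers written in continuation style: they conjoin the instruction's preconditions with a universally quantified clause stating that every post-state va_sM the instruction may produce satisfies the continuation va_k. A toy analogue for a deterministic store on a dictionary-modelled heap (Python, purely illustrative; none of these names come from Vale):

    # Toy analogue of the wp shape: wp(s0, k) = pre(s0) and k(s1) for the updated state s1,
    # mirroring "preconditions /\ (forall va_x_... . ... ==> va_k va_sM ())" above.
    def wp_store(addr, value, s0, k):
        pre = addr in s0                 # stand-in for the validity/taint preconditions
        s1 = dict(s0)                    # the single post-state (va_sM)
        s1[addr] = value
        return pre and k(s1)

    assert wp_store("x", 7, {"x": 0}, lambda s: s["x"] == 7)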
Prims.Tot | val aff_point_at_infinity:aff_point | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aff_point_at_infinity : aff_point = (zero, one) | val aff_point_at_infinity:aff_point
let aff_point_at_infinity:aff_point = | false | null | false | (zero, one) | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"FStar.Pervasives.Native.Mktuple2",
"Spec.Curve25519.elem",
"Spec.Curve25519.zero",
"Spec.Curve25519.one"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero
let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p)
// let is_on_curve_ext (p:ext_point) =
// let _X, _Y, _Z, _T = p in
// _Y *% _Y -% _X *% _X == _Z *% _Z +% d *% _T *% _T
// _Y *% _Y -% _X *% _X == _Z *% _Z +% d *% _T *% _T
// let to_ext_point (p:aff_point) : ext_point =
// let x, y = p in
// (x, y, one, x *% y)
/// Point addition and doubling in affine coordinates
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3
let aff_point_double (p:aff_point) : aff_point =
let x, y = p in
let x3 = (2 *% x *% y) /% (y *% y -% x *% x) in
let y3 = (y *% y +% x *% x) /% (2 -% y *% y +% x *% x) in
x3, y3 | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aff_point_at_infinity:aff_point | [] | Spec.Ed25519.PointOps.aff_point_at_infinity | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Spec.Ed25519.PointOps.aff_point | {
"end_col": 51,
"end_line": 67,
"start_col": 40,
"start_line": 67
} |
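(zero, one) is the neutral element of the affine addition defined in this file: instantiating aff_point_add with (x2, y2) = (0, 1) reduces both numerators and denominators so the result is (x1, y1). A quick numeric check with the constants from the file (Python sketch; inv and aff_add are illustrative names, and division is realised as a Fermat inverse):

    # Sketch: (0, 1) acts as the identity for the affine addition formulas above.
    p  = 2**255 - 19   # Spec.Curve25519 prime
    d  = 37095705934669439343138083508754565189542113879843219016388785533085940283555
    gx = 15112221349535400772501151409588531511454012693041857206046113283949847762202
    gy = 46316835694926478169428394003475163141307993866256225615783033603165251855960

    def inv(x):                          # modular inverse via Fermat, x != 0 mod p
        return pow(x, p - 2, p)

    def aff_add(P, Q):
        (x1, y1), (x2, y2) = P, Q
        t  = d * x1 * x2 * y1 * y2 % p
        x3 = (x1 * y2 + y1 * x2) * inv((1 + t) % p) % p
        y3 = (y1 * y2 + x1 * x2) * inv((1 - t) % p) % p
        return (x3, y3)

    assert aff_add((gx, gy), (0, 1)) == (gx, gy)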
Prims.Tot | val point_at_infinity:ext_point | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let point_at_infinity: ext_point = (zero, one, one, zero) | val point_at_infinity:ext_point
let point_at_infinity:ext_point = | false | null | false | (zero, one, one, zero) | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"FStar.Pervasives.Native.Mktuple4",
"Spec.Curve25519.elem",
"Spec.Curve25519.zero",
"Spec.Curve25519.one"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero
let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p)
// let is_on_curve_ext (p:ext_point) =
// let _X, _Y, _Z, _T = p in
// _Y *% _Y -% _X *% _X == _Z *% _Z +% d *% _T *% _T
// let to_ext_point (p:aff_point) : ext_point =
// let x, y = p in
// (x, y, one, x *% y)
/// Point addition and doubling in affine coordinates
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3
let aff_point_double (p:aff_point) : aff_point =
let x, y = p in
let x3 = (2 *% x *% y) /% (y *% y -% x *% x) in
let y3 = (y *% y +% x *% x) /% (2 -% y *% y +% x *% x) in
x3, y3
let aff_point_at_infinity : aff_point = (zero, one)
let aff_point_negate (p:aff_point) : aff_point =
let x, y = p in
((-x) % prime, y)
/// Point addition and doubling in Extended Twisted Edwards Coordinates
let point_add (p:ext_point) (q:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let _X2, _Y2, _Z2, _T2 = q in
let a = (_Y1 -% _X1) *% (_Y2 -% _X2) in
let b = (_Y1 +% _X1) *% (_Y2 +% _X2) in
let c = (2 *% d *% _T1) *% _T2 in
let d = (2 *% _Z1) *% _Z2 in
let e = b -% a in
let f = d -% c in
let g = d +% c in
let h = b +% a in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3)
let point_double (p:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let a = _X1 *% _X1 in
let b = _Y1 *% _Y1 in
let c = 2 *% (_Z1 *% _Z1) in
let h = a +% b in
let e = h -% ((_X1 +% _Y1) *% (_X1 +% _Y1)) in
let g = a -% b in
let f = c +% g in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3) | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val point_at_infinity:ext_point | [] | Spec.Ed25519.PointOps.point_at_infinity | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Spec.Ed25519.PointOps.ext_point | {
"end_col": 57,
"end_line": 108,
"start_col": 35,
"start_line": 108
} |
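In extended coordinates the identity is (X, Y, Z, T) = (zero, one, one, zero): it projects to the affine identity (0, 1) under to_aff_point, and it satisfies the is_ext side conditions T == X *% Y /% Z and Z <> zero. A small check (Python sketch, illustrative only):

    # Sketch: (0, 1, 1, 0) projects to (0, 1) and satisfies T == X*Y/Z with Z != 0.
    p = 2**255 - 19
    X, Y, Z, T = 0, 1, 1, 0
    zi = pow(Z, p - 2, p)                # inverse of Z, here simply 1
    assert (X * zi % p, Y * zi % p) == (0, 1)
    assert T == X * Y * zi % p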
Prims.Tot | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p) | let point_inv (p: ext_point) = | false | null | false | is_ext p /\ is_on_curve (to_aff_point p) | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Spec.Ed25519.PointOps.ext_point",
"Prims.l_and",
"Spec.Ed25519.PointOps.is_ext",
"Spec.Ed25519.PointOps.is_on_curve",
"Spec.Ed25519.PointOps.to_aff_point",
"Prims.logical"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val point_inv : p: Spec.Ed25519.PointOps.ext_point -> Prims.logical | [] | Spec.Ed25519.PointOps.point_inv | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Spec.Ed25519.PointOps.ext_point -> Prims.logical | {
"end_col": 42,
"end_line": 42,
"start_col": 2,
"start_line": 42
} |
|
Prims.Tot | val to_aff_point (p: ext_point) : aff_point | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z | val to_aff_point (p: ext_point) : aff_point
let to_aff_point (p: ext_point) : aff_point = | false | null | false | let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Spec.Ed25519.PointOps.ext_point",
"Spec.Curve25519.elem",
"FStar.Pervasives.Native.Mktuple2",
"Spec.Curve25519.op_Slash_Percent",
"Spec.Ed25519.PointOps.aff_point"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y) | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val to_aff_point (p: ext_point) : aff_point | [] | Spec.Ed25519.PointOps.to_aff_point | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Spec.Ed25519.PointOps.ext_point -> Spec.Ed25519.PointOps.aff_point | {
"end_col": 20,
"end_line": 35,
"start_col": 44,
"start_line": 33
} |
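to_aff_point normalises homogeneous extended coordinates by dividing the X and Y components by Z, which is_ext guarantees to be nonzero; in the field, that division is multiplication by Z^(p-2). A compact Python sketch (illustrative; to_aff is not a HACL* name):

    # Sketch: projective-to-affine conversion, with division realised as a modular inverse.
    p = 2**255 - 19
    def to_aff(P):
        X, Y, Z, T = P
        zi = pow(Z, p - 2, p)            # requires Z != 0 (the is_ext condition)
        return (X * zi % p, Y * zi % p)

    assert to_aff((0, 1, 1, 0)) == (0, 1)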
Prims.Tot | val aff_g:aff_point | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aff_g : aff_point = (g_x, g_y) | val aff_g:aff_point
let aff_g:aff_point = | false | null | false | (g_x, g_y) | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"FStar.Pervasives.Native.Mktuple2",
"Spec.Curve25519.elem",
"Spec.Ed25519.PointOps.g_x",
"Spec.Ed25519.PointOps.g_y"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960 | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aff_g:aff_point | [] | Spec.Ed25519.PointOps.aff_g | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Spec.Ed25519.PointOps.aff_point | {
"end_col": 34,
"end_line": 25,
"start_col": 24,
"start_line": 25
} |
Prims.Tot | val d:elem | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x | val d:elem
let d:elem = | false | null | false | let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm (x < prime);
x | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.b2t",
"Prims.op_LessThan",
"Spec.Curve25519.prime",
"Prims.int"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0 | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val d:elem | [] | Spec.Ed25519.PointOps.d | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Spec.Curve25519.elem | {
"end_col": 3,
"end_line": 20,
"start_col": 14,
"start_line": 17
} |
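
The `assert_norm (x < prime)` above only establishes that the literal is a valid field element; the value itself is the RFC 8032 constant d = -121665/121666 in GF(2^255 - 19). A small Python check of that characterisation (an illustration; the -121665/121666 form comes from RFC 8032, not from this file):

    p = 2**255 - 19
    d = 37095705934669439343138083508754565189542113879843219016388785533085940283555
    assert d < p                                       # what assert_norm checks in the F* code
    assert d == (-121665 * pow(121666, p - 2, p)) % p  # RFC 8032: d = -121665/121666 (mod p)
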
Prims.Tot | val g_x:elem | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202 | val g_x:elem
let g_x:elem = | false | null | false | 15112221349535400772501151409588531511454012693041857206046113283949847762202 | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val g_x:elem | [] | Spec.Ed25519.PointOps.g_x | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Spec.Curve25519.elem | {
"end_col": 94,
"end_line": 22,
"start_col": 17,
"start_line": 22
} |
Prims.Tot | val g_y:elem | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960 | val g_y:elem
let g_y:elem = | false | null | false | 46316835694926478169428394003475163141307993866256225615783033603165251855960 | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val g_y:elem | [] | Spec.Ed25519.PointOps.g_y | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Spec.Curve25519.elem | {
"end_col": 94,
"end_line": 23,
"start_col": 17,
"start_line": 23
} |
Prims.Tot | val modp_sqrt_m1:elem | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0 | val modp_sqrt_m1:elem
let modp_sqrt_m1:elem = | false | null | false | 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0 | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modp_sqrt_m1:elem | [] | Spec.Ed25519.PointOps.modp_sqrt_m1 | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Spec.Curve25519.elem | {
"end_col": 92,
"end_line": 15,
"start_col": 26,
"start_line": 15
} |
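
The comment `(* 2 **% ((prime - 1) / 4) *)` records where the hex constant comes from: prime ≡ 5 (mod 8), so 2 is a quadratic non-residue and 2^((prime-1)/4) is a square root of -1, which `recover_x` later uses to fix up its candidate root. A quick Python sanity check of both facts (an illustrative sketch, not part of the verified spec):

    p = 2**255 - 19
    modp_sqrt_m1 = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
    assert modp_sqrt_m1 == pow(2, (p - 1) // 4, p)  # matches the comment in the spec
    assert pow(modp_sqrt_m1, 2, p) == p - 1         # i.e. modp_sqrt_m1^2 = -1 (mod p)
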
Prims.Tot | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero | let is_ext (p: ext_point) = | false | null | false | let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Spec.Ed25519.PointOps.ext_point",
"Spec.Curve25519.elem",
"Prims.l_and",
"Prims.eq2",
"Spec.Curve25519.op_Slash_Percent",
"Spec.Curve25519.op_Star_Percent",
"Prims.b2t",
"Prims.op_disEquality",
"Spec.Curve25519.zero",
"Prims.logical"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val is_ext : p: Spec.Ed25519.PointOps.ext_point -> Prims.logical | [] | Spec.Ed25519.PointOps.is_ext | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Spec.Ed25519.PointOps.ext_point -> Prims.logical | {
"end_col": 36,
"end_line": 39,
"start_col": 26,
"start_line": 37
} |
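
`is_ext` is the representation invariant of extended coordinates: the cached T coordinate must equal X*Y/Z and Z must be non-zero; `point_inv` (visible in the fuller file contexts of later entries) conjoins it with `is_on_curve (to_aff_point p)`. A hypothetical Python rendering of the two checks, applied to the base point g (helper names are mine, constants are copied from the spec):

    p = 2**255 - 19
    d = 37095705934669439343138083508754565189542113879843219016388785533085940283555
    def finv(z): return pow(z, p - 2, p)

    def is_ext(P):
        X, Y, Z, T = P
        return T == X * Y * finv(Z) % p and Z % p != 0

    def is_on_curve(x, y):                # -x^2 + y^2 = 1 + d*x^2*y^2
        return (y * y - x * x) % p == (1 + d * (x * x) * (y * y)) % p

    g_x = 15112221349535400772501151409588531511454012693041857206046113283949847762202
    g_y = 46316835694926478169428394003475163141307993866256225615783033603165251855960
    g = (g_x, g_y, 1, g_x * g_y % p)
    assert is_ext(g) and is_on_curve(g_x, g_y)   # i.e. point_inv g holds
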
|
Prims.Tot | val aff_point_add (p q: aff_point) : aff_point | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3 | val aff_point_add (p q: aff_point) : aff_point
let aff_point_add (p q: aff_point) : aff_point = | false | null | false | let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3 | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Spec.Ed25519.PointOps.aff_point",
"Spec.Curve25519.elem",
"FStar.Pervasives.Native.Mktuple2",
"Spec.Curve25519.op_Slash_Percent",
"Spec.Curve25519.op_Plus_Percent",
"Spec.Curve25519.op_Star_Percent",
"Spec.Curve25519.op_Subtraction_Percent",
"Spec.Ed25519.PointOps.d"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero
let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p)
// let is_on_curve_ext (p:ext_point) =
// let _X, _Y, _Z, _T = p in
// _Y *% _Y -% X *% X == _Z *% _Z +% d *% _T *% _T
// let to_ext_point (p:aff_point) : ext_point =
// let x, y = p in
// (x, y, one, x *% y)
/// Point addition and doubling in affine coordinates | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aff_point_add (p q: aff_point) : aff_point | [] | Spec.Ed25519.PointOps.aff_point_add | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Spec.Ed25519.PointOps.aff_point -> q: Spec.Ed25519.PointOps.aff_point
-> Spec.Ed25519.PointOps.aff_point | {
"end_col": 8,
"end_line": 59,
"start_col": 59,
"start_line": 54
} |
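
The affine addition above is the complete twisted Edwards law for a = -1: by the standard completeness argument (d being a non-square), the denominators 1 ± d·x1·x2·y1·y2 never vanish for points on the curve, so no case split is needed. A direct Python transcription and a closure check on the base point (an illustration with the spec's constants; `fdiv` is my shorthand for the field division `/%`):

    p = 2**255 - 19
    d = 37095705934669439343138083508754565189542113879843219016388785533085940283555
    def fdiv(a, b): return a * pow(b, p - 2, p) % p

    def aff_point_add(P, Q):
        x1, y1 = P
        x2, y2 = Q
        x3 = fdiv(x1 * y2 + y1 * x2, 1 + d * (x1 * x2) * (y1 * y2))
        y3 = fdiv(y1 * y2 + x1 * x2, 1 - d * (x1 * x2) * (y1 * y2))
        return x3, y3

    def is_on_curve(P):
        x, y = P
        return (y * y - x * x) % p == (1 + d * (x * x) * (y * y)) % p

    aff_g = (15112221349535400772501151409588531511454012693041857206046113283949847762202,
             46316835694926478169428394003475163141307993866256225615783033603165251855960)
    assert is_on_curve(aff_point_add(aff_g, aff_g))  # the group law keeps points on the curve
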
Prims.Tot | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y) | let is_on_curve (p: aff_point) = | false | null | false | let x, y = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y) | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Spec.Ed25519.PointOps.aff_point",
"Spec.Curve25519.elem",
"Prims.eq2",
"Spec.Curve25519.op_Subtraction_Percent",
"Spec.Curve25519.op_Star_Percent",
"Spec.Curve25519.op_Plus_Percent",
"Spec.Ed25519.PointOps.d",
"Prims.logical"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y) | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val is_on_curve : p: Spec.Ed25519.PointOps.aff_point -> Prims.logical | [] | Spec.Ed25519.PointOps.is_on_curve | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Spec.Ed25519.PointOps.aff_point -> Prims.logical | {
"end_col": 52,
"end_line": 31,
"start_col": 31,
"start_line": 29
} |
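
The membership test is the a = -1 twisted Edwards equation -x^2 + y^2 = 1 + d*x^2*y^2, written with the subtraction on the left-hand side. A small Python check that the base point satisfies it and that a perturbed point does not (an illustration only, reusing the constants quoted in the file context above):

    p = 2**255 - 19
    d = 37095705934669439343138083508754565189542113879843219016388785533085940283555
    g_x = 15112221349535400772501151409588531511454012693041857206046113283949847762202
    g_y = 46316835694926478169428394003475163141307993866256225615783033603165251855960

    def is_on_curve(x, y):
        return (y * y - x * x) % p == (1 + d * (x * x) * (y * y)) % p

    assert is_on_curve(g_x, g_y)                 # aff_g lies on the curve
    assert not is_on_curve(g_x, (g_y + 1) % p)   # a nearby non-point fails the test
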
|
Prims.Tot | val aff_point_double (p: aff_point) : aff_point | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aff_point_double (p:aff_point) : aff_point =
let x, y = p in
let x3 = (2 *% x *% y) /% (y *% y -% x *% x) in
let y3 = (y *% y +% x *% x) /% (2 -% y *% y +% x *% x) in
x3, y3 | val aff_point_double (p: aff_point) : aff_point
let aff_point_double (p: aff_point) : aff_point = | false | null | false | let x, y = p in
let x3 = (2 *% x *% y) /% (y *% y -% x *% x) in
let y3 = (y *% y +% x *% x) /% (2 -% y *% y +% x *% x) in
x3, y3 | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Spec.Ed25519.PointOps.aff_point",
"Spec.Curve25519.elem",
"FStar.Pervasives.Native.Mktuple2",
"Spec.Curve25519.op_Slash_Percent",
"Spec.Curve25519.op_Plus_Percent",
"Spec.Curve25519.op_Star_Percent",
"Spec.Curve25519.op_Subtraction_Percent"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero
let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p)
// let is_on_curve_ext (p:ext_point) =
// let _X, _Y, _Z, _T = p in
// _Y *% _Y -% X *% X == _Z *% _Z +% d *% _T *% _T
// let to_ext_point (p:aff_point) : ext_point =
// let x, y = p in
// (x, y, one, x *% y)
/// Point addition and doubling in affine coordinates
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3 | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aff_point_double (p: aff_point) : aff_point | [] | Spec.Ed25519.PointOps.aff_point_double | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Spec.Ed25519.PointOps.aff_point -> Spec.Ed25519.PointOps.aff_point | {
"end_col": 8,
"end_line": 65,
"start_col": 48,
"start_line": 61
} |
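
Doubling is the addition law specialised to p = q and simplified with the curve equation (d·x^2·y^2 = y^2 - x^2 - 1), which is why the constant d disappears and the denominators become y^2 - x^2 and 2 - y^2 + x^2. The Python sketch below checks, on the base point, that the specialised formula agrees with adding the point to itself (illustrative only; helper names are mine):

    p = 2**255 - 19
    d = 37095705934669439343138083508754565189542113879843219016388785533085940283555
    def fdiv(a, b): return a * pow(b, p - 2, p) % p

    def aff_point_add(P, Q):
        x1, y1 = P; x2, y2 = Q
        return (fdiv(x1 * y2 + y1 * x2, 1 + d * (x1 * x2) * (y1 * y2)),
                fdiv(y1 * y2 + x1 * x2, 1 - d * (x1 * x2) * (y1 * y2)))

    def aff_point_double(P):
        x, y = P
        return (fdiv(2 * x * y, y * y - x * x),
                fdiv(y * y + x * x, 2 - y * y + x * x))

    aff_g = (15112221349535400772501151409588531511454012693041857206046113283949847762202,
             46316835694926478169428394003475163141307993866256225615783033603165251855960)
    assert aff_point_double(aff_g) == aff_point_add(aff_g, aff_g)
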
Prims.Tot | val g:ext_point | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let g: ext_point = (g_x, g_y, 1, g_x *% g_y) | val g:ext_point
let g:ext_point = | false | null | false | (g_x, g_y, 1, g_x *% g_y) | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"FStar.Pervasives.Native.Mktuple4",
"Spec.Curve25519.elem",
"Spec.Ed25519.PointOps.g_x",
"Spec.Ed25519.PointOps.g_y",
"Spec.Curve25519.op_Star_Percent"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960 | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val g:ext_point | [] | Spec.Ed25519.PointOps.g | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Spec.Ed25519.PointOps.ext_point | {
"end_col": 44,
"end_line": 26,
"start_col": 19,
"start_line": 26
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let point_equal (p:ext_point) (q:ext_point) =
let px, py, pz, pt = p in
let qx, qy, qz, qt = q in
if ((px *% qz) <> (qx *% pz)) then false
else if ((py *% qz) <> (qy *% pz)) then false
else true | let point_equal (p q: ext_point) = | false | null | false | let px, py, pz, pt = p in
let qx, qy, qz, qt = q in
if ((px *% qz) <> (qx *% pz)) then false else if ((py *% qz) <> (qy *% pz)) then false else true | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Spec.Ed25519.PointOps.ext_point",
"Spec.Curve25519.elem",
"Prims.op_disEquality",
"Spec.Curve25519.op_Star_Percent",
"Prims.bool"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero
let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p)
// let is_on_curve_ext (p:ext_point) =
// let _X, _Y, _Z, _T = p in
// _Y *% _Y -% X *% X == _Z *% _Z +% d *% _T *% _T
// let to_ext_point (p:aff_point) : ext_point =
// let x, y = p in
// (x, y, one, x *% y)
/// Point addition and doubling in affine coordinates
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3
let aff_point_double (p:aff_point) : aff_point =
let x, y = p in
let x3 = (2 *% x *% y) /% (y *% y -% x *% x) in
let y3 = (y *% y +% x *% x) /% (2 -% y *% y +% x *% x) in
x3, y3
let aff_point_at_infinity : aff_point = (zero, one)
let aff_point_negate (p:aff_point) : aff_point =
let x, y = p in
((-x) % prime, y)
/// Point addition and doubling in Extended Twisted Edwards Coordinates
let point_add (p:ext_point) (q:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let _X2, _Y2, _Z2, _T2 = q in
let a = (_Y1 -% _X1) *% (_Y2 -% _X2) in
let b = (_Y1 +% _X1) *% (_Y2 +% _X2) in
let c = (2 *% d *% _T1) *% _T2 in
let d = (2 *% _Z1) *% _Z2 in
let e = b -% a in
let f = d -% c in
let g = d +% c in
let h = b +% a in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3)
let point_double (p:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let a = _X1 *% _X1 in
let b = _Y1 *% _Y1 in
let c = 2 *% (_Z1 *% _Z1) in
let h = a +% b in
let e = h -% ((_X1 +% _Y1) *% (_X1 +% _Y1)) in
let g = a -% b in
let f = c +% g in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3)
let point_at_infinity: ext_point = (zero, one, one, zero)
let point_negate (p:ext_point) : ext_point =
let _X, _Y, _Z, _T = p in
((-_X) % prime, _Y, _Z, (-_T) % prime)
let point_compress (p:ext_point) : Tot (BSeq.lbytes 32) =
let px, py, pz, pt = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
BSeq.nat_to_bytes_le 32 (pow2 255 * (x % 2) + y)
let recover_x (y:nat) (sign:bool) : Tot (option elem) =
if y >= prime then None
else (
let y2 = y *% y in
let x2 = (y2 -% one) *% (finv ((d *% y2) +% one)) in
if x2 = zero then (
if sign then None
else Some zero)
else (
let x = x2 **% ((prime + 3) / 8) in
let x = if ((x *% x) -% x2) <> zero then x *% modp_sqrt_m1 else x in
if ((x *% x) -% x2) <> zero then None
else (
let x = if (x % 2 = 1) <> sign then (prime - x) % prime else x in
Some x)))
let point_decompress (s:BSeq.lbytes 32) : Tot (option ext_point) =
let y = BSeq.nat_from_bytes_le s in
let sign = (y / pow2 255) % 2 = 1 in
let y = y % pow2 255 in
let x = recover_x y sign in
match x with
| Some x -> Some (x, y, one, x *% y)
| _ -> None | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val point_equal : p: Spec.Ed25519.PointOps.ext_point -> q: Spec.Ed25519.PointOps.ext_point -> Prims.bool | [] | Spec.Ed25519.PointOps.point_equal | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Spec.Ed25519.PointOps.ext_point -> q: Spec.Ed25519.PointOps.ext_point -> Prims.bool | {
"end_col": 11,
"end_line": 152,
"start_col": 45,
"start_line": 147
} |
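
`point_equal` avoids field inversions by cross-multiplying: two representatives (X1, Y1, Z1, _) and (X2, Y2, Z2, _) denote the same affine point exactly when X1·Z2 = X2·Z1 and Y1·Z2 = Y2·Z1; the T coordinates are not consulted. A hypothetical Python rendering, showing that rescaling all coordinates by a non-zero factor does not change the outcome:

    p = 2**255 - 19

    def point_equal(P, Q):
        px, py, pz, pt = P
        qx, qy, qz, qt = Q
        if px * qz % p != qx * pz % p: return False
        if py * qz % p != qy * pz % p: return False
        return True

    g_x = 15112221349535400772501151409588531511454012693041857206046113283949847762202
    g_y = 46316835694926478169428394003475163141307993866256225615783033603165251855960
    g = (g_x, g_y, 1, g_x * g_y % p)
    c = 12345                                    # any non-zero scalar
    g_scaled = tuple(v * c % p for v in g)       # same affine point, different representative
    assert point_equal(g, g_scaled)
    assert not point_equal(g, (g_y, g_x, 1, 0))  # a genuinely different point compares unequal
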
|
Prims.Tot | val point_double (p: ext_point) : Tot ext_point | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let point_double (p:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let a = _X1 *% _X1 in
let b = _Y1 *% _Y1 in
let c = 2 *% (_Z1 *% _Z1) in
let h = a +% b in
let e = h -% ((_X1 +% _Y1) *% (_X1 +% _Y1)) in
let g = a -% b in
let f = c +% g in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3) | val point_double (p: ext_point) : Tot ext_point
let point_double (p: ext_point) : Tot ext_point = | false | null | false | let _X1, _Y1, _Z1, _T1 = p in
let a = _X1 *% _X1 in
let b = _Y1 *% _Y1 in
let c = 2 *% (_Z1 *% _Z1) in
let h = a +% b in
let e = h -% ((_X1 +% _Y1) *% (_X1 +% _Y1)) in
let g = a -% b in
let f = c +% g in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3) | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Spec.Ed25519.PointOps.ext_point",
"Spec.Curve25519.elem",
"FStar.Pervasives.Native.Mktuple4",
"Spec.Curve25519.op_Star_Percent",
"Spec.Curve25519.op_Plus_Percent",
"Spec.Curve25519.op_Subtraction_Percent"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero
let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p)
// let is_on_curve_ext (p:ext_point) =
// let _X, _Y, _Z, _T = p in
// _Y *% _Y -% X *% X == _Z *% _Z +% d *% _T *% _T
// let to_ext_point (p:aff_point) : ext_point =
// let x, y = p in
// (x, y, one, x *% y)
/// Point addition and doubling in affine coordinates
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3
let aff_point_double (p:aff_point) : aff_point =
let x, y = p in
let x3 = (2 *% x *% y) /% (y *% y -% x *% x) in
let y3 = (y *% y +% x *% x) /% (2 -% y *% y +% x *% x) in
x3, y3
let aff_point_at_infinity : aff_point = (zero, one)
let aff_point_negate (p:aff_point) : aff_point =
let x, y = p in
((-x) % prime, y)
/// Point addition and doubling in Extended Twisted Edwards Coordinates
let point_add (p:ext_point) (q:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let _X2, _Y2, _Z2, _T2 = q in
let a = (_Y1 -% _X1) *% (_Y2 -% _X2) in
let b = (_Y1 +% _X1) *% (_Y2 +% _X2) in
let c = (2 *% d *% _T1) *% _T2 in
let d = (2 *% _Z1) *% _Z2 in
let e = b -% a in
let f = d -% c in
let g = d +% c in
let h = b +% a in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3) | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val point_double (p: ext_point) : Tot ext_point | [] | Spec.Ed25519.PointOps.point_double | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Spec.Ed25519.PointOps.ext_point -> Spec.Ed25519.PointOps.ext_point | {
"end_col": 22,
"end_line": 106,
"start_col": 48,
"start_line": 93
} |
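
These are the usual extended-coordinates doubling variables (a, b, c, h, e, g, f), arranged so that the result again carries T3 = X3·Y3/Z3. One would expect the doubling to commute with the affine projection, and the Python sketch below checks exactly that on the base point (an illustration; `to_aff`, `fdiv` and `gE` are my own helper names):

    p = 2**255 - 19
    def finv(z): return pow(z, p - 2, p)
    def fdiv(a, b): return a * finv(b) % p

    def point_double(P):                  # mirrors the F* definition above
        X1, Y1, Z1, T1 = P
        a = X1 * X1 % p
        b = Y1 * Y1 % p
        c = 2 * Z1 * Z1 % p
        h = (a + b) % p
        e = (h - (X1 + Y1) * (X1 + Y1)) % p
        g = (a - b) % p
        f = (c + g) % p
        return (e * f % p, g * h % p, f * g % p, e * h % p)   # (X3, Y3, Z3, T3)

    def to_aff(P):
        X, Y, Z, T = P
        return (fdiv(X, Z), fdiv(Y, Z))

    def aff_point_double(P):              # the affine doubling from earlier in the file
        x, y = P
        return (fdiv(2 * x * y, y * y - x * x),
                fdiv(y * y + x * x, 2 - y * y + x * x))

    g_x = 15112221349535400772501151409588531511454012693041857206046113283949847762202
    g_y = 46316835694926478169428394003475163141307993866256225615783033603165251855960
    gE = (g_x, g_y, 1, g_x * g_y % p)
    assert to_aff(point_double(gE)) == aff_point_double(to_aff(gE))
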
Prims.Tot | val point_add (p q: ext_point) : Tot ext_point | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let point_add (p:ext_point) (q:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let _X2, _Y2, _Z2, _T2 = q in
let a = (_Y1 -% _X1) *% (_Y2 -% _X2) in
let b = (_Y1 +% _X1) *% (_Y2 +% _X2) in
let c = (2 *% d *% _T1) *% _T2 in
let d = (2 *% _Z1) *% _Z2 in
let e = b -% a in
let f = d -% c in
let g = d +% c in
let h = b +% a in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3) | val point_add (p q: ext_point) : Tot ext_point
let point_add (p q: ext_point) : Tot ext_point = | false | null | false | let _X1, _Y1, _Z1, _T1 = p in
let _X2, _Y2, _Z2, _T2 = q in
let a = (_Y1 -% _X1) *% (_Y2 -% _X2) in
let b = (_Y1 +% _X1) *% (_Y2 +% _X2) in
let c = (2 *% d *% _T1) *% _T2 in
let d = (2 *% _Z1) *% _Z2 in
let e = b -% a in
let f = d -% c in
let g = d +% c in
let h = b +% a in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3) | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Spec.Ed25519.PointOps.ext_point",
"Spec.Curve25519.elem",
"FStar.Pervasives.Native.Mktuple4",
"Spec.Curve25519.op_Star_Percent",
"Spec.Curve25519.op_Plus_Percent",
"Spec.Curve25519.op_Subtraction_Percent",
"Spec.Ed25519.PointOps.d"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero
let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p)
// let is_on_curve_ext (p:ext_point) =
// let _X, _Y, _Z, _T = p in
// _Y *% _Y -% X *% X == _Z *% _Z +% d *% _T *% _T
// let to_ext_point (p:aff_point) : ext_point =
// let x, y = p in
// (x, y, one, x *% y)
/// Point addition and doubling in affine coordinates
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3
let aff_point_double (p:aff_point) : aff_point =
let x, y = p in
let x3 = (2 *% x *% y) /% (y *% y -% x *% x) in
let y3 = (y *% y +% x *% x) /% (2 -% y *% y +% x *% x) in
x3, y3
let aff_point_at_infinity : aff_point = (zero, one)
let aff_point_negate (p:aff_point) : aff_point =
let x, y = p in
((-x) % prime, y)
/// Point addition and doubling in Extended Twisted Edwards Coordinates | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val point_add (p q: ext_point) : Tot ext_point | [] | Spec.Ed25519.PointOps.point_add | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Spec.Ed25519.PointOps.ext_point -> q: Spec.Ed25519.PointOps.ext_point
-> Spec.Ed25519.PointOps.ext_point | {
"end_col": 22,
"end_line": 91,
"start_col": 59,
"start_line": 76
} |
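
One subtle point in `point_add`: the local `let d = (2 *% _Z1) *% _Z2` shadows the curve constant d, which is read one line earlier to build `c`; from that line on, `d` means 2·Z1·Z2. The Python sketch below keeps the two apart (`d_curve` vs `dd`, both names mine) and checks that the extended addition projects to the affine addition law on the base point (illustrative only, not the verified development):

    p = 2**255 - 19
    d_curve = 37095705934669439343138083508754565189542113879843219016388785533085940283555
    def finv(z): return pow(z, p - 2, p)
    def fdiv(a, b): return a * finv(b) % p

    def point_add(P, Q):
        X1, Y1, Z1, T1 = P
        X2, Y2, Z2, T2 = Q
        a = (Y1 - X1) * (Y2 - X2) % p
        b = (Y1 + X1) * (Y2 + X2) % p
        c = 2 * d_curve * T1 * T2 % p     # uses the curve constant d ...
        dd = 2 * Z1 * Z2 % p              # ... which the F* code then shadows with this local d
        e, f, g, h = (b - a) % p, (dd - c) % p, (dd + c) % p, (b + a) % p
        return (e * f % p, g * h % p, f * g % p, e * h % p)   # (X3, Y3, Z3, T3)

    def to_aff(P):
        X, Y, Z, T = P
        return (fdiv(X, Z), fdiv(Y, Z))

    def aff_point_add(P, Q):
        x1, y1 = P; x2, y2 = Q
        return (fdiv(x1 * y2 + y1 * x2, 1 + d_curve * (x1 * x2) * (y1 * y2)),
                fdiv(y1 * y2 + x1 * x2, 1 - d_curve * (x1 * x2) * (y1 * y2)))

    g_x = 15112221349535400772501151409588531511454012693041857206046113283949847762202
    g_y = 46316835694926478169428394003475163141307993866256225615783033603165251855960
    gE = (g_x, g_y, 1, g_x * g_y % p)
    assert to_aff(point_add(gE, gE)) == aff_point_add(to_aff(gE), to_aff(gE))
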
Prims.Tot | val point_negate (p: ext_point) : ext_point | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let point_negate (p:ext_point) : ext_point =
let _X, _Y, _Z, _T = p in
((-_X) % prime, _Y, _Z, (-_T) % prime) | val point_negate (p: ext_point) : ext_point
let point_negate (p: ext_point) : ext_point = | false | null | false | let _X, _Y, _Z, _T = p in
((- _X) % prime, _Y, _Z, (- _T) % prime) | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Spec.Ed25519.PointOps.ext_point",
"Spec.Curve25519.elem",
"FStar.Pervasives.Native.Mktuple4",
"Prims.op_Modulus",
"Prims.op_Minus",
"Spec.Curve25519.prime"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero
let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p)
// let is_on_curve_ext (p:ext_point) =
// let _X, _Y, _Z, _T = p in
// _Y *% _Y -% X *% X == _Z *% _Z +% d *% _T *% _T
// let to_ext_point (p:aff_point) : ext_point =
// let x, y = p in
// (x, y, one, x *% y)
/// Point addition and doubling in affine coordinates
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3
let aff_point_double (p:aff_point) : aff_point =
let x, y = p in
let x3 = (2 *% x *% y) /% (y *% y -% x *% x) in
let y3 = (y *% y +% x *% x) /% (2 -% y *% y +% x *% x) in
x3, y3
let aff_point_at_infinity : aff_point = (zero, one)
let aff_point_negate (p:aff_point) : aff_point =
let x, y = p in
((-x) % prime, y)
/// Point addition and doubling in Extended Twisted Edwards Coordinates
let point_add (p:ext_point) (q:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let _X2, _Y2, _Z2, _T2 = q in
let a = (_Y1 -% _X1) *% (_Y2 -% _X2) in
let b = (_Y1 +% _X1) *% (_Y2 +% _X2) in
let c = (2 *% d *% _T1) *% _T2 in
let d = (2 *% _Z1) *% _Z2 in
let e = b -% a in
let f = d -% c in
let g = d +% c in
let h = b +% a in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3)
let point_double (p:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let a = _X1 *% _X1 in
let b = _Y1 *% _Y1 in
let c = 2 *% (_Z1 *% _Z1) in
let h = a +% b in
let e = h -% ((_X1 +% _Y1) *% (_X1 +% _Y1)) in
let g = a -% b in
let f = c +% g in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3)
let point_at_infinity: ext_point = (zero, one, one, zero) | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val point_negate (p: ext_point) : ext_point | [] | Spec.Ed25519.PointOps.point_negate | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Spec.Ed25519.PointOps.ext_point -> Spec.Ed25519.PointOps.ext_point | {
"end_col": 40,
"end_line": 112,
"start_col": 44,
"start_line": 110
} |
Prims.Tot | val aff_point_negate (p: aff_point) : aff_point | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aff_point_negate (p:aff_point) : aff_point =
let x, y = p in
((-x) % prime, y) | val aff_point_negate (p: aff_point) : aff_point
let aff_point_negate (p: aff_point) : aff_point = | false | null | false | let x, y = p in
((- x) % prime, y) | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Spec.Ed25519.PointOps.aff_point",
"Spec.Curve25519.elem",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Modulus",
"Prims.op_Minus",
"Spec.Curve25519.prime"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero
let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p)
// let is_on_curve_ext (p:ext_point) =
// let _X, _Y, _Z, _T = p in
// _Y *% _Y -% X *% X == _Z *% _Z +% d *% _T *% _T
// let to_ext_point (p:aff_point) : ext_point =
// let x, y = p in
// (x, y, one, x *% y)
/// Point addition and doubling in affine coordinates
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3
let aff_point_double (p:aff_point) : aff_point =
let x, y = p in
let x3 = (2 *% x *% y) /% (y *% y -% x *% x) in
let y3 = (y *% y +% x *% x) /% (2 -% y *% y +% x *% x) in
x3, y3
let aff_point_at_infinity : aff_point = (zero, one) | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aff_point_negate (p: aff_point) : aff_point | [] | Spec.Ed25519.PointOps.aff_point_negate | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Spec.Ed25519.PointOps.aff_point -> Spec.Ed25519.PointOps.aff_point | {
"end_col": 19,
"end_line": 71,
"start_col": 48,
"start_line": 69
} |
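Why aff_point_negate stays on the curve (an informal note derived from the is_on_curve predicate visible in the file_context, not from the row itself): the twisted Edwards equation y^2 - x^2 = 1 + d x^2 y^2 depends on x only through x^2, so

\[ y^2 - (-x)^2 = y^2 - x^2 \quad\text{and}\quad 1 + d\,(-x)^2 y^2 = 1 + d\,x^2 y^2, \]

hence ((-x) mod p, y) satisfies is_on_curve exactly when (x, y) does.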
Prims.Tot | val point_compress (p: ext_point) : Tot (BSeq.lbytes 32) | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let point_compress (p:ext_point) : Tot (BSeq.lbytes 32) =
let px, py, pz, pt = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
BSeq.nat_to_bytes_le 32 (pow2 255 * (x % 2) + y) | val point_compress (p: ext_point) : Tot (BSeq.lbytes 32)
let point_compress (p: ext_point) : Tot (BSeq.lbytes 32) = | false | null | false | let px, py, pz, pt = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
BSeq.nat_to_bytes_le 32 (pow2 255 * (x % 2) + y) | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Spec.Ed25519.PointOps.ext_point",
"Spec.Curve25519.elem",
"Lib.ByteSequence.nat_to_bytes_le",
"Lib.IntTypes.SEC",
"Prims.op_Addition",
"FStar.Mul.op_Star",
"Prims.pow2",
"Prims.op_Modulus",
"Spec.Curve25519.op_Star_Percent",
"Spec.Curve25519.finv",
"Lib.ByteSequence.lbytes"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero
let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p)
// let is_on_curve_ext (p:ext_point) =
// let _X, _Y, _Z, _T = p in
// _Y *% _Y -% X *% X == _Z *% _Z +% d *% _T *% _T
// let to_ext_point (p:aff_point) : ext_point =
// let x, y = p in
// (x, y, one, x *% y)
/// Point addition and doubling in affine coordinates
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3
let aff_point_double (p:aff_point) : aff_point =
let x, y = p in
let x3 = (2 *% x *% y) /% (y *% y -% x *% x) in
let y3 = (y *% y +% x *% x) /% (2 -% y *% y +% x *% x) in
x3, y3
let aff_point_at_infinity : aff_point = (zero, one)
let aff_point_negate (p:aff_point) : aff_point =
let x, y = p in
((-x) % prime, y)
/// Point addition and doubling in Extended Twisted Edwards Coordinates
let point_add (p:ext_point) (q:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let _X2, _Y2, _Z2, _T2 = q in
let a = (_Y1 -% _X1) *% (_Y2 -% _X2) in
let b = (_Y1 +% _X1) *% (_Y2 +% _X2) in
let c = (2 *% d *% _T1) *% _T2 in
let d = (2 *% _Z1) *% _Z2 in
let e = b -% a in
let f = d -% c in
let g = d +% c in
let h = b +% a in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3)
let point_double (p:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let a = _X1 *% _X1 in
let b = _Y1 *% _Y1 in
let c = 2 *% (_Z1 *% _Z1) in
let h = a +% b in
let e = h -% ((_X1 +% _Y1) *% (_X1 +% _Y1)) in
let g = a -% b in
let f = c +% g in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3)
let point_at_infinity: ext_point = (zero, one, one, zero)
let point_negate (p:ext_point) : ext_point =
let _X, _Y, _Z, _T = p in
((-_X) % prime, _Y, _Z, (-_T) % prime) | false | false | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val point_compress (p: ext_point) : Tot (BSeq.lbytes 32) | [] | Spec.Ed25519.PointOps.point_compress | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | p: Spec.Ed25519.PointOps.ext_point -> Lib.ByteSequence.lbytes 32 | {
"end_col": 50,
"end_line": 120,
"start_col": 57,
"start_line": 115
} |
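The encoding computed by point_compress can be restated compactly (this mirrors the nat_to_bytes_le call in the definition and the standard RFC 8032 layout): with x = X/Z and y = Y/Z, the 32-byte output is the little-endian encoding of

\[ 2^{255}\cdot(x \bmod 2) + y, \]

that is, y occupies the low 255 bits and the parity ("sign") of x is stored in the top bit.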
Prims.Tot | val recover_x (y: nat) (sign: bool) : Tot (option elem) | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let recover_x (y:nat) (sign:bool) : Tot (option elem) =
if y >= prime then None
else (
let y2 = y *% y in
let x2 = (y2 -% one) *% (finv ((d *% y2) +% one)) in
if x2 = zero then (
if sign then None
else Some zero)
else (
let x = x2 **% ((prime + 3) / 8) in
let x = if ((x *% x) -% x2) <> zero then x *% modp_sqrt_m1 else x in
if ((x *% x) -% x2) <> zero then None
else (
let x = if (x % 2 = 1) <> sign then (prime - x) % prime else x in
Some x))) | val recover_x (y: nat) (sign: bool) : Tot (option elem)
let recover_x (y: nat) (sign: bool) : Tot (option elem) = | false | null | false | if y >= prime
then None
else
(let y2 = y *% y in
let x2 = (y2 -% one) *% (finv ((d *% y2) +% one)) in
if x2 = zero
then (if sign then None else Some zero)
else
(let x = x2 **% ((prime + 3) / 8) in
let x = if ((x *% x) -% x2) <> zero then x *% modp_sqrt_m1 else x in
if ((x *% x) -% x2) <> zero
then None
else
(let x = if (x % 2 = 1) <> sign then (prime - x) % prime else x in
Some x))) | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Prims.nat",
"Prims.bool",
"Prims.op_GreaterThanOrEqual",
"Spec.Curve25519.prime",
"FStar.Pervasives.Native.None",
"Spec.Curve25519.elem",
"Prims.op_Equality",
"Spec.Curve25519.zero",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.option",
"Prims.op_disEquality",
"Spec.Curve25519.op_Subtraction_Percent",
"Spec.Curve25519.op_Star_Percent",
"Prims.int",
"Prims.op_Modulus",
"Prims.op_Subtraction",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThan",
"Spec.Ed25519.PointOps.modp_sqrt_m1",
"Spec.Curve25519.op_Star_Star_Percent",
"Prims.op_Division",
"Prims.op_Addition",
"Spec.Curve25519.one",
"Spec.Curve25519.finv",
"Spec.Curve25519.op_Plus_Percent",
"Spec.Ed25519.PointOps.d"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero
let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p)
// let is_on_curve_ext (p:ext_point) =
// let _X, _Y, _Z, _T = p in
// _Y *% _Y -% X *% X == _Z *% _Z +% d *% _T *% _T
// let to_ext_point (p:aff_point) : ext_point =
// let x, y = p in
// (x, y, one, x *% y)
/// Point addition and doubling in affine coordinates
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3
let aff_point_double (p:aff_point) : aff_point =
let x, y = p in
let x3 = (2 *% x *% y) /% (y *% y -% x *% x) in
let y3 = (y *% y +% x *% x) /% (2 -% y *% y +% x *% x) in
x3, y3
let aff_point_at_infinity : aff_point = (zero, one)
let aff_point_negate (p:aff_point) : aff_point =
let x, y = p in
((-x) % prime, y)
/// Point addition and doubling in Extended Twisted Edwards Coordinates
let point_add (p:ext_point) (q:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let _X2, _Y2, _Z2, _T2 = q in
let a = (_Y1 -% _X1) *% (_Y2 -% _X2) in
let b = (_Y1 +% _X1) *% (_Y2 +% _X2) in
let c = (2 *% d *% _T1) *% _T2 in
let d = (2 *% _Z1) *% _Z2 in
let e = b -% a in
let f = d -% c in
let g = d +% c in
let h = b +% a in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3)
let point_double (p:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let a = _X1 *% _X1 in
let b = _Y1 *% _Y1 in
let c = 2 *% (_Z1 *% _Z1) in
let h = a +% b in
let e = h -% ((_X1 +% _Y1) *% (_X1 +% _Y1)) in
let g = a -% b in
let f = c +% g in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3)
let point_at_infinity: ext_point = (zero, one, one, zero)
let point_negate (p:ext_point) : ext_point =
let _X, _Y, _Z, _T = p in
((-_X) % prime, _Y, _Z, (-_T) % prime)
let point_compress (p:ext_point) : Tot (BSeq.lbytes 32) =
let px, py, pz, pt = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
BSeq.nat_to_bytes_le 32 (pow2 255 * (x % 2) + y) | false | true | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val recover_x (y: nat) (sign: bool) : Tot (option elem) | [] | Spec.Ed25519.PointOps.recover_x | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | y: Prims.nat -> sign: Prims.bool -> FStar.Pervasives.Native.option Spec.Curve25519.elem | {
"end_col": 17,
"end_line": 136,
"start_col": 2,
"start_line": 123
} |
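The branches of recover_x follow the usual square-root recipe for p = 2^255 - 19, which satisfies p ≡ 5 (mod 8) (an informal restatement of the definition above): from the curve equation,

\[ x^2 = \frac{y^2 - 1}{d\,y^2 + 1}, \qquad x := (x^2)^{(p+3)/8}, \]

then x is multiplied by modp_sqrt_m1 = sqrt(-1) when x*x does not yet equal x^2, the input is rejected (None) if it still does not, and finally x is replaced by p - x whenever its parity disagrees with the requested sign bit.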
Prims.Tot | val point_decompress (s: BSeq.lbytes 32) : Tot (option ext_point) | [
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Spec.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let point_decompress (s:BSeq.lbytes 32) : Tot (option ext_point) =
let y = BSeq.nat_from_bytes_le s in
let sign = (y / pow2 255) % 2 = 1 in
let y = y % pow2 255 in
let x = recover_x y sign in
match x with
| Some x -> Some (x, y, one, x *% y)
| _ -> None | val point_decompress (s: BSeq.lbytes 32) : Tot (option ext_point)
let point_decompress (s: BSeq.lbytes 32) : Tot (option ext_point) = | false | null | false | let y = BSeq.nat_from_bytes_le s in
let sign = (y / pow2 255) % 2 = 1 in
let y = y % pow2 255 in
let x = recover_x y sign in
match x with
| Some x -> Some (x, y, one, x *% y)
| _ -> None | {
"checked_file": "Spec.Ed25519.PointOps.fst.checked",
"dependencies": [
"Spec.Curve25519.fst.checked",
"prims.fst.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Ed25519.PointOps.fst"
} | [
"total"
] | [
"Lib.ByteSequence.lbytes",
"Spec.Curve25519.elem",
"FStar.Pervasives.Native.Some",
"Spec.Ed25519.PointOps.ext_point",
"FStar.Pervasives.Native.Mktuple4",
"Spec.Curve25519.one",
"Spec.Curve25519.op_Star_Percent",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.None",
"Spec.Ed25519.PointOps.recover_x",
"Prims.int",
"Prims.op_Modulus",
"Prims.pow2",
"Prims.bool",
"Prims.op_Equality",
"Prims.op_Division",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"Prims.op_Multiply",
"Lib.Sequence.length",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"Lib.ByteSequence.nat_from_bytes_le"
] | [] | module Spec.Ed25519.PointOps
open FStar.Mul
open Spec.Curve25519
module BSeq = Lib.ByteSequence
#reset-options "--z3rlimit 50 --fuel 0 --ifuel 0"
type aff_point = elem & elem // Affine point
type ext_point = elem & elem & elem & elem // Homogeneous extended coordinates
(* 2 **% ((prime - 1) / 4) *)
let modp_sqrt_m1 : elem = 0x2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0
let d : elem =
let x = 37095705934669439343138083508754565189542113879843219016388785533085940283555 in
assert_norm(x < prime);
x
let g_x : elem = 15112221349535400772501151409588531511454012693041857206046113283949847762202
let g_y : elem = 46316835694926478169428394003475163141307993866256225615783033603165251855960
let aff_g : aff_point = (g_x, g_y)
let g: ext_point = (g_x, g_y, 1, g_x *% g_y)
let is_on_curve (p:aff_point) =
let (x, y) = p in
y *% y -% x *% x == 1 +% d *% (x *% x) *% (y *% y)
let to_aff_point (p:ext_point) : aff_point =
let _X, _Y, _Z, _T = p in
_X /% _Z, _Y /% _Z
let is_ext (p:ext_point) =
let _X, _Y, _Z, _T = p in
_T == _X *% _Y /% _Z /\ _Z <> zero
let point_inv (p:ext_point) =
is_ext p /\ is_on_curve (to_aff_point p)
// let is_on_curve_ext (p:ext_point) =
// let _X, _Y, _Z, _T = p in
// _Y *% _Y -% X *% X == _Z *% _Z +% d *% _T *% _T
// let to_ext_point (p:aff_point) : ext_point =
// let x, y = p in
// (x, y, one, x *% y)
/// Point addition and doubling in affine coordinates
let aff_point_add (p:aff_point) (q:aff_point) : aff_point =
let x1, y1 = p in
let x2, y2 = q in
let x3 = (x1 *% y2 +% y1 *% x2) /% (1 +% d *% (x1 *% x2) *% (y1 *% y2)) in
let y3 = (y1 *% y2 +% x1 *% x2) /% (1 -% d *% (x1 *% x2) *% (y1 *% y2)) in
x3, y3
let aff_point_double (p:aff_point) : aff_point =
let x, y = p in
let x3 = (2 *% x *% y) /% (y *% y -% x *% x) in
let y3 = (y *% y +% x *% x) /% (2 -% y *% y +% x *% x) in
x3, y3
let aff_point_at_infinity : aff_point = (zero, one)
let aff_point_negate (p:aff_point) : aff_point =
let x, y = p in
((-x) % prime, y)
/// Point addition and doubling in Extended Twisted Edwards Coordinates
let point_add (p:ext_point) (q:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let _X2, _Y2, _Z2, _T2 = q in
let a = (_Y1 -% _X1) *% (_Y2 -% _X2) in
let b = (_Y1 +% _X1) *% (_Y2 +% _X2) in
let c = (2 *% d *% _T1) *% _T2 in
let d = (2 *% _Z1) *% _Z2 in
let e = b -% a in
let f = d -% c in
let g = d +% c in
let h = b +% a in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3)
let point_double (p:ext_point) : Tot ext_point =
let _X1, _Y1, _Z1, _T1 = p in
let a = _X1 *% _X1 in
let b = _Y1 *% _Y1 in
let c = 2 *% (_Z1 *% _Z1) in
let h = a +% b in
let e = h -% ((_X1 +% _Y1) *% (_X1 +% _Y1)) in
let g = a -% b in
let f = c +% g in
let _X3 = e *% f in
let _Y3 = g *% h in
let _T3 = e *% h in
let _Z3 = f *% g in
(_X3, _Y3, _Z3, _T3)
let point_at_infinity: ext_point = (zero, one, one, zero)
let point_negate (p:ext_point) : ext_point =
let _X, _Y, _Z, _T = p in
((-_X) % prime, _Y, _Z, (-_T) % prime)
let point_compress (p:ext_point) : Tot (BSeq.lbytes 32) =
let px, py, pz, pt = p in
let zinv = finv pz in
let x = px *% zinv in
let y = py *% zinv in
BSeq.nat_to_bytes_le 32 (pow2 255 * (x % 2) + y)
let recover_x (y:nat) (sign:bool) : Tot (option elem) =
if y >= prime then None
else (
let y2 = y *% y in
let x2 = (y2 -% one) *% (finv ((d *% y2) +% one)) in
if x2 = zero then (
if sign then None
else Some zero)
else (
let x = x2 **% ((prime + 3) / 8) in
let x = if ((x *% x) -% x2) <> zero then x *% modp_sqrt_m1 else x in
if ((x *% x) -% x2) <> zero then None
else (
let x = if (x % 2 = 1) <> sign then (prime - x) % prime else x in
Some x))) | false | false | Spec.Ed25519.PointOps.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val point_decompress (s: BSeq.lbytes 32) : Tot (option ext_point) | [] | Spec.Ed25519.PointOps.point_decompress | {
"file_name": "specs/Spec.Ed25519.PointOps.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Lib.ByteSequence.lbytes 32 -> FStar.Pervasives.Native.option Spec.Ed25519.PointOps.ext_point | {
"end_col": 13,
"end_line": 145,
"start_col": 66,
"start_line": 138
} |
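A minimal usage sketch tying the two encodings together (a hypothetical helper, not present in Spec.Ed25519.PointOps and not machine-checked here): compressing a point and decompressing the resulting 32 bytes yields an option ext_point that callers pattern-match on.

(* hypothetical round-trip helper, unchecked *)
let compress_then_decompress (p:ext_point) : option ext_point =
  point_decompress (point_compress p)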
FStar.Pervasives.Lemma | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a) | let gsub_inj (#a: Type0) = | false | null | true | mgsub_inj #(option a)
#(initialization_preorder a)
#(initialization_preorder a)
(initialization_preorder a)
(initialization_preorder a) | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.mgsub_inj",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.length",
"Prims.op_Equals_Equals_Equals",
"LowStar.Monotonic.Buffer.mgsub",
"Prims.squash",
"Prims.eq2",
"Prims.l_imp",
"Prims.nat",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gsub_inj : b1:
LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a) ->
b2:
LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a) ->
i1: FStar.UInt32.t ->
i2: FStar.UInt32.t ->
len1: FStar.UInt32.t ->
len2: FStar.UInt32.t
-> FStar.Pervasives.Lemma
(requires
FStar.UInt32.v i1 + FStar.UInt32.v len1 <= LowStar.Monotonic.Buffer.length b1 /\
FStar.UInt32.v i2 + FStar.UInt32.v len2 <= LowStar.Monotonic.Buffer.length b2 /\
LowStar.Monotonic.Buffer.mgsub (LowStar.UninitializedBuffer.initialization_preorder a)
b1
i1
len1 ===
LowStar.Monotonic.Buffer.mgsub (LowStar.UninitializedBuffer.initialization_preorder a)
b2
i2
len2)
(ensures
len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\
(i1 == i2 /\ LowStar.Monotonic.Buffer.length b1 == LowStar.Monotonic.Buffer.length b2 ==>
b1 == b2)) | [] | LowStar.UninitializedBuffer.gsub_inj | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b1:
LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a) ->
b2:
LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a) ->
i1: FStar.UInt32.t ->
i2: FStar.UInt32.t ->
len1: FStar.UInt32.t ->
len2: FStar.UInt32.t
-> FStar.Pervasives.Lemma
(requires
FStar.UInt32.v i1 + FStar.UInt32.v len1 <= LowStar.Monotonic.Buffer.length b1 /\
FStar.UInt32.v i2 + FStar.UInt32.v len2 <= LowStar.Monotonic.Buffer.length b2 /\
LowStar.Monotonic.Buffer.mgsub (LowStar.UninitializedBuffer.initialization_preorder a)
b1
i1
len1 ===
LowStar.Monotonic.Buffer.mgsub (LowStar.UninitializedBuffer.initialization_preorder a)
b2
i2
len2)
(ensures
len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\
(i1 == i2 /\ LowStar.Monotonic.Buffer.length b1 == LowStar.Monotonic.Buffer.length b2 ==>
b1 == b2)) | {
"end_col": 168,
"end_line": 44,
"start_col": 33,
"start_line": 44
} |
|
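For the LowStar.UninitializedBuffer rows that follow, the model shown in the file_context is a sequence of option a together with a preorder making initialization irreversible; written out (a restatement, with |s| for Seq.length s):

\[ s_1 \preceq s_2 \iff |s_1| = |s_2| \ \wedge\ \forall i < |s_1|.\ \mathrm{Some?}(s_1[i]) \Rightarrow \mathrm{Some?}(s_2[i]). \]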
Prims.Tot | val unull (#a: Type0) : ubuffer a | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a) | val unull (#a: Type0) : ubuffer a
let unull (#a: Type0) : ubuffer a = | false | null | false | mnull #(option a) #(initialization_preorder a) #(initialization_preorder a) | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.mnull",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.UninitializedBuffer.ubuffer"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val unull (#a: Type0) : ubuffer a | [] | LowStar.UninitializedBuffer.unull | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | LowStar.UninitializedBuffer.ubuffer a | {
"end_col": 116,
"end_line": 40,
"start_col": 41,
"start_line": 40
} |
Prims.Ghost | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) | let gsub (#a: Type0) = | false | null | false | mgsub #(option a)
#(initialization_preorder a)
#(initialization_preorder a)
(initialization_preorder a) | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"LowStar.Monotonic.Buffer.mgsub",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.length",
"Prims.l_True"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gsub : b:
LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a) ->
i: FStar.UInt32.t ->
len: FStar.UInt32.t
-> Prims.Ghost
(LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a)) | [] | LowStar.UninitializedBuffer.gsub | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b:
LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a) ->
i: FStar.UInt32.t ->
len: FStar.UInt32.t
-> Prims.Ghost
(LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a)) | {
"end_col": 132,
"end_line": 42,
"start_col": 29,
"start_line": 42
} |
|
FStar.HyperStack.ST.Stack | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) | let usub (#a: Type0) = | true | null | false | msub #(option a)
#(initialization_preorder a)
#(initialization_preorder a)
(initialization_preorder a) | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"LowStar.Monotonic.Buffer.msub",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"FStar.Ghost.erased",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.UInt32.v",
"FStar.Ghost.reveal",
"LowStar.Monotonic.Buffer.length",
"Prims.l_Forall",
"FStar.Seq.Base.seq",
"Prims.l_imp",
"Prims.eq2",
"Prims.nat",
"FStar.Seq.Base.length",
"FStar.Seq.Base.slice",
"Prims.int",
"Prims.op_Subtraction",
"FStar.Seq.Properties.replace_subseq",
"LowStar.Monotonic.Buffer.live",
"LowStar.Monotonic.Buffer.mgsub"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1} | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val usub : b:
LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a) ->
i: FStar.UInt32.t ->
len: FStar.Ghost.erased FStar.UInt32.t
-> FStar.HyperStack.ST.Stack
(LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a)) | [] | LowStar.UninitializedBuffer.usub | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b:
LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a) ->
i: FStar.UInt32.t ->
len: FStar.Ghost.erased FStar.UInt32.t
-> FStar.HyperStack.ST.Stack
(LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a)) | {
"end_col": 146,
"end_line": 52,
"start_col": 44,
"start_line": 52
} |
|
Prims.Tot | val ipred (#a: Type0) (i: nat) : spred (option a) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i) | val ipred (#a: Type0) (i: nat) : spred (option a)
let ipred (#a: Type0) (i: nat) : spred (option a) = | false | null | false | fun s -> i < Seq.length s ==> Some? (Seq.index s i) | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [
"total"
] | [
"Prims.nat",
"FStar.Seq.Base.seq",
"FStar.Pervasives.Native.option",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.Seq.Base.index",
"LowStar.Monotonic.Buffer.spred"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ipred (#a: Type0) (i: nat) : spred (option a) | [] | LowStar.UninitializedBuffer.ipred | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | i: Prims.nat -> LowStar.Monotonic.Buffer.spred (FStar.Pervasives.Native.option a) | {
"end_col": 108,
"end_line": 62,
"start_col": 57,
"start_line": 62
} |
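The predicate ipred i can be witnessed (as initialized_at does in the surrounding file_context) because it is stable under the preorder above; informally,

\[ s_1 \preceq s_2 \ \wedge\ \big(i < |s_1| \Rightarrow \mathrm{Some?}(s_1[i])\big) \ \Longrightarrow\ \big(i < |s_2| \Rightarrow \mathrm{Some?}(s_2[i])\big), \]

which holds because the preorder preserves both the length and every Some cell.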
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len} | let lubuffer (a: Type0) (len: nat) = | false | null | false | b: ubuffer a {length b == len} | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [
"total"
] | [
"Prims.nat",
"LowStar.UninitializedBuffer.ubuffer",
"Prims.eq2",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i)) | false | true | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lubuffer : a: Type0 -> len: Prims.nat -> Type0 | [] | LowStar.UninitializedBuffer.lubuffer | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type0 -> len: Prims.nat -> Type0 | {
"end_col": 70,
"end_line": 88,
"start_col": 42,
"start_line": 88
} |
|
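A small stateful usage sketch of the API displayed in this row's file_context (a hypothetical function written against uupd and uindex as shown; it has not been re-verified here): writing a cell first establishes initialized_at, which is exactly the extra precondition uindex demands.

(* hypothetical, unchecked: write one cell, then read it back *)
let write_then_read (#a:Type0) (b:ubuffer a) (v:a)
  :HST.Stack a (requires (fun h0 -> live h0 b /\ length b == 1))
               (ensures (fun _ _ _ -> True))
  = uupd b 0ul v;
    uindex b 0ul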
FStar.HyperStack.ST.Stack | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) | let uoffset (#a: Type0) = | true | null | false | moffset #(option a)
#(initialization_preorder a)
#(initialization_preorder a)
(initialization_preorder a) | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"LowStar.Monotonic.Buffer.moffset",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.length",
"Prims.l_Forall",
"FStar.Seq.Base.seq",
"Prims.l_imp",
"Prims.eq2",
"Prims.nat",
"FStar.Seq.Base.length",
"FStar.Seq.Base.slice",
"Prims.op_Addition",
"FStar.UInt32.sub",
"LowStar.Monotonic.Buffer.len",
"Prims.int",
"Prims.op_Subtraction",
"FStar.Seq.Properties.replace_subseq",
"LowStar.Monotonic.Buffer.live",
"LowStar.Monotonic.Buffer.mgsub"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val uoffset : b:
LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a) ->
i: FStar.UInt32.t
-> FStar.HyperStack.ST.Stack
(LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a)) | [] | LowStar.UninitializedBuffer.uoffset | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b:
LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a) ->
i: FStar.UInt32.t
-> FStar.HyperStack.ST.Stack
(LowStar.Monotonic.Buffer.mbuffer (FStar.Pervasives.Native.option a)
(LowStar.UninitializedBuffer.initialization_preorder a)
(LowStar.UninitializedBuffer.initialization_preorder a)) | {
"end_col": 152,
"end_line": 54,
"start_col": 47,
"start_line": 54
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_j_for_blit
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a) (idx_dst:U32.t) (j:U32.t)
= U32.v idx_src + U32.v j <= length src /\
U32.v idx_dst + U32.v j <= length dst | let valid_j_for_blit
(#a: Type0)
(#rrel #rel: srel a)
(src: mbuffer a rrel rel)
(idx_src: U32.t)
(dst: ubuffer a)
(idx_dst j: U32.t)
= | false | null | false | U32.v idx_src + U32.v j <= length src /\ U32.v idx_dst + U32.v j <= length dst | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"LowStar.UninitializedBuffer.ubuffer",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i))
unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len}
unfold let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) =
b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
(*
* No initializer
*)
let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
inline_for_extraction
let ugcmalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
let umalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
inline_for_extraction
let umalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
let ualloca (#a:Type0) (len:U32.t)
:HST.StackInline (lubuffer a (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\
frameOf b == HS.get_tip h0))
= malloca None len
(*
* blit functionality, where src is a regular buffer
*)
[@@"opaque_to_smt"]
unfold let valid_j_for_blit
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_j_for_blit : src: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
idx_src: FStar.UInt32.t ->
dst: LowStar.UninitializedBuffer.ubuffer a ->
idx_dst: FStar.UInt32.t ->
j: FStar.UInt32.t
-> Prims.logical | [] | LowStar.UninitializedBuffer.valid_j_for_blit | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
src: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
idx_src: FStar.UInt32.t ->
dst: LowStar.UninitializedBuffer.ubuffer a ->
idx_dst: FStar.UInt32.t ->
j: FStar.UInt32.t
-> Prims.logical | {
"end_col": 41,
"end_line": 137,
"start_col": 4,
"start_line": 136
} |
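valid_j_for_blit above is a pure arithmetic side condition: the j copied elements must fit both after idx_src in src and after idx_dst in dst. A throwaway assertion on concrete numbers (illustration only, not part of the library) makes the bound explicit:

(* With length src = 10, idx_src = 4, length dst = 8, idx_dst = 2,
   any j <= 6 satisfies both inequalities of valid_j_for_blit. *)
let _ = assert (4 + 6 <= 10 /\ 2 + 6 <= 8)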
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) =
b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)} | let lubuffer_or_null (a: Type0) (len: nat) (r: HS.rid) = | false | null | false | b: ubuffer a {(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)} | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [
"total"
] | [
"Prims.nat",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.UninitializedBuffer.ubuffer",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.length",
"LowStar.Monotonic.Buffer.frameOf"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i))
unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len} | false | true | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lubuffer_or_null : a: Type0 -> len: Prims.nat -> r: FStar.Monotonic.HyperHeap.rid -> Type0 | [] | LowStar.UninitializedBuffer.lubuffer_or_null | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type0 -> len: Prims.nat -> r: FStar.Monotonic.HyperHeap.rid -> Type0 | {
"end_col": 74,
"end_line": 91,
"start_col": 2,
"start_line": 91
} |
|
Prims.Tot | val initialized_at (#a: Type0) (b: ubuffer a) (i: nat) : Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i) | val initialized_at (#a: Type0) (b: ubuffer a) (i: nat) : Type0
let initialized_at (#a: Type0) (b: ubuffer a) (i: nat) : Type0 = | false | null | false | witnessed b (ipred i) | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [
"total"
] | [
"LowStar.UninitializedBuffer.ubuffer",
"Prims.nat",
"LowStar.Monotonic.Buffer.witnessed",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.UninitializedBuffer.ipred"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val initialized_at (#a: Type0) (b: ubuffer a) (i: nat) : Type0 | [] | LowStar.UninitializedBuffer.initialized_at | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.UninitializedBuffer.ubuffer a -> i: Prims.nat -> Type0 | {
"end_col": 82,
"end_line": 63,
"start_col": 61,
"start_line": 63
} |
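initialized_at is defined through witnessed, which is sound only because ipred i is stable under initialization_preorder. The argument, spelled out as an informal comment against the definitions in these records:

(* If s1 `initialization_preorder` s2 and ipred i s1 holds, then for i < Seq.length s2
   (which equals Seq.length s1) we get Some? (Seq.index s1 i), and the preorder clause
   Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i) yields ipred i s2.
   This stability is what lets witness_p record `b initialized_at i` and
   recall_p reinstate it later, as uupd and uindex do in the surrounding code. *)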
Prims.Tot | val initialization_preorder (a: Type0) : srel (option a) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i)) | val initialization_preorder (a: Type0) : srel (option a)
let initialization_preorder (a: Type0) : srel (option a) = | false | null | false | fun s1 s2 ->
Seq.length s1 == Seq.length s2 /\
(forall (i: nat). {:pattern (Seq.index s2 i)}
i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i)) | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"FStar.Pervasives.Native.option",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"FStar.Seq.Base.length",
"Prims.l_Forall",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.Seq.Base.index",
"Prims.logical",
"LowStar.Monotonic.Buffer.srel"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val initialization_preorder (a: Type0) : srel (option a) | [] | LowStar.UninitializedBuffer.initialization_preorder | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type0 -> LowStar.Monotonic.Buffer.srel (FStar.Pervasives.Native.option a) | {
"end_col": 128,
"end_line": 35,
"start_col": 2,
"start_line": 34
} |
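The relation in this record is the core invariant of the module: lengths are preserved, and an index that has become Some can never revert to None. Because initialization_preorder is private, the sketch below only mirrors its shape (specialized to int, with Seq abbreviating FStar.Seq as in the file; once_set_stays_set is a name of this illustration, not of the library):

(* Same shape of relation as initialization_preorder, written out for seq (option int). *)
let once_set_stays_set (s1 s2:Seq.seq (option int)) =
  Seq.length s1 == Seq.length s2 /\
  (forall (i:nat). i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
(* Under this relation, [None; Some 1] may evolve to [Some 0; Some 1]; the reverse is
   ruled out because index 0 would drop from Some back to None. *)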
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ublit_post_j
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a) (idx_dst:U32.t) (j:U32.t{valid_j_for_blit src idx_src dst idx_dst j})
(h0 h1:HS.mem)
= modifies (loc_buffer dst) h0 h1 /\ live h1 dst /\
(forall (i:nat).{:pattern (Seq.index (as_seq h1 dst) i)} (i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j ==>
Seq.index (as_seq h1 dst) i ==
Some (Seq.index (as_seq h0 src) (U32.v idx_src + i - U32.v idx_dst)))
) /\
Seq.slice (as_seq h1 dst) 0 (U32.v idx_dst) == Seq.slice (as_seq h0 dst) 0 (U32.v idx_dst) /\
Seq.slice (as_seq h1 dst) (U32.v idx_dst + U32.v j) (length dst) == Seq.slice (as_seq h0 dst) (U32.v idx_dst + U32.v j) (length dst) /\
(forall (i:nat).{:pattern (dst `initialized_at` i)} (i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j) ==>
dst `initialized_at` i) | let ublit_post_j
(#a: Type0)
(#rrel #rel: srel a)
(src: mbuffer a rrel rel)
(idx_src: U32.t)
(dst: ubuffer a)
(idx_dst: U32.t)
(j: U32.t{valid_j_for_blit src idx_src dst idx_dst j})
(h0 h1: HS.mem)
= | false | null | false | modifies (loc_buffer dst) h0 h1 /\ live h1 dst /\
(forall (i: nat). {:pattern (Seq.index (as_seq h1 dst) i)}
(i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j ==>
Seq.index (as_seq h1 dst) i ==
Some (Seq.index (as_seq h0 src) (U32.v idx_src + i - U32.v idx_dst)))) /\
Seq.slice (as_seq h1 dst) 0 (U32.v idx_dst) == Seq.slice (as_seq h0 dst) 0 (U32.v idx_dst) /\
Seq.slice (as_seq h1 dst) (U32.v idx_dst + U32.v j) (length dst) ==
Seq.slice (as_seq h0 dst) (U32.v idx_dst + U32.v j) (length dst) /\
(forall (i: nat). {:pattern (dst `initialized_at` i)}
(i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j) ==> dst `initialized_at` i) | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"LowStar.UninitializedBuffer.ubuffer",
"LowStar.UninitializedBuffer.valid_j_for_blit",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_buffer",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.Monotonic.Buffer.live",
"Prims.l_Forall",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt32.v",
"Prims.op_LessThan",
"Prims.op_Addition",
"Prims.eq2",
"FStar.Seq.Base.index",
"LowStar.Monotonic.Buffer.as_seq",
"FStar.Pervasives.Native.Some",
"Prims.op_Subtraction",
"FStar.Seq.Base.seq",
"FStar.Seq.Base.slice",
"LowStar.Monotonic.Buffer.length",
"LowStar.UninitializedBuffer.initialized_at",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i))
unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len}
unfold let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) =
b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
(*
* No initializer
*)
let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
inline_for_extraction
let ugcmalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
let umalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
inline_for_extraction
let umalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
let ualloca (#a:Type0) (len:U32.t)
:HST.StackInline (lubuffer a (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\
frameOf b == HS.get_tip h0))
= malloca None len
(*
* blit functionality, where src is a regular buffer
*)
[@@"opaque_to_smt"]
unfold let valid_j_for_blit
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a) (idx_dst:U32.t) (j:U32.t)
= U32.v idx_src + U32.v j <= length src /\
U32.v idx_dst + U32.v j <= length dst
(*
* postcondition of blit
*)
[@@"opaque_to_smt"]
unfold private let ublit_post_j
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a) (idx_dst:U32.t) (j:U32.t{valid_j_for_blit src idx_src dst idx_dst j}) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ublit_post_j : src: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
idx_src: FStar.UInt32.t ->
dst: LowStar.UninitializedBuffer.ubuffer a ->
idx_dst: FStar.UInt32.t ->
j: FStar.UInt32.t{LowStar.UninitializedBuffer.valid_j_for_blit src idx_src dst idx_dst j} ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | [] | LowStar.UninitializedBuffer.ublit_post_j | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
src: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
idx_src: FStar.UInt32.t ->
dst: LowStar.UninitializedBuffer.ubuffer a ->
idx_dst: FStar.UInt32.t ->
j: FStar.UInt32.t{LowStar.UninitializedBuffer.valid_j_for_blit src idx_src dst idx_dst j} ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | {
"end_col": 38,
"end_line": 155,
"start_col": 4,
"start_line": 147
} |
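Unfolded, ublit_post_j in this record states the usual blit contract plus initialization facts. Reading s0 = as_seq h0 src, d0 = as_seq h0 dst and d1 = as_seq h1 dst, it says:

(* - only dst is modified between h0 and h1, and dst stays live;
   - Seq.index d1 i == Some (Seq.index s0 (idx_src + i - idx_dst))  for idx_dst <= i < idx_dst + j;
   - Seq.slice d1 0 idx_dst == Seq.slice d0 0 idx_dst;
   - Seq.slice d1 (idx_dst + j) (length dst) == Seq.slice d0 (idx_dst + j) (length dst);
   - dst `initialized_at` i  for every idx_dst <= i < idx_dst + j. *)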
|
FStar.HyperStack.ST.ST | val ugcmalloc (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer a (U32.v len) {frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len | val ugcmalloc (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer a (U32.v len) {frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
let ugcmalloc (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer a (U32.v len) {frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) = | true | null | false | mgcmalloc r None len | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"FStar.Monotonic.HyperHeap.rid",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.mgcmalloc",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"FStar.Pervasives.Native.None",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.UInt32.v",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"LowStar.UninitializedBuffer.lubuffer",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.malloc_pre",
"LowStar.Monotonic.Buffer.alloc_post_mem_common",
"FStar.Seq.Base.create"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i))
unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len}
unfold let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) =
b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
(*
* No initializer
*)
let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len)) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ugcmalloc (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer a (U32.v len) {frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) | [] | LowStar.UninitializedBuffer.ugcmalloc | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Monotonic.HyperHeap.rid -> len: FStar.UInt32.t
-> FStar.HyperStack.ST.ST
(b:
LowStar.UninitializedBuffer.lubuffer a (FStar.UInt32.v len)
{LowStar.Monotonic.Buffer.frameOf b == r /\ LowStar.Monotonic.Buffer.recallable b}) | {
"end_col": 24,
"end_line": 100,
"start_col": 4,
"start_line": 100
} |
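A minimal caller of ugcmalloc from the record above could look like this (a sketch, not library code: it picks HS.root as the target region on the assumption that malloc_pre holds for it, and it weakens the postcondition to True):

(* Sketch: allocate a 2-element uninitialized int buffer in the root region.
   If the chosen region or length does not satisfy malloc_pre in your setting,
   strengthen the requires clause accordingly. *)
let alloc_two () : HST.ST (lubuffer int 2)
  (requires (fun _ -> True))
  (ensures  (fun _ _ _ -> True))
= ugcmalloc #int HS.root 2ul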
FStar.HyperStack.ST.Stack | val uindex (#a: Type0) (b: ubuffer a) (i: U32.t)
: HST.Stack a
(requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures
(fun h0 y h1 ->
let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt | val uindex (#a: Type0) (b: ubuffer a) (i: U32.t)
: HST.Stack a
(requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures
(fun h0 y h1 ->
let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
let uindex (#a: Type0) (b: ubuffer a) (i: U32.t)
: HST.Stack a
(requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures
(fun h0 y h1 ->
let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1)) = | true | null | false | let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"LowStar.UninitializedBuffer.ubuffer",
"FStar.UInt32.t",
"FStar.Pervasives.Native.__proj__Some__item__v",
"Prims.unit",
"LowStar.Monotonic.Buffer.recall_p",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.UninitializedBuffer.ipred",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.index",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"Prims.b2t",
"Prims.op_LessThan",
"LowStar.Monotonic.Buffer.length",
"LowStar.UninitializedBuffer.initialized_at",
"FStar.Pervasives.Native.uu___is_Some",
"Prims.eq2",
"FStar.Seq.Base.index",
"LowStar.Monotonic.Buffer.as_seq"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val uindex (#a: Type0) (b: ubuffer a) (i: U32.t)
: HST.Stack a
(requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures
(fun h0 y h1 ->
let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1)) | [] | LowStar.UninitializedBuffer.uindex | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.UninitializedBuffer.ubuffer a -> i: FStar.UInt32.t -> FStar.HyperStack.ST.Stack a | {
"end_col": 17,
"end_line": 74,
"start_col": 3,
"start_line": 72
} |
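A minimal client sketch of uindex over the one-element pointer type from the record above, assuming the module abbreviations HST/U32 shown there. The wrapper name read_first is hypothetical and the snippet is an unverified illustration, not part of the extracted source.

let read_first (#a:Type0) (p:pointer a)
  :HST.Stack a (requires (fun h0 -> live h0 p /\ p `initialized_at` 0))
               (ensures  (fun h0 _ h1 -> h0 == h1))
  = uindex p 0ul  (* length p == 1, so index 0 is in bounds and must be initialized *)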
FStar.HyperStack.ST.ST | val umalloc (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer a (U32.v len) {frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let umalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len | val umalloc (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer a (U32.v len) {frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
let umalloc (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer a (U32.v len) {frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) = | true | null | false | mmalloc r None len | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"FStar.Monotonic.HyperHeap.rid",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.mmalloc",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"FStar.Pervasives.Native.None",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.UInt32.v",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.freeable",
"LowStar.UninitializedBuffer.lubuffer",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.malloc_pre",
"LowStar.Monotonic.Buffer.alloc_post_mem_common",
"FStar.Seq.Base.create"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i))
unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len}
unfold let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) =
b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
(*
* No initializer
*)
let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
inline_for_extraction
let ugcmalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
let umalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len)) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val umalloc (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer a (U32.v len) {frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) | [] | LowStar.UninitializedBuffer.umalloc | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Monotonic.HyperHeap.rid -> len: FStar.UInt32.t
-> FStar.HyperStack.ST.ST
(b:
LowStar.UninitializedBuffer.lubuffer a (FStar.UInt32.v len)
{LowStar.Monotonic.Buffer.frameOf b == r /\ LowStar.Monotonic.Buffer.freeable b}) | {
"end_col": 22,
"end_line": 113,
"start_col": 4,
"start_line": 113
} |
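A hedged usage sketch for umalloc: heap-allocate, initialize one slot with uupd, read it back with uindex, then free. The helper name alloc_init_read, the element type U32.t, and the length 8ul are illustrative assumptions; the code is a sketch under those assumptions and has not been machine-verified.

let alloc_init_read (r:HS.rid{HST.is_eternal_region r}) (x:U32.t)
  :HST.ST U32.t (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = let b = umalloc #U32.t r 8ul in  (* freeable, frameOf b == r *)
    uupd b 0ul x;                    (* establishes b `initialized_at` 0 *)
    let y = uindex b 0ul in
    free b;
    y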
FStar.HyperStack.ST.StackInline | val ualloca (#a: Type0) (len: U32.t)
: HST.StackInline (lubuffer a (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures
(fun h0 b h1 ->
alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\
frameOf b == HS.get_tip h0)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ualloca (#a:Type0) (len:U32.t)
:HST.StackInline (lubuffer a (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\
frameOf b == HS.get_tip h0))
= malloca None len | val ualloca (#a: Type0) (len: U32.t)
: HST.StackInline (lubuffer a (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures
(fun h0 b h1 ->
alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\
frameOf b == HS.get_tip h0))
let ualloca (#a: Type0) (len: U32.t)
: HST.StackInline (lubuffer a (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures
(fun h0 b h1 ->
alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\
frameOf b == HS.get_tip h0)) = | true | null | false | malloca None len | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.malloca",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"FStar.Pervasives.Native.None",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.UInt32.v",
"LowStar.UninitializedBuffer.lubuffer",
"FStar.Monotonic.HyperStack.mem",
"Prims.b2t",
"LowStar.Monotonic.Buffer.alloca_pre",
"Prims.l_and",
"LowStar.Monotonic.Buffer.alloc_post_mem_common",
"FStar.Seq.Base.create",
"Prims.eq2",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"FStar.Monotonic.HyperStack.get_tip"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i))
unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len}
unfold let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) =
b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
(*
* No initializer
*)
let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
inline_for_extraction
let ugcmalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
let umalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
inline_for_extraction
let umalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
let ualloca (#a:Type0) (len:U32.t)
:HST.StackInline (lubuffer a (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\ | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ualloca (#a: Type0) (len: U32.t)
: HST.StackInline (lubuffer a (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures
(fun h0 b h1 ->
alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\
frameOf b == HS.get_tip h0)) | [] | LowStar.UninitializedBuffer.ualloca | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | len: FStar.UInt32.t
-> FStar.HyperStack.ST.StackInline (LowStar.UninitializedBuffer.lubuffer a (FStar.UInt32.v len)) | {
"end_col": 20,
"end_line": 127,
"start_col": 4,
"start_line": 127
} |
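A hedged sketch of ualloca inside an explicit push_frame/pop_frame pair, the usual pattern for StackInline allocation. The name with_temp is hypothetical; the snippet is illustrative only and unverified.

let with_temp (x:bool)
  :HST.Stack bool (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = HST.push_frame ();
    let tmp = ualloca #bool 4ul in  (* allocated in the fresh frame *)
    uupd tmp 0ul x;
    let r = uindex tmp 0ul in
    HST.pop_frame ();
    r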
FStar.HyperStack.ST.ST | val ugcmalloc_partial (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer_or_null a (U32.v len) r {recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ugcmalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len | val ugcmalloc_partial (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer_or_null a (U32.v len) r {recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
let ugcmalloc_partial (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer_or_null a (U32.v len) r {recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) = | true | null | false | mgcmalloc r None len | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"FStar.Monotonic.HyperHeap.rid",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.mgcmalloc",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"FStar.Pervasives.Native.None",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.UInt32.v",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"LowStar.UninitializedBuffer.lubuffer_or_null",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.malloc_pre",
"LowStar.Monotonic.Buffer.alloc_partial_post_mem_common",
"FStar.Seq.Base.create"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i))
unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len}
unfold let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) =
b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
(*
* No initializer
*)
let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
inline_for_extraction
let ugcmalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{recallable b})
(requires (fun h0 -> malloc_pre r len)) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ugcmalloc_partial (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer_or_null a (U32.v len) r {recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) | [] | LowStar.UninitializedBuffer.ugcmalloc_partial | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Monotonic.HyperHeap.rid -> len: FStar.UInt32.t
-> FStar.HyperStack.ST.ST
(b:
LowStar.UninitializedBuffer.lubuffer_or_null a (FStar.UInt32.v len) r
{LowStar.Monotonic.Buffer.recallable b}) | {
"end_col": 24,
"end_line": 107,
"start_col": 4,
"start_line": 107
} |
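A hedged sketch of the _partial allocator: it may return a null buffer instead of failing, so a client tests is_null before relying on the length or recallable refinements. try_alloc is a hypothetical name and the concrete length 16ul is arbitrary; the code is an unverified illustration.

let try_alloc (#a:Type0) (r:HS.rid{HST.is_eternal_region r})
  :HST.ST bool (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = let b = ugcmalloc_partial #a r 16ul in
    not (is_null b)  (* false means the allocation was refused *)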
FStar.HyperStack.ST.ST | val recall_initialized (#a: Type0) (b: ubuffer a) (i: nat)
: HST.ST unit
(fun h0 -> (recallable b \/ live h0 b) /\ b `initialized_at` i)
(fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ (i < length b ==> Some? (Seq.index (as_seq h0 b) i))) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let recall_initialized (#a:Type0) (b:ubuffer a) (i:nat)
:HST.ST unit (fun h0 -> (recallable b \/ live h0 b) /\ b `initialized_at` i)
(fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ (i < length b ==> Some? (Seq.index (as_seq h0 b) i)))
= recall_p b (ipred i) | val recall_initialized (#a: Type0) (b: ubuffer a) (i: nat)
: HST.ST unit
(fun h0 -> (recallable b \/ live h0 b) /\ b `initialized_at` i)
(fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ (i < length b ==> Some? (Seq.index (as_seq h0 b) i)))
let recall_initialized (#a: Type0) (b: ubuffer a) (i: nat)
: HST.ST unit
(fun h0 -> (recallable b \/ live h0 b) /\ b `initialized_at` i)
(fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ (i < length b ==> Some? (Seq.index (as_seq h0 b) i))) = | true | null | false | recall_p b (ipred i) | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"LowStar.UninitializedBuffer.ubuffer",
"Prims.nat",
"LowStar.Monotonic.Buffer.recall_p",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.UninitializedBuffer.ipred",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Prims.l_or",
"LowStar.Monotonic.Buffer.recallable",
"LowStar.Monotonic.Buffer.live",
"LowStar.UninitializedBuffer.initialized_at",
"Prims.eq2",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThan",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.Seq.Base.index",
"LowStar.Monotonic.Buffer.as_seq"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i))
unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len}
unfold let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) =
b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
(*
* No initializer
*)
let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
inline_for_extraction
let ugcmalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
let umalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
inline_for_extraction
let umalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
let ualloca (#a:Type0) (len:U32.t)
:HST.StackInline (lubuffer a (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\
frameOf b == HS.get_tip h0))
= malloca None len
(*
* blit functionality, where src is a regular buffer
*)
[@@"opaque_to_smt"]
unfold let valid_j_for_blit
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a) (idx_dst:U32.t) (j:U32.t)
= U32.v idx_src + U32.v j <= length src /\
U32.v idx_dst + U32.v j <= length dst
(*
* postcondition of blit
*)
[@@"opaque_to_smt"]
unfold private let ublit_post_j
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a) (idx_dst:U32.t) (j:U32.t{valid_j_for_blit src idx_src dst idx_dst j})
(h0 h1:HS.mem)
= modifies (loc_buffer dst) h0 h1 /\ live h1 dst /\
(forall (i:nat).{:pattern (Seq.index (as_seq h1 dst) i)} (i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j ==>
Seq.index (as_seq h1 dst) i ==
Some (Seq.index (as_seq h0 src) (U32.v idx_src + i - U32.v idx_dst)))
) /\
Seq.slice (as_seq h1 dst) 0 (U32.v idx_dst) == Seq.slice (as_seq h0 dst) 0 (U32.v idx_dst) /\
Seq.slice (as_seq h1 dst) (U32.v idx_dst + U32.v j) (length dst) == Seq.slice (as_seq h0 dst) (U32.v idx_dst + U32.v j) (length dst) /\
(forall (i:nat).{:pattern (dst `initialized_at` i)} (i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j) ==>
dst `initialized_at` i)
let ublit (#a:Type0) (#rrel #rel:srel a)
(src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a{disjoint src dst}) (idx_dst:U32.t)
(len:U32.t{valid_j_for_blit src idx_src dst idx_dst len})
:HST.Stack unit (requires (fun h0 -> live h0 src /\ live h0 dst))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1))
= let rec aux (j:U32.t{valid_j_for_blit src idx_src dst idx_dst j})
:HST.Stack unit
(requires (fun h0 -> live h0 src /\ live h0 dst /\ ublit_post_j src idx_src dst idx_dst j h0 h0))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1))
= let open FStar.UInt32 in
if j = len then ()
else if j <^ len then begin
uupd dst (idx_dst +^ j) (index src (idx_src +^ j));
aux (j +^ 1ul)
end
in
aux 0ul
let witness_initialized (#a:Type0) (b:ubuffer a) (i:nat)
:HST.ST unit (fun h0 -> i < length b /\ Some? (Seq.index (as_seq h0 b) i))
(fun h0 _ h1 -> h0 == h1 /\ b `initialized_at` i)
= witness_p b (ipred i)
let recall_initialized (#a:Type0) (b:ubuffer a) (i:nat)
:HST.ST unit (fun h0 -> (recallable b \/ live h0 b) /\ b `initialized_at` i) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val recall_initialized (#a: Type0) (b: ubuffer a) (i: nat)
: HST.ST unit
(fun h0 -> (recallable b \/ live h0 b) /\ b `initialized_at` i)
(fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ (i < length b ==> Some? (Seq.index (as_seq h0 b) i))) | [] | LowStar.UninitializedBuffer.recall_initialized | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.UninitializedBuffer.ubuffer a -> i: Prims.nat -> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 24,
"end_line": 184,
"start_col": 4,
"start_line": 184
} |
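A hedged sketch pairing witness_initialized with recall_initialized: the initialized_at fact is stable under the buffer's preorder, so it can be witnessed once and recalled against a later heap. remember_then_recall is a hypothetical name; the snippet leans on the standard Seq.upd/Seq.index lemmas and has not been machine-verified.

let remember_then_recall (#a:Type0) (b:ubuffer a{length b > 0}) (v:a)
  :HST.ST unit (requires (fun h0 -> live h0 b)) (ensures (fun _ _ _ -> True))
  = uupd b 0ul v;             (* index 0 now holds Some v *)
    witness_initialized b 0;  (* record the stable fact *)
    recall_initialized b 0    (* later code can recover Some? at index 0 *)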
FStar.HyperStack.ST.Stack | val uupd (#a: Type0) (b: ubuffer a) (i: U32.t) (v: a)
: HST.Stack unit
(requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures
(fun h0 _ h1 ->
modifies (loc_buffer b) h0 h1 /\ live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\ b `initialized_at` (U32.v i))
) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i)) | val uupd (#a: Type0) (b: ubuffer a) (i: U32.t) (v: a)
: HST.Stack unit
(requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures
(fun h0 _ h1 ->
modifies (loc_buffer b) h0 h1 /\ live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\ b `initialized_at` (U32.v i))
)
let uupd (#a: Type0) (b: ubuffer a) (i: U32.t) (v: a)
: HST.Stack unit
(requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures
(fun h0 _ h1 ->
modifies (loc_buffer b) h0 h1 /\ live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\ b `initialized_at` (U32.v i))
) = | true | null | false | upd b i (Some v);
witness_p b (ipred (U32.v i)) | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"LowStar.UninitializedBuffer.ubuffer",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.witness_p",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.UninitializedBuffer.ipred",
"FStar.UInt32.v",
"Prims.unit",
"LowStar.Monotonic.Buffer.upd",
"FStar.Pervasives.Native.Some",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"Prims.b2t",
"Prims.op_LessThan",
"LowStar.Monotonic.Buffer.length",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_buffer",
"Prims.eq2",
"FStar.Seq.Base.seq",
"LowStar.Monotonic.Buffer.as_seq",
"FStar.Seq.Base.upd",
"LowStar.UninitializedBuffer.initialized_at"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\ | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val uupd (#a: Type0) (b: ubuffer a) (i: U32.t) (v: a)
: HST.Stack unit
(requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures
(fun h0 _ h1 ->
modifies (loc_buffer b) h0 h1 /\ live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\ b `initialized_at` (U32.v i))
) | [] | LowStar.UninitializedBuffer.uupd | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.UninitializedBuffer.ubuffer a -> i: FStar.UInt32.t -> v: a
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 33,
"end_line": 86,
"start_col": 4,
"start_line": 85
} |
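A hedged sketch of the uupd/uindex contract: writing an index initializes it, so an immediate read at the same index is allowed and should return the written value. set_then_get is a hypothetical name; discharging the postcondition relies on the standard Seq.upd/Seq.index lemmas and the sketch is unverified.

let set_then_get (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
  :HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b))
               (ensures  (fun _ r _ -> r == v))
  = uupd b i v;  (* as_seq now has Some v at index i, and b `initialized_at` (U32.v i) *)
    uindex b i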
FStar.HyperStack.ST.ST | val umalloc_partial (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer_or_null a (U32.v len) r {(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let umalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len | val umalloc_partial (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer_or_null a (U32.v len) r {(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
let umalloc_partial (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer_or_null a (U32.v len) r {(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) = | true | null | false | mmalloc r None len | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"FStar.Monotonic.HyperHeap.rid",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.mmalloc",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"FStar.Pervasives.Native.None",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.UInt32.v",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.freeable",
"LowStar.UninitializedBuffer.lubuffer_or_null",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.malloc_pre",
"LowStar.Monotonic.Buffer.alloc_partial_post_mem_common",
"FStar.Seq.Base.create"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i))
unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len}
unfold let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) =
b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
(*
* No initializer
*)
let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
inline_for_extraction
let ugcmalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
let umalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
inline_for_extraction
let umalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len)) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val umalloc_partial (#a: Type0) (r: HS.rid) (len: U32.t)
: HST.ST (b: lubuffer_or_null a (U32.v len) r {(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None))) | [] | LowStar.UninitializedBuffer.umalloc_partial | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Monotonic.HyperHeap.rid -> len: FStar.UInt32.t
-> FStar.HyperStack.ST.ST
(b:
LowStar.UninitializedBuffer.lubuffer_or_null a (FStar.UInt32.v len) r
{ Prims.op_Negation (LowStar.Monotonic.Buffer.g_is_null b) ==>
LowStar.Monotonic.Buffer.freeable b }) | {
"end_col": 22,
"end_line": 120,
"start_col": 4,
"start_line": 120
} |
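A hedged sketch of umalloc_partial: a null result signals allocation failure, and only a non-null buffer carries the freeable refinement needed by free. alloc_or_skip is a hypothetical name and 32ul is arbitrary; the snippet is an unverified illustration.

let alloc_or_skip (#a:Type0) (r:HS.rid{HST.is_eternal_region r})
  :HST.ST unit (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = let b = umalloc_partial #a r 32ul in
    if is_null b
    then ()      (* allocation failed; nothing to free *)
    else free b  (* non-null: freeable and live *)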
FStar.HyperStack.ST.ST | val witness_initialized (#a: Type0) (b: ubuffer a) (i: nat)
: HST.ST unit
(fun h0 -> i < length b /\ Some? (Seq.index (as_seq h0 b) i))
(fun h0 _ h1 -> h0 == h1 /\ b `initialized_at` i) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let witness_initialized (#a:Type0) (b:ubuffer a) (i:nat)
:HST.ST unit (fun h0 -> i < length b /\ Some? (Seq.index (as_seq h0 b) i))
(fun h0 _ h1 -> h0 == h1 /\ b `initialized_at` i)
= witness_p b (ipred i) | val witness_initialized (#a: Type0) (b: ubuffer a) (i: nat)
: HST.ST unit
(fun h0 -> i < length b /\ Some? (Seq.index (as_seq h0 b) i))
(fun h0 _ h1 -> h0 == h1 /\ b `initialized_at` i)
let witness_initialized (#a: Type0) (b: ubuffer a) (i: nat)
: HST.ST unit
(fun h0 -> i < length b /\ Some? (Seq.index (as_seq h0 b) i))
(fun h0 _ h1 -> h0 == h1 /\ b `initialized_at` i) = | true | null | false | witness_p b (ipred i) | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"LowStar.UninitializedBuffer.ubuffer",
"Prims.nat",
"LowStar.Monotonic.Buffer.witness_p",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.UninitializedBuffer.ipred",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThan",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.Seq.Base.index",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.eq2",
"LowStar.UninitializedBuffer.initialized_at"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i))
unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len}
unfold let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) =
b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
(*
* No initializer
*)
let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
inline_for_extraction
let ugcmalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
let umalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
inline_for_extraction
let umalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
let ualloca (#a:Type0) (len:U32.t)
:HST.StackInline (lubuffer a (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\
frameOf b == HS.get_tip h0))
= malloca None len
(*
* blit functionality, where src is a regular buffer
*)
[@@"opaque_to_smt"]
unfold let valid_j_for_blit
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a) (idx_dst:U32.t) (j:U32.t)
= U32.v idx_src + U32.v j <= length src /\
U32.v idx_dst + U32.v j <= length dst
(*
* postcondition of blit
*)
[@@"opaque_to_smt"]
unfold private let ublit_post_j
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a) (idx_dst:U32.t) (j:U32.t{valid_j_for_blit src idx_src dst idx_dst j})
(h0 h1:HS.mem)
= modifies (loc_buffer dst) h0 h1 /\ live h1 dst /\
(forall (i:nat).{:pattern (Seq.index (as_seq h1 dst) i)} (i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j ==>
Seq.index (as_seq h1 dst) i ==
Some (Seq.index (as_seq h0 src) (U32.v idx_src + i - U32.v idx_dst)))
) /\
Seq.slice (as_seq h1 dst) 0 (U32.v idx_dst) == Seq.slice (as_seq h0 dst) 0 (U32.v idx_dst) /\
Seq.slice (as_seq h1 dst) (U32.v idx_dst + U32.v j) (length dst) == Seq.slice (as_seq h0 dst) (U32.v idx_dst + U32.v j) (length dst) /\
(forall (i:nat).{:pattern (dst `initialized_at` i)} (i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j) ==>
dst `initialized_at` i)
let ublit (#a:Type0) (#rrel #rel:srel a)
(src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a{disjoint src dst}) (idx_dst:U32.t)
(len:U32.t{valid_j_for_blit src idx_src dst idx_dst len})
:HST.Stack unit (requires (fun h0 -> live h0 src /\ live h0 dst))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1))
= let rec aux (j:U32.t{valid_j_for_blit src idx_src dst idx_dst j})
:HST.Stack unit
(requires (fun h0 -> live h0 src /\ live h0 dst /\ ublit_post_j src idx_src dst idx_dst j h0 h0))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1))
= let open FStar.UInt32 in
if j = len then ()
else if j <^ len then begin
uupd dst (idx_dst +^ j) (index src (idx_src +^ j));
aux (j +^ 1ul)
end
in
aux 0ul
let witness_initialized (#a:Type0) (b:ubuffer a) (i:nat)
:HST.ST unit (fun h0 -> i < length b /\ Some? (Seq.index (as_seq h0 b) i)) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val witness_initialized (#a: Type0) (b: ubuffer a) (i: nat)
: HST.ST unit
(fun h0 -> i < length b /\ Some? (Seq.index (as_seq h0 b) i))
(fun h0 _ h1 -> h0 == h1 /\ b `initialized_at` i) | [] | LowStar.UninitializedBuffer.witness_initialized | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.UninitializedBuffer.ubuffer a -> i: Prims.nat -> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 25,
"end_line": 179,
"start_col": 4,
"start_line": 179
} |
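`witness_initialized` and `recall_initialized` are the witness/recall pair for the stable `initialized_at` predicate. A hedged round-trip sketch (the wrapper name is illustrative and it assumes the same module context as the file above; note that `uupd` already records the witness on its own, so the explicit `witness_initialized` call is only there to show its precondition being met):

```fstar
let witness_recall_roundtrip (#a:Type0) (b:ubuffer a) (x:a)
  : HST.Stack unit
    (requires (fun h -> live h b /\ length b > 0))
    (ensures  (fun _ _ h1 -> live h1 b))
= uupd b 0ul x;              (* index 0 now holds Some x *)
  witness_initialized b 0;   (* requires Some? at index 0, given by uupd *)
  recall_initialized b 0     (* later code can recover Some? at index 0 *)
```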
FStar.Pervasives.Lemma | val buffer_immutable_buffer_disjoint
(#ti: Type)
(#t: Type0)
(bi: LowStar.ImmutableBuffer.ibuffer ti)
(b: ubuffer t)
(h: HS.mem)
: Lemma (requires (live h b /\ live h bi /\ (exists (x: t). True))) (ensures (disjoint b bi)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffer_immutable_buffer_disjoint
(#ti:Type) (#t:Type0)
(bi:LowStar.ImmutableBuffer.ibuffer ti)
(b:ubuffer t)
(h: HS.mem)
: Lemma
(requires (
live h b /\
live h bi /\
(exists (x:t). True ) // If the type is not inhabited, the initialization and immutable preorders are effectively identical
))
(ensures (
disjoint b bi
))
= if length b = 0
then empty_disjoint b bi
else if length bi = 0
then empty_disjoint bi b
else begin
let open LowStar.ImmutableBuffer in
let s = as_seq h b in
let s0 = Seq.upd s 0 None in
let s1 = Seq.upd s 0 (Some (FStar.IndefiniteDescription.indefinite_description_ghost t (fun _ -> True))) in
assert(initialization_preorder _ s0 s1 /\
Seq.index s0 0 =!= Seq.index s1 0 /\
~( immutable_preorder _ s0 s1 <==> initialization_preorder _ s0 s1));
live_same_addresses_equal_types_and_preorders b bi h
end | val buffer_immutable_buffer_disjoint
(#ti: Type)
(#t: Type0)
(bi: LowStar.ImmutableBuffer.ibuffer ti)
(b: ubuffer t)
(h: HS.mem)
: Lemma (requires (live h b /\ live h bi /\ (exists (x: t). True))) (ensures (disjoint b bi))
let buffer_immutable_buffer_disjoint
(#ti: Type)
(#t: Type0)
(bi: LowStar.ImmutableBuffer.ibuffer ti)
(b: ubuffer t)
(h: HS.mem)
: Lemma (requires (live h b /\ live h bi /\ (exists (x: t). True))) (ensures (disjoint b bi)) = | false | null | true | if length b = 0
then empty_disjoint b bi
else
if length bi = 0
then empty_disjoint bi b
else
let open LowStar.ImmutableBuffer in
let s = as_seq h b in
let s0 = Seq.upd s 0 None in
let s1 =
Seq.upd s
0
(Some (FStar.IndefiniteDescription.indefinite_description_ghost t (fun _ -> True)))
in
assert (initialization_preorder _ s0 s1 /\ Seq.index s0 0 =!= Seq.index s1 0 /\
~(immutable_preorder _ s0 s1 <==> initialization_preorder _ s0 s1));
live_same_addresses_equal_types_and_preorders b bi h | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [
"lemma"
] | [
"LowStar.ImmutableBuffer.ibuffer",
"LowStar.UninitializedBuffer.ubuffer",
"FStar.Monotonic.HyperStack.mem",
"Prims.op_Equality",
"Prims.int",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.Monotonic.Buffer.empty_disjoint",
"LowStar.ImmutableBuffer.immutable_preorder",
"Prims.bool",
"LowStar.Monotonic.Buffer.live_same_addresses_equal_types_and_preorders",
"Prims.unit",
"Prims._assert",
"Prims.l_and",
"Prims.l_not",
"Prims.eq2",
"FStar.Seq.Base.index",
"Prims.l_iff",
"FStar.Seq.Base.seq",
"FStar.Seq.Base.upd",
"FStar.Pervasives.Native.Some",
"FStar.IndefiniteDescription.indefinite_description_ghost",
"Prims.l_True",
"Prims.prop",
"FStar.Pervasives.Native.None",
"LowStar.Monotonic.Buffer.as_seq",
"LowStar.Monotonic.Buffer.live",
"Prims.l_Exists",
"Prims.squash",
"LowStar.Monotonic.Buffer.disjoint",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i))
unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len}
unfold let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) =
b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
(*
* No initializer
*)
let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
inline_for_extraction
let ugcmalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
let umalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
inline_for_extraction
let umalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
let ualloca (#a:Type0) (len:U32.t)
:HST.StackInline (lubuffer a (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\
frameOf b == HS.get_tip h0))
= malloca None len
(*
* blit functionality, where src is a regular buffer
*)
[@@"opaque_to_smt"]
unfold let valid_j_for_blit
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a) (idx_dst:U32.t) (j:U32.t)
= U32.v idx_src + U32.v j <= length src /\
U32.v idx_dst + U32.v j <= length dst
(*
* postcondition of blit
*)
[@@"opaque_to_smt"]
unfold private let ublit_post_j
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a) (idx_dst:U32.t) (j:U32.t{valid_j_for_blit src idx_src dst idx_dst j})
(h0 h1:HS.mem)
= modifies (loc_buffer dst) h0 h1 /\ live h1 dst /\
(forall (i:nat).{:pattern (Seq.index (as_seq h1 dst) i)} (i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j ==>
Seq.index (as_seq h1 dst) i ==
Some (Seq.index (as_seq h0 src) (U32.v idx_src + i - U32.v idx_dst)))
) /\
Seq.slice (as_seq h1 dst) 0 (U32.v idx_dst) == Seq.slice (as_seq h0 dst) 0 (U32.v idx_dst) /\
Seq.slice (as_seq h1 dst) (U32.v idx_dst + U32.v j) (length dst) == Seq.slice (as_seq h0 dst) (U32.v idx_dst + U32.v j) (length dst) /\
(forall (i:nat).{:pattern (dst `initialized_at` i)} (i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j) ==>
dst `initialized_at` i)
let ublit (#a:Type0) (#rrel #rel:srel a)
(src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a{disjoint src dst}) (idx_dst:U32.t)
(len:U32.t{valid_j_for_blit src idx_src dst idx_dst len})
:HST.Stack unit (requires (fun h0 -> live h0 src /\ live h0 dst))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1))
= let rec aux (j:U32.t{valid_j_for_blit src idx_src dst idx_dst j})
:HST.Stack unit
(requires (fun h0 -> live h0 src /\ live h0 dst /\ ublit_post_j src idx_src dst idx_dst j h0 h0))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1))
= let open FStar.UInt32 in
if j = len then ()
else if j <^ len then begin
uupd dst (idx_dst +^ j) (index src (idx_src +^ j));
aux (j +^ 1ul)
end
in
aux 0ul
let witness_initialized (#a:Type0) (b:ubuffer a) (i:nat)
:HST.ST unit (fun h0 -> i < length b /\ Some? (Seq.index (as_seq h0 b) i))
(fun h0 _ h1 -> h0 == h1 /\ b `initialized_at` i)
= witness_p b (ipred i)
let recall_initialized (#a:Type0) (b:ubuffer a) (i:nat)
:HST.ST unit (fun h0 -> (recallable b \/ live h0 b) /\ b `initialized_at` i)
(fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ (i < length b ==> Some? (Seq.index (as_seq h0 b) i)))
= recall_p b (ipred i)
let buffer_immutable_buffer_disjoint
(#ti:Type) (#t:Type0)
(bi:LowStar.ImmutableBuffer.ibuffer ti)
(b:ubuffer t)
(h: HS.mem)
: Lemma
(requires (
live h b /\
live h bi /\
(exists (x:t). True ) // If the type is not inhabited, the initialization and immutable preorders are effectively identical
))
(ensures (
disjoint b bi | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffer_immutable_buffer_disjoint
(#ti: Type)
(#t: Type0)
(bi: LowStar.ImmutableBuffer.ibuffer ti)
(b: ubuffer t)
(h: HS.mem)
: Lemma (requires (live h b /\ live h bi /\ (exists (x: t). True))) (ensures (disjoint b bi)) | [] | LowStar.UninitializedBuffer.buffer_immutable_buffer_disjoint | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
bi: LowStar.ImmutableBuffer.ibuffer ti ->
b: LowStar.UninitializedBuffer.ubuffer t ->
h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.live h b /\ LowStar.Monotonic.Buffer.live h bi /\
(exists (x: t). Prims.l_True)) (ensures LowStar.Monotonic.Buffer.disjoint b bi) | {
"end_col": 5,
"end_line": 213,
"start_col": 2,
"start_line": 200
} |
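A small client-side sketch of how the lemma above is meant to be invoked: the inhabitation hypothesis is discharged by exhibiting any value of the element type (here `bool`, with witness `true`), after which disjointness of the two live buffers follows. The wrapper is illustrative, not part of the module:

```fstar
let disjointness_client (bi:LowStar.ImmutableBuffer.ibuffer bool)
                        (b:ubuffer bool) (h:HS.mem)
  : Lemma (requires (live h b /\ live h bi))
          (ensures  (disjoint b bi))
= FStar.Classical.exists_intro (fun (_:bool) -> True) true;  (* exists (x:bool). True *)
  buffer_immutable_buffer_disjoint bi b h
```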
FStar.HyperStack.ST.Stack | val ublit
(#a: Type0)
(#rrel #rel: srel a)
(src: mbuffer a rrel rel)
(idx_src: U32.t)
(dst: ubuffer a {disjoint src dst})
(idx_dst: U32.t)
(len: U32.t{valid_j_for_blit src idx_src dst idx_dst len})
: HST.Stack unit
(requires (fun h0 -> live h0 src /\ live h0 dst))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ublit (#a:Type0) (#rrel #rel:srel a)
(src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a{disjoint src dst}) (idx_dst:U32.t)
(len:U32.t{valid_j_for_blit src idx_src dst idx_dst len})
:HST.Stack unit (requires (fun h0 -> live h0 src /\ live h0 dst))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1))
= let rec aux (j:U32.t{valid_j_for_blit src idx_src dst idx_dst j})
:HST.Stack unit
(requires (fun h0 -> live h0 src /\ live h0 dst /\ ublit_post_j src idx_src dst idx_dst j h0 h0))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1))
= let open FStar.UInt32 in
if j = len then ()
else if j <^ len then begin
uupd dst (idx_dst +^ j) (index src (idx_src +^ j));
aux (j +^ 1ul)
end
in
aux 0ul | val ublit
(#a: Type0)
(#rrel #rel: srel a)
(src: mbuffer a rrel rel)
(idx_src: U32.t)
(dst: ubuffer a {disjoint src dst})
(idx_dst: U32.t)
(len: U32.t{valid_j_for_blit src idx_src dst idx_dst len})
: HST.Stack unit
(requires (fun h0 -> live h0 src /\ live h0 dst))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1))
let ublit
(#a: Type0)
(#rrel #rel: srel a)
(src: mbuffer a rrel rel)
(idx_src: U32.t)
(dst: ubuffer a {disjoint src dst})
(idx_dst: U32.t)
(len: U32.t{valid_j_for_blit src idx_src dst idx_dst len})
: HST.Stack unit
(requires (fun h0 -> live h0 src /\ live h0 dst))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1)) = | true | null | false | let rec aux (j: U32.t{valid_j_for_blit src idx_src dst idx_dst j})
: HST.Stack unit
(requires
(fun h0 -> live h0 src /\ live h0 dst /\ ublit_post_j src idx_src dst idx_dst j h0 h0))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1)) =
let open FStar.UInt32 in
if j = len
then ()
else
if j <^ len
then
(uupd dst (idx_dst +^ j) (index src (idx_src +^ j));
aux (j +^ 1ul))
in
aux 0ul | {
"checked_file": "LowStar.UninitializedBuffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ImmutableBuffer.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IndefiniteDescription.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.UninitializedBuffer.fst"
} | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"LowStar.UninitializedBuffer.ubuffer",
"LowStar.Monotonic.Buffer.disjoint",
"FStar.Pervasives.Native.option",
"LowStar.UninitializedBuffer.initialization_preorder",
"LowStar.UninitializedBuffer.valid_j_for_blit",
"FStar.UInt32.__uint_to_t",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"LowStar.UninitializedBuffer.ublit_post_j",
"Prims.op_Equality",
"Prims.l_or",
"Prims.bool",
"FStar.UInt32.op_Less_Hat",
"FStar.UInt32.op_Plus_Hat",
"LowStar.UninitializedBuffer.uupd",
"LowStar.Monotonic.Buffer.index"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.UninitializedBuffer
include LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(*
* Uninitialized buffers
*
* Modeled as: seq (option a) with a preorder that an index once set remains set
*)
private let initialization_preorder (a:Type0) :srel (option a) =
fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
(forall (i:nat).{:pattern (Seq.index s2 i)} i < Seq.length s1 ==> Some? (Seq.index s1 i) ==> Some? (Seq.index s2 i))
type ubuffer (a:Type0) =
mbuffer (option a) (initialization_preorder a) (initialization_preorder a)
unfold let unull (#a:Type0) :ubuffer a = mnull #(option a) #(initialization_preorder a) #(initialization_preorder a)
unfold let gsub (#a:Type0) = mgsub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
unfold let gsub_inj (#a:Type0) = mgsub_inj #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a) (initialization_preorder a)
inline_for_extraction
type pointer (a:Type0) = b:ubuffer a{length b == 1}
inline_for_extraction
type pointer_or_null (a:Type0) = b:ubuffer a{if g_is_null b then True else length b == 1}
inline_for_extraction let usub (#a:Type0) = msub #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
inline_for_extraction let uoffset (#a:Type0) = moffset #(option a) #(initialization_preorder a) #(initialization_preorder a) (initialization_preorder a)
(****** main stateful API *****)
(*
* b `initialized_at` i: is a stable predicate that witnesses the initialization of an index i in ubuffer b
*)
private let ipred (#a:Type0) (i:nat) :spred (option a) = fun s -> i < Seq.length s ==> Some? (Seq.index s i)
let initialized_at (#a:Type0) (b:ubuffer a) (i:nat) :Type0 = witnessed b (ipred i)
(*
* Clients need to prove that b is initialized_at i
*)
let uindex (#a:Type0) (b:ubuffer a) (i:U32.t)
:HST.Stack a (requires (fun h0 -> live h0 b /\ U32.v i < length b /\ b `initialized_at` (U32.v i)))
(ensures (fun h0 y h1 -> let y_opt = Seq.index (as_seq h0 b) (U32.v i) in
Some? y_opt /\ y == Some?.v y_opt /\ h0 == h1))
= let y_opt = index b i in
recall_p b (ipred (U32.v i));
Some?.v y_opt
(*
* b `initialized_at` i is a postcondition
*)
let uupd (#a:Type0) (b:ubuffer a) (i:U32.t) (v:a)
:HST.Stack unit (requires (fun h0 -> live h0 b /\ U32.v i < length b))
(ensures (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
live h1 b /\
as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) (Some v) /\
b `initialized_at` (U32.v i)))
= upd b i (Some v);
witness_p b (ipred (U32.v i))
unfold let lubuffer (a:Type0) (len:nat) = b:ubuffer a{length b == len}
unfold let lubuffer_or_null (a:Type0) (len:nat) (r:HS.rid) =
b:ubuffer a{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
(*
* No initializer
*)
let ugcmalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
inline_for_extraction
let ugcmalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{recallable b})
(requires (fun h0 -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mgcmalloc r None len
let umalloc (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer a (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
inline_for_extraction
let umalloc_partial (#a:Type0) (r:HS.rid) (len:U32.t)
:HST.ST (b:lubuffer_or_null a (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) None)))
= mmalloc r None len
let ualloca (#a:Type0) (len:U32.t)
:HST.StackInline (lubuffer a (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) None) /\
frameOf b == HS.get_tip h0))
= malloca None len
(*
* blit functionality, where src is a regular buffer
*)
[@@"opaque_to_smt"]
unfold let valid_j_for_blit
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a) (idx_dst:U32.t) (j:U32.t)
= U32.v idx_src + U32.v j <= length src /\
U32.v idx_dst + U32.v j <= length dst
(*
* postcondition of blit
*)
[@@"opaque_to_smt"]
unfold private let ublit_post_j
(#a:Type0) (#rrel #rel:srel a) (src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a) (idx_dst:U32.t) (j:U32.t{valid_j_for_blit src idx_src dst idx_dst j})
(h0 h1:HS.mem)
= modifies (loc_buffer dst) h0 h1 /\ live h1 dst /\
(forall (i:nat).{:pattern (Seq.index (as_seq h1 dst) i)} (i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j ==>
Seq.index (as_seq h1 dst) i ==
Some (Seq.index (as_seq h0 src) (U32.v idx_src + i - U32.v idx_dst)))
) /\
Seq.slice (as_seq h1 dst) 0 (U32.v idx_dst) == Seq.slice (as_seq h0 dst) 0 (U32.v idx_dst) /\
Seq.slice (as_seq h1 dst) (U32.v idx_dst + U32.v j) (length dst) == Seq.slice (as_seq h0 dst) (U32.v idx_dst + U32.v j) (length dst) /\
(forall (i:nat).{:pattern (dst `initialized_at` i)} (i >= U32.v idx_dst /\ i < U32.v idx_dst + U32.v j) ==>
dst `initialized_at` i)
let ublit (#a:Type0) (#rrel #rel:srel a)
(src:mbuffer a rrel rel) (idx_src:U32.t)
(dst:ubuffer a{disjoint src dst}) (idx_dst:U32.t)
(len:U32.t{valid_j_for_blit src idx_src dst idx_dst len})
:HST.Stack unit (requires (fun h0 -> live h0 src /\ live h0 dst)) | false | false | LowStar.UninitializedBuffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ublit
(#a: Type0)
(#rrel #rel: srel a)
(src: mbuffer a rrel rel)
(idx_src: U32.t)
(dst: ubuffer a {disjoint src dst})
(idx_dst: U32.t)
(len: U32.t{valid_j_for_blit src idx_src dst idx_dst len})
: HST.Stack unit
(requires (fun h0 -> live h0 src /\ live h0 dst))
(ensures (fun h0 _ h1 -> ublit_post_j src idx_src dst idx_dst len h0 h1)) | [] | LowStar.UninitializedBuffer.ublit | {
"file_name": "ulib/LowStar.UninitializedBuffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
src: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
idx_src: FStar.UInt32.t ->
dst: LowStar.UninitializedBuffer.ubuffer a {LowStar.Monotonic.Buffer.disjoint src dst} ->
idx_dst: FStar.UInt32.t ->
len: FStar.UInt32.t{LowStar.UninitializedBuffer.valid_j_for_blit src idx_src dst idx_dst len}
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 11,
"end_line": 174,
"start_col": 3,
"start_line": 163
} |
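The unfolded postcondition of `ublit` marks every copied index of `dst` as `initialized_at`, so a read may follow a full-prefix blit without a separate witnessing step. A hedged sketch (the wrapper name and the `0 < v len` assumption are illustrative):

```fstar
let blit_then_read (#a:Type0) (#rrel #rel:srel a)
    (src:mbuffer a rrel rel) (dst:ubuffer a{disjoint src dst})
    (len:U32.t{0 < U32.v len /\ U32.v len <= length src /\ U32.v len <= length dst})
  : HST.Stack a
    (requires (fun h -> live h src /\ live h dst))
    (ensures  (fun _ _ _ -> True))
= ublit src 0ul dst 0ul len;   (* initializes indices 0 .. v len - 1 of dst *)
  uindex dst 0ul               (* allowed: dst `initialized_at` 0 holds now *)
```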
Prims.Tot | val chacha20_constants:
b:glbuffer size_t 4ul{recallable b /\ witnessed b Spec.Chacha20.chacha20_constants} | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loop"
},
{
"abbrev": true,
"full_module": "Spec.Chacha20",
"short_module": "Spec"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Chacha20.Core32",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let chacha20_constants =
[@ inline_let]
let l = [Spec.c0;Spec.c1;Spec.c2;Spec.c3] in
assert_norm(List.Tot.length l == 4);
createL_global l | val chacha20_constants:
b:glbuffer size_t 4ul{recallable b /\ witnessed b Spec.Chacha20.chacha20_constants}
let chacha20_constants = | false | null | false | [@@ inline_let ]let l = [Spec.c0; Spec.c1; Spec.c2; Spec.c3] in
assert_norm (List.Tot.length l == 4);
createL_global l | {
"checked_file": "Hacl.Impl.Chacha20.fst.checked",
"dependencies": [
"Spec.Chacha20.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Chacha20.Core32.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Chacha20.fst"
} | [
"total"
] | [
"Lib.Buffer.createL_global",
"Lib.IntTypes.size_t",
"Lib.Buffer.glbuffer",
"Lib.IntTypes.size",
"FStar.Pervasives.normalize_term",
"Lib.IntTypes.size_nat",
"FStar.List.Tot.Base.length",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.eq2",
"Prims.int",
"FStar.UInt32.__uint_to_t",
"Prims.l_and",
"Lib.Buffer.recallable",
"Lib.Buffer.CONST",
"Lib.Buffer.witnessed",
"Spec.Chacha20.chacha20_constants",
"Prims.list",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Prims.Cons",
"Spec.Chacha20.c0",
"Spec.Chacha20.c1",
"Spec.Chacha20.c2",
"Spec.Chacha20.c3",
"Prims.Nil"
] | [] | module Hacl.Impl.Chacha20
open FStar.HyperStack
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Lib.ByteBuffer
open Hacl.Impl.Chacha20.Core32
module ST = FStar.HyperStack.ST
module Spec = Spec.Chacha20
module Loop = Lib.LoopCombinators
#set-options "--z3rlimit 200 --max_fuel 2"
//#set-options "--debug Hacl.Impl.Curve25519.Generic --debug_level ExtractNorm"
val rounds:
st:state
-> Stack unit
(requires fun h -> live h st)
(ensures fun h0 _ h1 -> modifies (loc st) h0 h1 /\
as_seq h1 st == Spec.rounds (as_seq h0 st))
[@ CInline]
let rounds st =
let h0 = ST.get () in
Loop.eq_repeat0 #Spec.state Spec.double_round (as_seq h0 st);
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 0;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 1;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 2;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 3;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 4;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 5;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 6;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 7;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 8;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 9;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st
val chacha20_core:
k:state
-> ctx0:state
-> ctr:size_t ->
Stack unit
(requires fun h -> live h ctx0 /\ live h k /\ disjoint ctx0 k)
(ensures fun h0 _ h1 -> modifies (loc k) h0 h1 /\
as_seq h1 k == Spec.chacha20_core (v ctr) (as_seq h0 ctx0))
[@ CInline ]
let chacha20_core k ctx ctr =
copy_state k ctx;
let ctr_u32 = size_to_uint32 ctr in
k.(12ul) <- k.(12ul) +. ctr_u32;
rounds k;
sum_state k ctx;
k.(12ul) <- k.(12ul) +. ctr_u32
val chacha20_constants:
b:glbuffer size_t 4ul{recallable b /\ witnessed b Spec.Chacha20.chacha20_constants} | false | false | Hacl.Impl.Chacha20.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val chacha20_constants:
b:glbuffer size_t 4ul{recallable b /\ witnessed b Spec.Chacha20.chacha20_constants} | [] | Hacl.Impl.Chacha20.chacha20_constants | {
"file_name": "code/chacha20/Hacl.Impl.Chacha20.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b:
Lib.Buffer.glbuffer Lib.IntTypes.size_t 4ul
{Lib.Buffer.recallable b /\ Lib.Buffer.witnessed b Spec.Chacha20.chacha20_constants} | {
"end_col": 18,
"end_line": 79,
"start_col": 2,
"start_line": 76
} |
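Since `chacha20_constants` is a recallable global with a witnessed value, a reader first calls `recall_contents` (exactly as `chacha20_init` does in a later record) before indexing it. A small hedged sketch, assuming the same opens as Hacl.Impl.Chacha20; the wrapper name is illustrative:

```fstar
let first_constant ()
  : Stack size_t
    (requires (fun _ -> True))
    (ensures  (fun h0 _ h1 -> h0 == h1))
= recall_contents chacha20_constants Spec.Chacha20.chacha20_constants;
  index chacha20_constants 0ul   (* in-bounds: the buffer has length 4 *)
```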
FStar.HyperStack.ST.Stack | val chacha20_core:
k:state
-> ctx0:state
-> ctr:size_t ->
Stack unit
(requires fun h -> live h ctx0 /\ live h k /\ disjoint ctx0 k)
(ensures fun h0 _ h1 -> modifies (loc k) h0 h1 /\
as_seq h1 k == Spec.chacha20_core (v ctr) (as_seq h0 ctx0)) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loop"
},
{
"abbrev": true,
"full_module": "Spec.Chacha20",
"short_module": "Spec"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Chacha20.Core32",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let chacha20_core k ctx ctr =
copy_state k ctx;
let ctr_u32 = size_to_uint32 ctr in
k.(12ul) <- k.(12ul) +. ctr_u32;
rounds k;
sum_state k ctx;
k.(12ul) <- k.(12ul) +. ctr_u32 | val chacha20_core:
k:state
-> ctx0:state
-> ctr:size_t ->
Stack unit
(requires fun h -> live h ctx0 /\ live h k /\ disjoint ctx0 k)
(ensures fun h0 _ h1 -> modifies (loc k) h0 h1 /\
as_seq h1 k == Spec.chacha20_core (v ctr) (as_seq h0 ctx0))
let chacha20_core k ctx ctr = | true | null | false | copy_state k ctx;
let ctr_u32 = size_to_uint32 ctr in
k.(12ul) <- k.(12ul) +. ctr_u32;
rounds k;
sum_state k ctx;
k.(12ul) <- k.(12ul) +. ctr_u32 | {
"checked_file": "Hacl.Impl.Chacha20.fst.checked",
"dependencies": [
"Spec.Chacha20.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Chacha20.Core32.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Chacha20.fst"
} | [] | [
"Hacl.Impl.Chacha20.Core32.state",
"Lib.IntTypes.size_t",
"Lib.Buffer.op_Array_Assignment",
"Lib.IntTypes.uint32",
"FStar.UInt32.__uint_to_t",
"Prims.unit",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U32",
"Lib.IntTypes.SEC",
"Lib.IntTypes.op_Plus_Dot",
"Lib.Buffer.op_Array_Access",
"Lib.Buffer.MUT",
"Hacl.Impl.Chacha20.Core32.sum_state",
"Hacl.Impl.Chacha20.rounds",
"Prims.eq2",
"Lib.IntTypes.mk_int",
"Lib.IntTypes.v",
"Lib.IntTypes.PUB",
"Lib.IntTypes.size_to_uint32",
"Hacl.Impl.Chacha20.Core32.copy_state"
] | [] | module Hacl.Impl.Chacha20
open FStar.HyperStack
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Lib.ByteBuffer
open Hacl.Impl.Chacha20.Core32
module ST = FStar.HyperStack.ST
module Spec = Spec.Chacha20
module Loop = Lib.LoopCombinators
#set-options "--z3rlimit 200 --max_fuel 2"
//#set-options "--debug Hacl.Impl.Curve25519.Generic --debug_level ExtractNorm"
val rounds:
st:state
-> Stack unit
(requires fun h -> live h st)
(ensures fun h0 _ h1 -> modifies (loc st) h0 h1 /\
as_seq h1 st == Spec.rounds (as_seq h0 st))
[@ CInline]
let rounds st =
let h0 = ST.get () in
Loop.eq_repeat0 #Spec.state Spec.double_round (as_seq h0 st);
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 0;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 1;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 2;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 3;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 4;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 5;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 6;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 7;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 8;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 9;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st
val chacha20_core:
k:state
-> ctx0:state
-> ctr:size_t ->
Stack unit
(requires fun h -> live h ctx0 /\ live h k /\ disjoint ctx0 k)
(ensures fun h0 _ h1 -> modifies (loc k) h0 h1 /\
as_seq h1 k == Spec.chacha20_core (v ctr) (as_seq h0 ctx0))
[@ CInline ] | false | false | Hacl.Impl.Chacha20.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val chacha20_core:
k:state
-> ctx0:state
-> ctr:size_t ->
Stack unit
(requires fun h -> live h ctx0 /\ live h k /\ disjoint ctx0 k)
(ensures fun h0 _ h1 -> modifies (loc k) h0 h1 /\
as_seq h1 k == Spec.chacha20_core (v ctr) (as_seq h0 ctx0)) | [] | Hacl.Impl.Chacha20.chacha20_core | {
"file_name": "code/chacha20/Hacl.Impl.Chacha20.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
k: Hacl.Impl.Chacha20.Core32.state ->
ctx0: Hacl.Impl.Chacha20.Core32.state ->
ctr: Lib.IntTypes.size_t
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 33,
"end_line": 70,
"start_col": 2,
"start_line": 65
} |
FStar.HyperStack.ST.Stack | val chacha20_update:
ctx:state
-> len:size_t
-> out:lbuffer uint8 len
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
eq_or_disjoint text out /\ disjoint text ctx /\ disjoint out ctx)
(ensures fun h0 _ h1 -> modifies (loc ctx |+| loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_update (as_seq h0 ctx) (as_seq h0 text)) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loop"
},
{
"abbrev": true,
"full_module": "Spec.Chacha20",
"short_module": "Spec"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Chacha20.Core32",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let chacha20_update ctx len out text =
push_frame();
let blocks = len /. size 64 in
let rem = len %. size 64 in
let h0 = ST.get() in
map_blocks h0 len 64ul text out
(fun h -> Spec.chacha20_encrypt_block (as_seq h0 ctx))
(fun h -> Spec.chacha20_encrypt_last (as_seq h0 ctx))
(fun i -> chacha20_encrypt_block ctx (sub out (i *! 64ul) 64ul) i (sub text (i *! 64ul) 64ul))
(fun i -> chacha20_encrypt_last ctx rem (sub out (i *! 64ul) rem) i (sub text (i *! 64ul) rem));
pop_frame() | val chacha20_update:
ctx:state
-> len:size_t
-> out:lbuffer uint8 len
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
eq_or_disjoint text out /\ disjoint text ctx /\ disjoint out ctx)
(ensures fun h0 _ h1 -> modifies (loc ctx |+| loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_update (as_seq h0 ctx) (as_seq h0 text))
let chacha20_update ctx len out text = | true | null | false | push_frame ();
let blocks = len /. size 64 in
let rem = len %. size 64 in
let h0 = ST.get () in
map_blocks h0
len
64ul
text
out
(fun h -> Spec.chacha20_encrypt_block (as_seq h0 ctx))
(fun h -> Spec.chacha20_encrypt_last (as_seq h0 ctx))
(fun i -> chacha20_encrypt_block ctx (sub out (i *! 64ul) 64ul) i (sub text (i *! 64ul) 64ul))
(fun i -> chacha20_encrypt_last ctx rem (sub out (i *! 64ul) rem) i (sub text (i *! 64ul) rem));
pop_frame () | {
"checked_file": "Hacl.Impl.Chacha20.fst.checked",
"dependencies": [
"Spec.Chacha20.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Chacha20.Core32.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Chacha20.fst"
} | [] | [
"Hacl.Impl.Chacha20.Core32.state",
"Lib.IntTypes.size_t",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Lib.Buffer.map_blocks",
"Lib.Buffer.MUT",
"FStar.UInt32.__uint_to_t",
"FStar.Monotonic.HyperStack.mem",
"Spec.Chacha20.chacha20_encrypt_block",
"Lib.Buffer.as_seq",
"Lib.IntTypes.uint32",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"Prims.op_Division",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Lib.Sequence.lseq",
"Spec.Chacha20.chacha20_encrypt_last",
"Prims.eq2",
"Prims.int",
"Lib.IntTypes.size_nat",
"Hacl.Impl.Chacha20.chacha20_encrypt_block",
"Lib.Buffer.lbuffer_t",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"FStar.UInt32.uint_to_t",
"FStar.UInt32.t",
"Lib.Buffer.sub",
"Lib.IntTypes.op_Star_Bang",
"Hacl.Impl.Chacha20.chacha20_encrypt_last",
"FStar.HyperStack.ST.get",
"Lib.IntTypes.op_Percent_Dot",
"Lib.IntTypes.size",
"Lib.IntTypes.op_Slash_Dot",
"FStar.HyperStack.ST.push_frame"
] | [] | module Hacl.Impl.Chacha20
open FStar.HyperStack
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Lib.ByteBuffer
open Hacl.Impl.Chacha20.Core32
module ST = FStar.HyperStack.ST
module Spec = Spec.Chacha20
module Loop = Lib.LoopCombinators
#set-options "--z3rlimit 200 --max_fuel 2"
//#set-options "--debug Hacl.Impl.Curve25519.Generic --debug_level ExtractNorm"
val rounds:
st:state
-> Stack unit
(requires fun h -> live h st)
(ensures fun h0 _ h1 -> modifies (loc st) h0 h1 /\
as_seq h1 st == Spec.rounds (as_seq h0 st))
[@ CInline]
let rounds st =
let h0 = ST.get () in
Loop.eq_repeat0 #Spec.state Spec.double_round (as_seq h0 st);
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 0;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 1;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 2;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 3;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 4;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 5;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 6;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 7;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 8;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 9;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st
val chacha20_core:
k:state
-> ctx0:state
-> ctr:size_t ->
Stack unit
(requires fun h -> live h ctx0 /\ live h k /\ disjoint ctx0 k)
(ensures fun h0 _ h1 -> modifies (loc k) h0 h1 /\
as_seq h1 k == Spec.chacha20_core (v ctr) (as_seq h0 ctx0))
[@ CInline ]
let chacha20_core k ctx ctr =
copy_state k ctx;
let ctr_u32 = size_to_uint32 ctr in
k.(12ul) <- k.(12ul) +. ctr_u32;
rounds k;
sum_state k ctx;
k.(12ul) <- k.(12ul) +. ctr_u32
val chacha20_constants:
b:glbuffer size_t 4ul{recallable b /\ witnessed b Spec.Chacha20.chacha20_constants}
let chacha20_constants =
[@ inline_let]
let l = [Spec.c0;Spec.c1;Spec.c2;Spec.c3] in
assert_norm(List.Tot.length l == 4);
createL_global l
val chacha20_init:
ctx:state
-> k:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr0:size_t ->
Stack unit
(requires fun h ->
live h ctx /\ live h k /\ live h n /\
disjoint ctx k /\ disjoint ctx n /\
as_seq h ctx == Lib.Sequence.create 16 (u32 0))
(ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\
as_seq h1 ctx == Spec.chacha20_init (as_seq h0 k) (as_seq h0 n) (v ctr0))
let chacha20_init ctx k n ctr =
let h0 = ST.get() in
recall_contents chacha20_constants Spec.chacha20_constants;
update_sub_f h0 ctx 0ul 4ul
(fun h -> Lib.Sequence.map secret Spec.chacha20_constants)
(fun _ -> mapT 4ul (sub ctx 0ul 4ul) secret chacha20_constants);
let h1 = ST.get() in
update_sub_f h1 ctx 4ul 8ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h k))
(fun _ -> uints_from_bytes_le (sub ctx 4ul 8ul) k);
let h2 = ST.get() in
ctx.(12ul) <- size_to_uint32 ctr;
let h3 = ST.get() in
update_sub_f h3 ctx 13ul 3ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h n))
(fun _ -> uints_from_bytes_le (sub ctx 13ul 3ul) n);
let h4 = ST.get() in
assert (as_seq h4 ctx == Spec.setup (as_seq h0 k) (as_seq h0 n) (v ctr) (as_seq h0 ctx));
()
val chacha20_encrypt_block:
ctx:state
-> out:lbuffer uint8 64ul
-> incr:size_t
-> text:lbuffer uint8 64ul ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_block (as_seq h0 ctx) (v incr) (as_seq h0 text))
let chacha20_encrypt_block ctx out incr text =
push_frame();
let k = create 16ul (u32 0) in
chacha20_core k ctx incr;
xor_block out k text;
pop_frame()
val chacha20_encrypt_last:
ctx:state
-> len:size_t{v len < 64}
-> out:lbuffer uint8 len
-> incr:size_t
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_last (as_seq h0 ctx) (v incr) (v len) (as_seq h0 text))
[@CInline]
let chacha20_encrypt_last ctx len out incr text =
push_frame();
let plain = create (size 64) (u8 0) in
update_sub plain 0ul len text;
chacha20_encrypt_block ctx plain incr plain;
copy out (sub plain 0ul len);
pop_frame()
val chacha20_update:
ctx:state
-> len:size_t
-> out:lbuffer uint8 len
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
eq_or_disjoint text out /\ disjoint text ctx /\ disjoint out ctx)
(ensures fun h0 _ h1 -> modifies (loc ctx |+| loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_update (as_seq h0 ctx) (as_seq h0 text)) | false | false | Hacl.Impl.Chacha20.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val chacha20_update:
ctx:state
-> len:size_t
-> out:lbuffer uint8 len
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
eq_or_disjoint text out /\ disjoint text ctx /\ disjoint out ctx)
(ensures fun h0 _ h1 -> modifies (loc ctx |+| loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_update (as_seq h0 ctx) (as_seq h0 text)) | [] | Hacl.Impl.Chacha20.chacha20_update | {
"file_name": "code/chacha20/Hacl.Impl.Chacha20.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
ctx: Hacl.Impl.Chacha20.Core32.state ->
len: Lib.IntTypes.size_t ->
out: Lib.Buffer.lbuffer Lib.IntTypes.uint8 len ->
text: Lib.Buffer.lbuffer Lib.IntTypes.uint8 len
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 13,
"end_line": 181,
"start_col": 2,
"start_line": 172
} |
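Note on the chacha20_update contract just above: the implementation (its body appears in a later row's file context) computes blocks = len /. size 64 and rem = len %. size 64, applies chacha20_encrypt_block to each full 64-byte block and chacha20_encrypt_last to the remainder. As a worked example, len = 150 gives blocks = 150 / 64 = 2 and rem = 150 % 64 = 22, i.e. two full-block calls followed by one last-block call of length 22; when len is a multiple of 64 the remainder is empty and, under the usual map_blocks semantics, no last-block call is made.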
FStar.HyperStack.ST.Stack | val rounds:
st:state
-> Stack unit
(requires fun h -> live h st)
(ensures fun h0 _ h1 -> modifies (loc st) h0 h1 /\
as_seq h1 st == Spec.rounds (as_seq h0 st)) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loop"
},
{
"abbrev": true,
"full_module": "Spec.Chacha20",
"short_module": "Spec"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Chacha20.Core32",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rounds st =
let h0 = ST.get () in
Loop.eq_repeat0 #Spec.state Spec.double_round (as_seq h0 st);
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 0;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 1;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 2;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 3;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 4;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 5;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 6;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 7;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 8;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 9;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st | val rounds:
st:state
-> Stack unit
(requires fun h -> live h st)
(ensures fun h0 _ h1 -> modifies (loc st) h0 h1 /\
as_seq h1 st == Spec.rounds (as_seq h0 st))
let rounds st = | true | null | false | let h0 = ST.get () in
Loop.eq_repeat0 #Spec.state Spec.double_round (as_seq h0 st);
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 0;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 1;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 2;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 3;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 4;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 5;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 6;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 7;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 8;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 9;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st | {
"checked_file": "Hacl.Impl.Chacha20.fst.checked",
"dependencies": [
"Spec.Chacha20.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Chacha20.Core32.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Chacha20.fst"
} | [] | [
"Hacl.Impl.Chacha20.Core32.state",
"Hacl.Impl.Chacha20.Core32.double_round",
"Prims.unit",
"Lib.LoopCombinators.unfold_repeat",
"Spec.Chacha20.state",
"Spec.Chacha20.double_round",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"Lib.IntTypes.uint32",
"FStar.UInt32.__uint_to_t",
"Lib.LoopCombinators.eq_repeat0",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get"
] | [] | module Hacl.Impl.Chacha20
open FStar.HyperStack
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Lib.ByteBuffer
open Hacl.Impl.Chacha20.Core32
module ST = FStar.HyperStack.ST
module Spec = Spec.Chacha20
module Loop = Lib.LoopCombinators
#set-options "--z3rlimit 200 --max_fuel 2"
//#set-options "--debug Hacl.Impl.Curve25519.Generic --debug_level ExtractNorm"
val rounds:
st:state
-> Stack unit
(requires fun h -> live h st)
(ensures fun h0 _ h1 -> modifies (loc st) h0 h1 /\
as_seq h1 st == Spec.rounds (as_seq h0 st)) | false | false | Hacl.Impl.Chacha20.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val rounds:
st:state
-> Stack unit
(requires fun h -> live h st)
(ensures fun h0 _ h1 -> modifies (loc st) h0 h1 /\
as_seq h1 st == Spec.rounds (as_seq h0 st)) | [] | Hacl.Impl.Chacha20.rounds | {
"file_name": "code/chacha20/Hacl.Impl.Chacha20.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | st: Hacl.Impl.Chacha20.Core32.state -> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 17,
"end_line": 51,
"start_col": 15,
"start_line": 29
} |
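The rounds proof above closes by rewriting the spec-side iteration into ten explicit applications. For orientation, the shapes of the two Lib.LoopCombinators lemmas it invokes, restated here as an unchecked sketch of that interface rather than quoted from it:

  Loop.eq_repeat0 f acc0          gives   Loop.repeat 0 f acc0 == acc0
  Loop.unfold_repeat n f acc0 i   gives   Loop.repeat (i + 1) f acc0 == f (Loop.repeat i f acc0)   (for i < n)

Chaining unfold_repeat for i = 0 .. 9 unrolls Spec.rounds, which iterates Spec.double_round ten times, into the same shape as the ten concrete double_round st calls, which is what lets the solver discharge the postcondition.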
FStar.HyperStack.ST.Stack | val chacha20_init:
ctx:state
-> k:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr0:size_t ->
Stack unit
(requires fun h ->
live h ctx /\ live h k /\ live h n /\
disjoint ctx k /\ disjoint ctx n /\
as_seq h ctx == Lib.Sequence.create 16 (u32 0))
(ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\
as_seq h1 ctx == Spec.chacha20_init (as_seq h0 k) (as_seq h0 n) (v ctr0)) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loop"
},
{
"abbrev": true,
"full_module": "Spec.Chacha20",
"short_module": "Spec"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Chacha20.Core32",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let chacha20_init ctx k n ctr =
let h0 = ST.get() in
recall_contents chacha20_constants Spec.chacha20_constants;
update_sub_f h0 ctx 0ul 4ul
(fun h -> Lib.Sequence.map secret Spec.chacha20_constants)
(fun _ -> mapT 4ul (sub ctx 0ul 4ul) secret chacha20_constants);
let h1 = ST.get() in
update_sub_f h1 ctx 4ul 8ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h k))
(fun _ -> uints_from_bytes_le (sub ctx 4ul 8ul) k);
let h2 = ST.get() in
ctx.(12ul) <- size_to_uint32 ctr;
let h3 = ST.get() in
update_sub_f h3 ctx 13ul 3ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h n))
(fun _ -> uints_from_bytes_le (sub ctx 13ul 3ul) n);
let h4 = ST.get() in
assert (as_seq h4 ctx == Spec.setup (as_seq h0 k) (as_seq h0 n) (v ctr) (as_seq h0 ctx));
() | val chacha20_init:
ctx:state
-> k:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr0:size_t ->
Stack unit
(requires fun h ->
live h ctx /\ live h k /\ live h n /\
disjoint ctx k /\ disjoint ctx n /\
as_seq h ctx == Lib.Sequence.create 16 (u32 0))
(ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\
as_seq h1 ctx == Spec.chacha20_init (as_seq h0 k) (as_seq h0 n) (v ctr0))
let chacha20_init ctx k n ctr = | true | null | false | let h0 = ST.get () in
recall_contents chacha20_constants Spec.chacha20_constants;
update_sub_f h0
ctx
0ul
4ul
(fun h -> Lib.Sequence.map secret Spec.chacha20_constants)
(fun _ -> mapT 4ul (sub ctx 0ul 4ul) secret chacha20_constants);
let h1 = ST.get () in
update_sub_f h1
ctx
4ul
8ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h k))
(fun _ -> uints_from_bytes_le (sub ctx 4ul 8ul) k);
let h2 = ST.get () in
ctx.(12ul) <- size_to_uint32 ctr;
let h3 = ST.get () in
update_sub_f h3
ctx
13ul
3ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h n))
(fun _ -> uints_from_bytes_le (sub ctx 13ul 3ul) n);
let h4 = ST.get () in
assert (as_seq h4 ctx == Spec.setup (as_seq h0 k) (as_seq h0 n) (v ctr) (as_seq h0 ctx));
() | {
"checked_file": "Hacl.Impl.Chacha20.fst.checked",
"dependencies": [
"Spec.Chacha20.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Chacha20.Core32.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Chacha20.fst"
} | [] | [
"Hacl.Impl.Chacha20.Core32.state",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"FStar.UInt32.__uint_to_t",
"Lib.IntTypes.size_t",
"Prims.unit",
"Prims._assert",
"Prims.eq2",
"Lib.Sequence.lseq",
"Lib.IntTypes.uint32",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"Spec.Chacha20.setup",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"Lib.Buffer.update_sub_f",
"Lib.ByteSequence.uints_from_bytes_le",
"Lib.IntTypes.SEC",
"Lib.ByteBuffer.uints_from_bytes_le",
"Lib.Buffer.lbuffer_t",
"Lib.IntTypes.int_t",
"FStar.UInt32.uint_to_t",
"Lib.Buffer.sub",
"Lib.Buffer.op_Array_Assignment",
"Lib.IntTypes.size_to_uint32",
"Lib.Sequence.map",
"Lib.IntTypes.secret",
"Spec.Chacha20.chacha20_constants",
"Lib.Buffer.mapT",
"Lib.Buffer.CONST",
"Hacl.Impl.Chacha20.chacha20_constants",
"FStar.UInt32.t",
"Lib.Buffer.recall_contents"
] | [] | module Hacl.Impl.Chacha20
open FStar.HyperStack
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Lib.ByteBuffer
open Hacl.Impl.Chacha20.Core32
module ST = FStar.HyperStack.ST
module Spec = Spec.Chacha20
module Loop = Lib.LoopCombinators
#set-options "--z3rlimit 200 --max_fuel 2"
//#set-options "--debug Hacl.Impl.Curve25519.Generic --debug_level ExtractNorm"
val rounds:
st:state
-> Stack unit
(requires fun h -> live h st)
(ensures fun h0 _ h1 -> modifies (loc st) h0 h1 /\
as_seq h1 st == Spec.rounds (as_seq h0 st))
[@ CInline]
let rounds st =
let h0 = ST.get () in
Loop.eq_repeat0 #Spec.state Spec.double_round (as_seq h0 st);
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 0;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 1;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 2;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 3;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 4;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 5;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 6;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 7;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 8;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 9;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st
val chacha20_core:
k:state
-> ctx0:state
-> ctr:size_t ->
Stack unit
(requires fun h -> live h ctx0 /\ live h k /\ disjoint ctx0 k)
(ensures fun h0 _ h1 -> modifies (loc k) h0 h1 /\
as_seq h1 k == Spec.chacha20_core (v ctr) (as_seq h0 ctx0))
[@ CInline ]
let chacha20_core k ctx ctr =
copy_state k ctx;
let ctr_u32 = size_to_uint32 ctr in
k.(12ul) <- k.(12ul) +. ctr_u32;
rounds k;
sum_state k ctx;
k.(12ul) <- k.(12ul) +. ctr_u32
val chacha20_constants:
b:glbuffer size_t 4ul{recallable b /\ witnessed b Spec.Chacha20.chacha20_constants}
let chacha20_constants =
[@ inline_let]
let l = [Spec.c0;Spec.c1;Spec.c2;Spec.c3] in
assert_norm(List.Tot.length l == 4);
createL_global l
val chacha20_init:
ctx:state
-> k:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr0:size_t ->
Stack unit
(requires fun h ->
live h ctx /\ live h k /\ live h n /\
disjoint ctx k /\ disjoint ctx n /\
as_seq h ctx == Lib.Sequence.create 16 (u32 0))
(ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\
as_seq h1 ctx == Spec.chacha20_init (as_seq h0 k) (as_seq h0 n) (v ctr0)) | false | false | Hacl.Impl.Chacha20.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val chacha20_init:
ctx:state
-> k:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr0:size_t ->
Stack unit
(requires fun h ->
live h ctx /\ live h k /\ live h n /\
disjoint ctx k /\ disjoint ctx n /\
as_seq h ctx == Lib.Sequence.create 16 (u32 0))
(ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\
as_seq h1 ctx == Spec.chacha20_init (as_seq h0 k) (as_seq h0 n) (v ctr0)) | [] | Hacl.Impl.Chacha20.chacha20_init | {
"file_name": "code/chacha20/Hacl.Impl.Chacha20.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
ctx: Hacl.Impl.Chacha20.Core32.state ->
k: Lib.Buffer.lbuffer Lib.IntTypes.uint8 32ul ->
n: Lib.Buffer.lbuffer Lib.IntTypes.uint8 12ul ->
ctr0: Lib.IntTypes.size_t
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 4,
"end_line": 113,
"start_col": 31,
"start_line": 95
} |
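The four update steps in chacha20_init above populate the 16-word state in the standard ChaCha20 layout, which the offsets in the code make explicit:

  state[0..3]    constants                              (mapT 4ul ... secret chacha20_constants)
  state[4..11]   key as 8 little-endian 32-bit words    (uints_from_bytes_le (sub ctx 4ul 8ul) k)
  state[12]      block counter                          (size_to_uint32 ctr)
  state[13..15]  nonce as 3 little-endian 32-bit words  (uints_from_bytes_le (sub ctx 13ul 3ul) n)

The final assert restates this layout as Spec.setup applied to the all-zero initial state demanded by the precondition.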
FStar.HyperStack.ST.Stack | val chacha20_encrypt_block:
ctx:state
-> out:lbuffer uint8 64ul
-> incr:size_t
-> text:lbuffer uint8 64ul ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_block (as_seq h0 ctx) (v incr) (as_seq h0 text)) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loop"
},
{
"abbrev": true,
"full_module": "Spec.Chacha20",
"short_module": "Spec"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Chacha20.Core32",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let chacha20_encrypt_block ctx out incr text =
push_frame();
let k = create 16ul (u32 0) in
chacha20_core k ctx incr;
xor_block out k text;
pop_frame() | val chacha20_encrypt_block:
ctx:state
-> out:lbuffer uint8 64ul
-> incr:size_t
-> text:lbuffer uint8 64ul ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_block (as_seq h0 ctx) (v incr) (as_seq h0 text))
let chacha20_encrypt_block ctx out incr text = | true | null | false | push_frame ();
let k = create 16ul (u32 0) in
chacha20_core k ctx incr;
xor_block out k text;
pop_frame () | {
"checked_file": "Hacl.Impl.Chacha20.fst.checked",
"dependencies": [
"Spec.Chacha20.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Chacha20.Core32.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Chacha20.fst"
} | [] | [
"Hacl.Impl.Chacha20.Core32.state",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"FStar.UInt32.__uint_to_t",
"Lib.IntTypes.size_t",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Chacha20.Core32.xor_block",
"Hacl.Impl.Chacha20.chacha20_core",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.MUT",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U32",
"Lib.IntTypes.SEC",
"FStar.UInt32.uint_to_t",
"FStar.UInt32.t",
"Lib.Buffer.create",
"Lib.IntTypes.uint32",
"Lib.IntTypes.u32",
"FStar.HyperStack.ST.push_frame"
] | [] | module Hacl.Impl.Chacha20
open FStar.HyperStack
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Lib.ByteBuffer
open Hacl.Impl.Chacha20.Core32
module ST = FStar.HyperStack.ST
module Spec = Spec.Chacha20
module Loop = Lib.LoopCombinators
#set-options "--z3rlimit 200 --max_fuel 2"
//#set-options "--debug Hacl.Impl.Curve25519.Generic --debug_level ExtractNorm"
val rounds:
st:state
-> Stack unit
(requires fun h -> live h st)
(ensures fun h0 _ h1 -> modifies (loc st) h0 h1 /\
as_seq h1 st == Spec.rounds (as_seq h0 st))
[@ CInline]
let rounds st =
let h0 = ST.get () in
Loop.eq_repeat0 #Spec.state Spec.double_round (as_seq h0 st);
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 0;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 1;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 2;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 3;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 4;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 5;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 6;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 7;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 8;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 9;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st
val chacha20_core:
k:state
-> ctx0:state
-> ctr:size_t ->
Stack unit
(requires fun h -> live h ctx0 /\ live h k /\ disjoint ctx0 k)
(ensures fun h0 _ h1 -> modifies (loc k) h0 h1 /\
as_seq h1 k == Spec.chacha20_core (v ctr) (as_seq h0 ctx0))
[@ CInline ]
let chacha20_core k ctx ctr =
copy_state k ctx;
let ctr_u32 = size_to_uint32 ctr in
k.(12ul) <- k.(12ul) +. ctr_u32;
rounds k;
sum_state k ctx;
k.(12ul) <- k.(12ul) +. ctr_u32
val chacha20_constants:
b:glbuffer size_t 4ul{recallable b /\ witnessed b Spec.Chacha20.chacha20_constants}
let chacha20_constants =
[@ inline_let]
let l = [Spec.c0;Spec.c1;Spec.c2;Spec.c3] in
assert_norm(List.Tot.length l == 4);
createL_global l
val chacha20_init:
ctx:state
-> k:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr0:size_t ->
Stack unit
(requires fun h ->
live h ctx /\ live h k /\ live h n /\
disjoint ctx k /\ disjoint ctx n /\
as_seq h ctx == Lib.Sequence.create 16 (u32 0))
(ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\
as_seq h1 ctx == Spec.chacha20_init (as_seq h0 k) (as_seq h0 n) (v ctr0))
let chacha20_init ctx k n ctr =
let h0 = ST.get() in
recall_contents chacha20_constants Spec.chacha20_constants;
update_sub_f h0 ctx 0ul 4ul
(fun h -> Lib.Sequence.map secret Spec.chacha20_constants)
(fun _ -> mapT 4ul (sub ctx 0ul 4ul) secret chacha20_constants);
let h1 = ST.get() in
update_sub_f h1 ctx 4ul 8ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h k))
(fun _ -> uints_from_bytes_le (sub ctx 4ul 8ul) k);
let h2 = ST.get() in
ctx.(12ul) <- size_to_uint32 ctr;
let h3 = ST.get() in
update_sub_f h3 ctx 13ul 3ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h n))
(fun _ -> uints_from_bytes_le (sub ctx 13ul 3ul) n);
let h4 = ST.get() in
assert (as_seq h4 ctx == Spec.setup (as_seq h0 k) (as_seq h0 n) (v ctr) (as_seq h0 ctx));
()
val chacha20_encrypt_block:
ctx:state
-> out:lbuffer uint8 64ul
-> incr:size_t
-> text:lbuffer uint8 64ul ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_block (as_seq h0 ctx) (v incr) (as_seq h0 text)) | false | false | Hacl.Impl.Chacha20.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val chacha20_encrypt_block:
ctx:state
-> out:lbuffer uint8 64ul
-> incr:size_t
-> text:lbuffer uint8 64ul ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_block (as_seq h0 ctx) (v incr) (as_seq h0 text)) | [] | Hacl.Impl.Chacha20.chacha20_encrypt_block | {
"file_name": "code/chacha20/Hacl.Impl.Chacha20.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
ctx: Hacl.Impl.Chacha20.Core32.state ->
out: Lib.Buffer.lbuffer Lib.IntTypes.uint8 64ul ->
incr: Lib.IntTypes.size_t ->
text: Lib.Buffer.lbuffer Lib.IntTypes.uint8 64ul
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 13,
"end_line": 133,
"start_col": 2,
"start_line": 129
} |
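chacha20_encrypt_block above is a keystream XOR: chacha20_core expands the initialized state ctx and the block index incr into one 64-byte keystream block k, and xor_block combines it with the input, so informally out[i] = text[i] ^ keystream(ctx, incr)[i] for i in 0 .. 63. Since (p ^ ks) ^ ks == p, running the same function on a ciphertext block with the same ctx and incr recovers the plaintext, which is why no separate decryption primitive appears later in the file.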
FStar.HyperStack.ST.Stack | val chacha20_encrypt:
len:size_t
-> out:lbuffer uint8 len
-> text:lbuffer uint8 len
-> key:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr:size_t ->
Stack unit
(requires fun h ->
live h key /\ live h n /\ live h text /\ live h out /\ eq_or_disjoint text out)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_bytes (as_seq h0 key) (as_seq h0 n) (v ctr) (as_seq h0 text)) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loop"
},
{
"abbrev": true,
"full_module": "Spec.Chacha20",
"short_module": "Spec"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Chacha20.Core32",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let chacha20_encrypt len out text key n ctr =
push_frame();
let ctx = create_state () in
chacha20_init ctx key n ctr;
chacha20_update ctx len out text;
pop_frame() | val chacha20_encrypt:
len:size_t
-> out:lbuffer uint8 len
-> text:lbuffer uint8 len
-> key:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr:size_t ->
Stack unit
(requires fun h ->
live h key /\ live h n /\ live h text /\ live h out /\ eq_or_disjoint text out)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_bytes (as_seq h0 key) (as_seq h0 n) (v ctr) (as_seq h0 text))
let chacha20_encrypt len out text key n ctr = | true | null | false | push_frame ();
let ctx = create_state () in
chacha20_init ctx key n ctr;
chacha20_update ctx len out text;
pop_frame () | {
"checked_file": "Hacl.Impl.Chacha20.fst.checked",
"dependencies": [
"Spec.Chacha20.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Chacha20.Core32.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Chacha20.fst"
} | [] | [
"Lib.IntTypes.size_t",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"FStar.UInt32.__uint_to_t",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Chacha20.chacha20_update",
"Hacl.Impl.Chacha20.chacha20_init",
"Hacl.Impl.Chacha20.Core32.state",
"Hacl.Impl.Chacha20.Core32.create_state",
"FStar.HyperStack.ST.push_frame"
] | [] | module Hacl.Impl.Chacha20
open FStar.HyperStack
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Lib.ByteBuffer
open Hacl.Impl.Chacha20.Core32
module ST = FStar.HyperStack.ST
module Spec = Spec.Chacha20
module Loop = Lib.LoopCombinators
#set-options "--z3rlimit 200 --max_fuel 2"
//#set-options "--debug Hacl.Impl.Curve25519.Generic --debug_level ExtractNorm"
val rounds:
st:state
-> Stack unit
(requires fun h -> live h st)
(ensures fun h0 _ h1 -> modifies (loc st) h0 h1 /\
as_seq h1 st == Spec.rounds (as_seq h0 st))
[@ CInline]
let rounds st =
let h0 = ST.get () in
Loop.eq_repeat0 #Spec.state Spec.double_round (as_seq h0 st);
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 0;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 1;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 2;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 3;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 4;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 5;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 6;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 7;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 8;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 9;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st
val chacha20_core:
k:state
-> ctx0:state
-> ctr:size_t ->
Stack unit
(requires fun h -> live h ctx0 /\ live h k /\ disjoint ctx0 k)
(ensures fun h0 _ h1 -> modifies (loc k) h0 h1 /\
as_seq h1 k == Spec.chacha20_core (v ctr) (as_seq h0 ctx0))
[@ CInline ]
let chacha20_core k ctx ctr =
copy_state k ctx;
let ctr_u32 = size_to_uint32 ctr in
k.(12ul) <- k.(12ul) +. ctr_u32;
rounds k;
sum_state k ctx;
k.(12ul) <- k.(12ul) +. ctr_u32
val chacha20_constants:
b:glbuffer size_t 4ul{recallable b /\ witnessed b Spec.Chacha20.chacha20_constants}
let chacha20_constants =
[@ inline_let]
let l = [Spec.c0;Spec.c1;Spec.c2;Spec.c3] in
assert_norm(List.Tot.length l == 4);
createL_global l
val chacha20_init:
ctx:state
-> k:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr0:size_t ->
Stack unit
(requires fun h ->
live h ctx /\ live h k /\ live h n /\
disjoint ctx k /\ disjoint ctx n /\
as_seq h ctx == Lib.Sequence.create 16 (u32 0))
(ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\
as_seq h1 ctx == Spec.chacha20_init (as_seq h0 k) (as_seq h0 n) (v ctr0))
let chacha20_init ctx k n ctr =
let h0 = ST.get() in
recall_contents chacha20_constants Spec.chacha20_constants;
update_sub_f h0 ctx 0ul 4ul
(fun h -> Lib.Sequence.map secret Spec.chacha20_constants)
(fun _ -> mapT 4ul (sub ctx 0ul 4ul) secret chacha20_constants);
let h1 = ST.get() in
update_sub_f h1 ctx 4ul 8ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h k))
(fun _ -> uints_from_bytes_le (sub ctx 4ul 8ul) k);
let h2 = ST.get() in
ctx.(12ul) <- size_to_uint32 ctr;
let h3 = ST.get() in
update_sub_f h3 ctx 13ul 3ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h n))
(fun _ -> uints_from_bytes_le (sub ctx 13ul 3ul) n);
let h4 = ST.get() in
assert (as_seq h4 ctx == Spec.setup (as_seq h0 k) (as_seq h0 n) (v ctr) (as_seq h0 ctx));
()
val chacha20_encrypt_block:
ctx:state
-> out:lbuffer uint8 64ul
-> incr:size_t
-> text:lbuffer uint8 64ul ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_block (as_seq h0 ctx) (v incr) (as_seq h0 text))
let chacha20_encrypt_block ctx out incr text =
push_frame();
let k = create 16ul (u32 0) in
chacha20_core k ctx incr;
xor_block out k text;
pop_frame()
val chacha20_encrypt_last:
ctx:state
-> len:size_t{v len < 64}
-> out:lbuffer uint8 len
-> incr:size_t
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_last (as_seq h0 ctx) (v incr) (v len) (as_seq h0 text))
[@CInline]
let chacha20_encrypt_last ctx len out incr text =
push_frame();
let plain = create (size 64) (u8 0) in
update_sub plain 0ul len text;
chacha20_encrypt_block ctx plain incr plain;
copy out (sub plain 0ul len);
pop_frame()
val chacha20_update:
ctx:state
-> len:size_t
-> out:lbuffer uint8 len
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
eq_or_disjoint text out /\ disjoint text ctx /\ disjoint out ctx)
(ensures fun h0 _ h1 -> modifies (loc ctx |+| loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_update (as_seq h0 ctx) (as_seq h0 text))
let chacha20_update ctx len out text =
push_frame();
let blocks = len /. size 64 in
let rem = len %. size 64 in
let h0 = ST.get() in
map_blocks h0 len 64ul text out
(fun h -> Spec.chacha20_encrypt_block (as_seq h0 ctx))
(fun h -> Spec.chacha20_encrypt_last (as_seq h0 ctx))
(fun i -> chacha20_encrypt_block ctx (sub out (i *! 64ul) 64ul) i (sub text (i *! 64ul) 64ul))
(fun i -> chacha20_encrypt_last ctx rem (sub out (i *! 64ul) rem) i (sub text (i *! 64ul) rem));
pop_frame()
inline_for_extraction
val chacha20_encrypt:
len:size_t
-> out:lbuffer uint8 len
-> text:lbuffer uint8 len
-> key:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr:size_t ->
Stack unit
(requires fun h ->
live h key /\ live h n /\ live h text /\ live h out /\ eq_or_disjoint text out)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_bytes (as_seq h0 key) (as_seq h0 n) (v ctr) (as_seq h0 text)) | false | false | Hacl.Impl.Chacha20.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val chacha20_encrypt:
len:size_t
-> out:lbuffer uint8 len
-> text:lbuffer uint8 len
-> key:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr:size_t ->
Stack unit
(requires fun h ->
live h key /\ live h n /\ live h text /\ live h out /\ eq_or_disjoint text out)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_bytes (as_seq h0 key) (as_seq h0 n) (v ctr) (as_seq h0 text)) | [] | Hacl.Impl.Chacha20.chacha20_encrypt | {
"file_name": "code/chacha20/Hacl.Impl.Chacha20.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
len: Lib.IntTypes.size_t ->
out: Lib.Buffer.lbuffer Lib.IntTypes.uint8 len ->
text: Lib.Buffer.lbuffer Lib.IntTypes.uint8 len ->
key: Lib.Buffer.lbuffer Lib.IntTypes.uint8 32ul ->
n: Lib.Buffer.lbuffer Lib.IntTypes.uint8 12ul ->
ctr: Lib.IntTypes.size_t
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 13,
"end_line": 203,
"start_col": 2,
"start_line": 199
} |
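For orientation, a minimal caller sketch for the chacha20_encrypt wrapper above. The function encrypt_64 is hypothetical, not part of Hacl.Impl.Chacha20, and has not been run through the F* verifier; its postcondition is deliberately weaker than the real functional specification.

let encrypt_64 (out:lbuffer uint8 64ul) (text:lbuffer uint8 64ul)
               (key:lbuffer uint8 32ul) (n:lbuffer uint8 12ul) :
  Stack unit
  (requires fun h ->
    live h key /\ live h n /\ live h text /\ live h out /\ eq_or_disjoint text out)
  (ensures fun h0 _ h1 -> modifies (loc out) h0 h1) =
  // one 64-byte message, starting the keystream at block counter 0
  chacha20_encrypt 64ul out text key n 0ul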
FStar.HyperStack.ST.Stack | val chacha20_decrypt:
len:size_t
-> out:lbuffer uint8 len
-> cipher:lbuffer uint8 len
-> key:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr:size_t ->
Stack unit
(requires fun h ->
live h key /\ live h n /\ live h cipher /\ live h out /\ eq_or_disjoint cipher out)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_decrypt_bytes (as_seq h0 key) (as_seq h0 n) (v ctr) (as_seq h0 cipher)) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loop"
},
{
"abbrev": true,
"full_module": "Spec.Chacha20",
"short_module": "Spec"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Chacha20.Core32",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let chacha20_decrypt len out cipher key n ctr =
push_frame();
let ctx = create_state () in
chacha20_init ctx key n ctr;
chacha20_update ctx len out cipher;
pop_frame() | val chacha20_decrypt:
len:size_t
-> out:lbuffer uint8 len
-> cipher:lbuffer uint8 len
-> key:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr:size_t ->
Stack unit
(requires fun h ->
live h key /\ live h n /\ live h cipher /\ live h out /\ eq_or_disjoint cipher out)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_decrypt_bytes (as_seq h0 key) (as_seq h0 n) (v ctr) (as_seq h0 cipher))
let chacha20_decrypt len out cipher key n ctr = | true | null | false | push_frame ();
let ctx = create_state () in
chacha20_init ctx key n ctr;
chacha20_update ctx len out cipher;
pop_frame () | {
"checked_file": "Hacl.Impl.Chacha20.fst.checked",
"dependencies": [
"Spec.Chacha20.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Chacha20.Core32.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Chacha20.fst"
} | [] | [
"Lib.IntTypes.size_t",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"FStar.UInt32.__uint_to_t",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Chacha20.chacha20_update",
"Hacl.Impl.Chacha20.chacha20_init",
"Hacl.Impl.Chacha20.Core32.state",
"Hacl.Impl.Chacha20.Core32.create_state",
"FStar.HyperStack.ST.push_frame"
] | [] | module Hacl.Impl.Chacha20
open FStar.HyperStack
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Lib.ByteBuffer
open Hacl.Impl.Chacha20.Core32
module ST = FStar.HyperStack.ST
module Spec = Spec.Chacha20
module Loop = Lib.LoopCombinators
#set-options "--z3rlimit 200 --max_fuel 2"
//#set-options "--debug Hacl.Impl.Curve25519.Generic --debug_level ExtractNorm"
val rounds:
st:state
-> Stack unit
(requires fun h -> live h st)
(ensures fun h0 _ h1 -> modifies (loc st) h0 h1 /\
as_seq h1 st == Spec.rounds (as_seq h0 st))
[@ CInline]
let rounds st =
let h0 = ST.get () in
Loop.eq_repeat0 #Spec.state Spec.double_round (as_seq h0 st);
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 0;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 1;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 2;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 3;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 4;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 5;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 6;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 7;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 8;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 9;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st
val chacha20_core:
k:state
-> ctx0:state
-> ctr:size_t ->
Stack unit
(requires fun h -> live h ctx0 /\ live h k /\ disjoint ctx0 k)
(ensures fun h0 _ h1 -> modifies (loc k) h0 h1 /\
as_seq h1 k == Spec.chacha20_core (v ctr) (as_seq h0 ctx0))
[@ CInline ]
let chacha20_core k ctx ctr =
copy_state k ctx;
let ctr_u32 = size_to_uint32 ctr in
k.(12ul) <- k.(12ul) +. ctr_u32;
rounds k;
sum_state k ctx;
k.(12ul) <- k.(12ul) +. ctr_u32
val chacha20_constants:
b:glbuffer size_t 4ul{recallable b /\ witnessed b Spec.Chacha20.chacha20_constants}
let chacha20_constants =
[@ inline_let]
let l = [Spec.c0;Spec.c1;Spec.c2;Spec.c3] in
assert_norm(List.Tot.length l == 4);
createL_global l
val chacha20_init:
ctx:state
-> k:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr0:size_t ->
Stack unit
(requires fun h ->
live h ctx /\ live h k /\ live h n /\
disjoint ctx k /\ disjoint ctx n /\
as_seq h ctx == Lib.Sequence.create 16 (u32 0))
(ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\
as_seq h1 ctx == Spec.chacha20_init (as_seq h0 k) (as_seq h0 n) (v ctr0))
let chacha20_init ctx k n ctr =
let h0 = ST.get() in
recall_contents chacha20_constants Spec.chacha20_constants;
update_sub_f h0 ctx 0ul 4ul
(fun h -> Lib.Sequence.map secret Spec.chacha20_constants)
(fun _ -> mapT 4ul (sub ctx 0ul 4ul) secret chacha20_constants);
let h1 = ST.get() in
update_sub_f h1 ctx 4ul 8ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h k))
(fun _ -> uints_from_bytes_le (sub ctx 4ul 8ul) k);
let h2 = ST.get() in
ctx.(12ul) <- size_to_uint32 ctr;
let h3 = ST.get() in
update_sub_f h3 ctx 13ul 3ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h n))
(fun _ -> uints_from_bytes_le (sub ctx 13ul 3ul) n);
let h4 = ST.get() in
assert (as_seq h4 ctx == Spec.setup (as_seq h0 k) (as_seq h0 n) (v ctr) (as_seq h0 ctx));
()
val chacha20_encrypt_block:
ctx:state
-> out:lbuffer uint8 64ul
-> incr:size_t
-> text:lbuffer uint8 64ul ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_block (as_seq h0 ctx) (v incr) (as_seq h0 text))
let chacha20_encrypt_block ctx out incr text =
push_frame();
let k = create 16ul (u32 0) in
chacha20_core k ctx incr;
xor_block out k text;
pop_frame()
val chacha20_encrypt_last:
ctx:state
-> len:size_t{v len < 64}
-> out:lbuffer uint8 len
-> incr:size_t
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_last (as_seq h0 ctx) (v incr) (v len) (as_seq h0 text))
[@CInline]
let chacha20_encrypt_last ctx len out incr text =
push_frame();
let plain = create (size 64) (u8 0) in
update_sub plain 0ul len text;
chacha20_encrypt_block ctx plain incr plain;
copy out (sub plain 0ul len);
pop_frame()
val chacha20_update:
ctx:state
-> len:size_t
-> out:lbuffer uint8 len
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
eq_or_disjoint text out /\ disjoint text ctx /\ disjoint out ctx)
(ensures fun h0 _ h1 -> modifies (loc ctx |+| loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_update (as_seq h0 ctx) (as_seq h0 text))
let chacha20_update ctx len out text =
push_frame();
let blocks = len /. size 64 in
let rem = len %. size 64 in
let h0 = ST.get() in
map_blocks h0 len 64ul text out
(fun h -> Spec.chacha20_encrypt_block (as_seq h0 ctx))
(fun h -> Spec.chacha20_encrypt_last (as_seq h0 ctx))
(fun i -> chacha20_encrypt_block ctx (sub out (i *! 64ul) 64ul) i (sub text (i *! 64ul) 64ul))
(fun i -> chacha20_encrypt_last ctx rem (sub out (i *! 64ul) rem) i (sub text (i *! 64ul) rem));
pop_frame()
inline_for_extraction
val chacha20_encrypt:
len:size_t
-> out:lbuffer uint8 len
-> text:lbuffer uint8 len
-> key:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr:size_t ->
Stack unit
(requires fun h ->
live h key /\ live h n /\ live h text /\ live h out /\ eq_or_disjoint text out)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_bytes (as_seq h0 key) (as_seq h0 n) (v ctr) (as_seq h0 text))
let chacha20_encrypt len out text key n ctr =
push_frame();
let ctx = create_state () in
chacha20_init ctx key n ctr;
chacha20_update ctx len out text;
pop_frame()
inline_for_extraction
val chacha20_decrypt:
len:size_t
-> out:lbuffer uint8 len
-> cipher:lbuffer uint8 len
-> key:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr:size_t ->
Stack unit
(requires fun h ->
live h key /\ live h n /\ live h cipher /\ live h out /\ eq_or_disjoint cipher out)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_decrypt_bytes (as_seq h0 key) (as_seq h0 n) (v ctr) (as_seq h0 cipher)) | false | false | Hacl.Impl.Chacha20.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val chacha20_decrypt:
len:size_t
-> out:lbuffer uint8 len
-> cipher:lbuffer uint8 len
-> key:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr:size_t ->
Stack unit
(requires fun h ->
live h key /\ live h n /\ live h cipher /\ live h out /\ eq_or_disjoint cipher out)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_decrypt_bytes (as_seq h0 key) (as_seq h0 n) (v ctr) (as_seq h0 cipher)) | [] | Hacl.Impl.Chacha20.chacha20_decrypt | {
"file_name": "code/chacha20/Hacl.Impl.Chacha20.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
len: Lib.IntTypes.size_t ->
out: Lib.Buffer.lbuffer Lib.IntTypes.uint8 len ->
cipher: Lib.Buffer.lbuffer Lib.IntTypes.uint8 len ->
key: Lib.Buffer.lbuffer Lib.IntTypes.uint8 32ul ->
n: Lib.Buffer.lbuffer Lib.IntTypes.uint8 12ul ->
ctr: Lib.IntTypes.size_t
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 13,
"end_line": 225,
"start_col": 2,
"start_line": 221
} |
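Note that the body of chacha20_decrypt above is structurally identical to chacha20_encrypt: both initialize a state from key, nonce and counter and then run chacha20_update over the input. That is the XOR-involution point made earlier, and at the byte level one would expect the round trip

  Spec.chacha20_decrypt_bytes k n ctr0 (Spec.chacha20_encrypt_bytes k n ctr0 p) == p

to hold for any plaintext p of supported length; no such lemma is stated in this file, so treat it as an expected property rather than a quoted result.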
FStar.HyperStack.ST.Stack | val chacha20_encrypt_last:
ctx:state
-> len:size_t{v len < 64}
-> out:lbuffer uint8 len
-> incr:size_t
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_last (as_seq h0 ctx) (v incr) (v len) (as_seq h0 text)) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loop"
},
{
"abbrev": true,
"full_module": "Spec.Chacha20",
"short_module": "Spec"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Chacha20.Core32",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let chacha20_encrypt_last ctx len out incr text =
push_frame();
let plain = create (size 64) (u8 0) in
update_sub plain 0ul len text;
chacha20_encrypt_block ctx plain incr plain;
copy out (sub plain 0ul len);
pop_frame() | val chacha20_encrypt_last:
ctx:state
-> len:size_t{v len < 64}
-> out:lbuffer uint8 len
-> incr:size_t
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_last (as_seq h0 ctx) (v incr) (v len) (as_seq h0 text))
let chacha20_encrypt_last ctx len out incr text = | true | null | false | push_frame ();
let plain = create (size 64) (u8 0) in
update_sub plain 0ul len text;
chacha20_encrypt_block ctx plain incr plain;
copy out (sub plain 0ul len);
pop_frame () | {
"checked_file": "Hacl.Impl.Chacha20.fst.checked",
"dependencies": [
"Spec.Chacha20.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Chacha20.Core32.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Chacha20.fst"
} | [] | [
"Hacl.Impl.Chacha20.Core32.state",
"Lib.IntTypes.size_t",
"Prims.b2t",
"Prims.op_LessThan",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Lib.Buffer.copy",
"Lib.Buffer.MUT",
"Lib.Buffer.lbuffer_t",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"Lib.Buffer.sub",
"Lib.IntTypes.size",
"FStar.UInt32.__uint_to_t",
"Hacl.Impl.Chacha20.chacha20_encrypt_block",
"Lib.Buffer.update_sub",
"Lib.IntTypes.mk_int",
"Lib.Buffer.create",
"Lib.IntTypes.u8",
"FStar.HyperStack.ST.push_frame"
] | [] | module Hacl.Impl.Chacha20
open FStar.HyperStack
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Lib.ByteBuffer
open Hacl.Impl.Chacha20.Core32
module ST = FStar.HyperStack.ST
module Spec = Spec.Chacha20
module Loop = Lib.LoopCombinators
#set-options "--z3rlimit 200 --max_fuel 2"
//#set-options "--debug Hacl.Impl.Curve25519.Generic --debug_level ExtractNorm"
val rounds:
st:state
-> Stack unit
(requires fun h -> live h st)
(ensures fun h0 _ h1 -> modifies (loc st) h0 h1 /\
as_seq h1 st == Spec.rounds (as_seq h0 st))
[@ CInline]
let rounds st =
let h0 = ST.get () in
Loop.eq_repeat0 #Spec.state Spec.double_round (as_seq h0 st);
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 0;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 1;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 2;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 3;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 4;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 5;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 6;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 7;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 8;
Loop.unfold_repeat 10 Spec.double_round (as_seq h0 st) 9;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st;
double_round st
val chacha20_core:
k:state
-> ctx0:state
-> ctr:size_t ->
Stack unit
(requires fun h -> live h ctx0 /\ live h k /\ disjoint ctx0 k)
(ensures fun h0 _ h1 -> modifies (loc k) h0 h1 /\
as_seq h1 k == Spec.chacha20_core (v ctr) (as_seq h0 ctx0))
[@ CInline ]
let chacha20_core k ctx ctr =
copy_state k ctx;
let ctr_u32 = size_to_uint32 ctr in
k.(12ul) <- k.(12ul) +. ctr_u32;
rounds k;
sum_state k ctx;
k.(12ul) <- k.(12ul) +. ctr_u32
val chacha20_constants:
b:glbuffer size_t 4ul{recallable b /\ witnessed b Spec.Chacha20.chacha20_constants}
let chacha20_constants =
[@ inline_let]
let l = [Spec.c0;Spec.c1;Spec.c2;Spec.c3] in
assert_norm(List.Tot.length l == 4);
createL_global l
val chacha20_init:
ctx:state
-> k:lbuffer uint8 32ul
-> n:lbuffer uint8 12ul
-> ctr0:size_t ->
Stack unit
(requires fun h ->
live h ctx /\ live h k /\ live h n /\
disjoint ctx k /\ disjoint ctx n /\
as_seq h ctx == Lib.Sequence.create 16 (u32 0))
(ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\
as_seq h1 ctx == Spec.chacha20_init (as_seq h0 k) (as_seq h0 n) (v ctr0))
let chacha20_init ctx k n ctr =
let h0 = ST.get() in
recall_contents chacha20_constants Spec.chacha20_constants;
update_sub_f h0 ctx 0ul 4ul
(fun h -> Lib.Sequence.map secret Spec.chacha20_constants)
(fun _ -> mapT 4ul (sub ctx 0ul 4ul) secret chacha20_constants);
let h1 = ST.get() in
update_sub_f h1 ctx 4ul 8ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h k))
(fun _ -> uints_from_bytes_le (sub ctx 4ul 8ul) k);
let h2 = ST.get() in
ctx.(12ul) <- size_to_uint32 ctr;
let h3 = ST.get() in
update_sub_f h3 ctx 13ul 3ul
(fun h -> Lib.ByteSequence.uints_from_bytes_le (as_seq h n))
(fun _ -> uints_from_bytes_le (sub ctx 13ul 3ul) n);
let h4 = ST.get() in
assert (as_seq h4 ctx == Spec.setup (as_seq h0 k) (as_seq h0 n) (v ctr) (as_seq h0 ctx));
()
val chacha20_encrypt_block:
ctx:state
-> out:lbuffer uint8 64ul
-> incr:size_t
-> text:lbuffer uint8 64ul ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_block (as_seq h0 ctx) (v incr) (as_seq h0 text))
let chacha20_encrypt_block ctx out incr text =
push_frame();
let k = create 16ul (u32 0) in
chacha20_core k ctx incr;
xor_block out k text;
pop_frame()
val chacha20_encrypt_last:
ctx:state
-> len:size_t{v len < 64}
-> out:lbuffer uint8 len
-> incr:size_t
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_last (as_seq h0 ctx) (v incr) (v len) (as_seq h0 text))
[@CInline] | false | false | Hacl.Impl.Chacha20.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val chacha20_encrypt_last:
ctx:state
-> len:size_t{v len < 64}
-> out:lbuffer uint8 len
-> incr:size_t
-> text:lbuffer uint8 len ->
Stack unit
(requires fun h ->
live h ctx /\ live h text /\ live h out /\
disjoint out ctx /\ disjoint text ctx)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
as_seq h1 out == Spec.chacha20_encrypt_last (as_seq h0 ctx) (v incr) (v len) (as_seq h0 text)) | [] | Hacl.Impl.Chacha20.chacha20_encrypt_last | {
"file_name": "code/chacha20/Hacl.Impl.Chacha20.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
ctx: Hacl.Impl.Chacha20.Core32.state ->
len: Lib.IntTypes.size_t{Lib.IntTypes.v len < 64} ->
out: Lib.Buffer.lbuffer Lib.IntTypes.uint8 len ->
incr: Lib.IntTypes.size_t ->
text: Lib.Buffer.lbuffer Lib.IntTypes.uint8 len
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 13,
"end_line": 156,
"start_col": 2,
"start_line": 151
} |
FStar.Pervasives.Lemma | val right_identity (#a: Type) (m: cm a) (x: a) : Lemma (CM?.mult m x (CM?.unit m) == x) | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let right_identity (#a:Type) (m:cm a) (x:a) :
Lemma (CM?.mult m x (CM?.unit m) == x) =
CM?.commutativity m x (CM?.unit m); CM?.identity m x | val right_identity (#a: Type) (m: cm a) (x: a) : Lemma (CM?.mult m x (CM?.unit m) == x)
let right_identity (#a: Type) (m: cm a) (x: a) : Lemma (CM?.mult m x (CM?.unit m) == x) = | false | null | true | CM?.commutativity m x (CM?.unit m);
CM?.identity m x | {
"checked_file": "FStar.Algebra.CommMonoid.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Algebra.CommMonoid.fst"
} | [
"lemma"
] | [
"FStar.Algebra.CommMonoid.cm",
"FStar.Algebra.CommMonoid.__proj__CM__item__identity",
"Prims.unit",
"FStar.Algebra.CommMonoid.__proj__CM__item__commutativity",
"FStar.Algebra.CommMonoid.__proj__CM__item__unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Algebra.CommMonoid.__proj__CM__item__mult",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Algebra.CommMonoid
open FStar.Mul
unopteq
type cm (a:Type) =
| CM :
unit:a ->
mult:(a -> a -> a) ->
identity : (x:a -> Lemma (unit `mult` x == x)) ->
associativity : (x:a -> y:a -> z:a ->
Lemma (x `mult` y `mult` z == x `mult` (y `mult` z))) ->
commutativity:(x:a -> y:a -> Lemma (x `mult` y == y `mult` x)) ->
cm a
let right_identity (#a:Type) (m:cm a) (x:a) : | false | false | FStar.Algebra.CommMonoid.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val right_identity (#a: Type) (m: cm a) (x: a) : Lemma (CM?.mult m x (CM?.unit m) == x) | [] | FStar.Algebra.CommMonoid.right_identity | {
"file_name": "ulib/FStar.Algebra.CommMonoid.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | m: FStar.Algebra.CommMonoid.cm a -> x: a
-> FStar.Pervasives.Lemma (ensures CM?.mult m x (CM?.unit m) == x) | {
"end_col": 54,
"end_line": 33,
"start_col": 2,
"start_line": 33
} |
Prims.Tot | val int_plus_cm:cm int | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let int_plus_cm : cm int =
CM 0 (+) (fun x -> ()) (fun x y z -> ()) (fun x y -> ()) | val int_plus_cm:cm int
let int_plus_cm:cm int = | false | null | false | CM 0 ( + ) (fun x -> ()) (fun x y z -> ()) (fun x y -> ()) | {
"checked_file": "FStar.Algebra.CommMonoid.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Algebra.CommMonoid.fst"
} | [
"total"
] | [
"FStar.Algebra.CommMonoid.CM",
"Prims.int",
"Prims.op_Addition",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Algebra.CommMonoid
open FStar.Mul
unopteq
type cm (a:Type) =
| CM :
unit:a ->
mult:(a -> a -> a) ->
identity : (x:a -> Lemma (unit `mult` x == x)) ->
associativity : (x:a -> y:a -> z:a ->
Lemma (x `mult` y `mult` z == x `mult` (y `mult` z))) ->
commutativity:(x:a -> y:a -> Lemma (x `mult` y == y `mult` x)) ->
cm a
let right_identity (#a:Type) (m:cm a) (x:a) :
Lemma (CM?.mult m x (CM?.unit m) == x) =
CM?.commutativity m x (CM?.unit m); CM?.identity m x | false | true | FStar.Algebra.CommMonoid.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val int_plus_cm:cm int | [] | FStar.Algebra.CommMonoid.int_plus_cm | {
"file_name": "ulib/FStar.Algebra.CommMonoid.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | FStar.Algebra.CommMonoid.cm Prims.int | {
"end_col": 58,
"end_line": 36,
"start_col": 2,
"start_line": 36
} |
Prims.Tot | val int_multiply_cm:cm int | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let int_multiply_cm : cm int =
CM 1 ( * ) (fun x -> ()) (fun x y z -> ()) (fun x y -> ()) | val int_multiply_cm:cm int
let int_multiply_cm:cm int = | false | null | false | CM 1 ( * ) (fun x -> ()) (fun x y z -> ()) (fun x y -> ()) | {
"checked_file": "FStar.Algebra.CommMonoid.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Algebra.CommMonoid.fst"
} | [
"total"
] | [
"FStar.Algebra.CommMonoid.CM",
"Prims.int",
"FStar.Mul.op_Star",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Algebra.CommMonoid
open FStar.Mul
unopteq
type cm (a:Type) =
| CM :
unit:a ->
mult:(a -> a -> a) ->
identity : (x:a -> Lemma (unit `mult` x == x)) ->
associativity : (x:a -> y:a -> z:a ->
Lemma (x `mult` y `mult` z == x `mult` (y `mult` z))) ->
commutativity:(x:a -> y:a -> Lemma (x `mult` y == y `mult` x)) ->
cm a
let right_identity (#a:Type) (m:cm a) (x:a) :
Lemma (CM?.mult m x (CM?.unit m) == x) =
CM?.commutativity m x (CM?.unit m); CM?.identity m x
let int_plus_cm : cm int =
CM 0 (+) (fun x -> ()) (fun x y z -> ()) (fun x y -> ()) | false | true | FStar.Algebra.CommMonoid.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val int_multiply_cm:cm int | [] | FStar.Algebra.CommMonoid.int_multiply_cm | {
"file_name": "ulib/FStar.Algebra.CommMonoid.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | FStar.Algebra.CommMonoid.cm Prims.int | {
"end_col": 60,
"end_line": 39,
"start_col": 2,
"start_line": 39
} |
FStar.Pervasives.Lemma | val gctr_partial_opaque_init (alg:algorithm) (plain cipher:seq quad32) (key:seq nat32) (icb:quad32) : Lemma
(requires is_aes_key_word alg key)
(ensures gctr_partial alg 0 plain cipher key icb) | [
{
"abbrev": false,
"full_module": "Vale.AES.Types_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers_BE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gctr_partial_opaque_init alg plain cipher key icb =
gctr_partial_reveal ();
() | val gctr_partial_opaque_init (alg:algorithm) (plain cipher:seq quad32) (key:seq nat32) (icb:quad32) : Lemma
(requires is_aes_key_word alg key)
(ensures gctr_partial alg 0 plain cipher key icb)
let gctr_partial_opaque_init alg plain cipher key icb = | false | null | true | gctr_partial_reveal ();
() | {
"checked_file": "Vale.AES.GCTR_BE.fst.checked",
"dependencies": [
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.TypesNative_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.Types_helpers.fsti.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.GCM_helpers_BE.fsti.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.AES.GCTR_BE.fst"
} | [
"lemma"
] | [
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.quad32",
"Vale.Def.Types_s.nat32",
"Prims.unit",
"Vale.AES.GCTR_BE.gctr_partial_reveal"
] | [] | module Vale.AES.GCTR_BE
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.Arch.Types
open FStar.Mul
open FStar.Seq
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GCM_helpers_BE
open FStar.Math.Lemmas
open Vale.Lib.Seqs
open Vale.AES.Types_helpers
let gctr_encrypt_block_offset (icb:quad32) (plain:quad32) (alg:algorithm) (key:seq nat32) (i:int) =
() | false | false | Vale.AES.GCTR_BE.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gctr_partial_opaque_init (alg:algorithm) (plain cipher:seq quad32) (key:seq nat32) (icb:quad32) : Lemma
(requires is_aes_key_word alg key)
(ensures gctr_partial alg 0 plain cipher key icb) | [] | Vale.AES.GCTR_BE.gctr_partial_opaque_init | {
"file_name": "vale/code/crypto/aes/Vale.AES.GCTR_BE.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
plain: FStar.Seq.Base.seq Vale.Def.Types_s.quad32 ->
cipher: FStar.Seq.Base.seq Vale.Def.Types_s.quad32 ->
key: FStar.Seq.Base.seq Vale.Def.Types_s.nat32 ->
icb: Vale.Def.Types_s.quad32
-> FStar.Pervasives.Lemma (requires Vale.AES.AES_BE_s.is_aes_key_word alg key)
(ensures Vale.AES.GCTR_BE.gctr_partial alg 0 plain cipher key icb) | {
"end_col": 4,
"end_line": 23,
"start_col": 2,
"start_line": 22
} |
FStar.Pervasives.Lemma | val gctr_encrypt_recursive_length
(icb: quad32)
(plain: gctr_plain_internal)
(alg: algorithm)
(key: aes_key_word alg)
(i: int)
: Lemma (requires True)
(ensures length (gctr_encrypt_recursive icb plain alg key i) == length plain)
(decreases %[length plain])
[SMTPat (length (gctr_encrypt_recursive icb plain alg key i))] | [
{
"abbrev": false,
"full_module": "Vale.AES.Types_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers_BE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec gctr_encrypt_recursive_length (icb:quad32) (plain:gctr_plain_internal)
(alg:algorithm) (key:aes_key_word alg) (i:int) : Lemma
(requires True)
(ensures length (gctr_encrypt_recursive icb plain alg key i) == length plain)
(decreases %[length plain])
[SMTPat (length (gctr_encrypt_recursive icb plain alg key i))]
=
if length plain = 0 then ()
else gctr_encrypt_recursive_length icb (tail plain) alg key (i + 1) | val gctr_encrypt_recursive_length
(icb: quad32)
(plain: gctr_plain_internal)
(alg: algorithm)
(key: aes_key_word alg)
(i: int)
: Lemma (requires True)
(ensures length (gctr_encrypt_recursive icb plain alg key i) == length plain)
(decreases %[length plain])
[SMTPat (length (gctr_encrypt_recursive icb plain alg key i))]
let rec gctr_encrypt_recursive_length
(icb: quad32)
(plain: gctr_plain_internal)
(alg: algorithm)
(key: aes_key_word alg)
(i: int)
: Lemma (requires True)
(ensures length (gctr_encrypt_recursive icb plain alg key i) == length plain)
(decreases %[length plain])
[SMTPat (length (gctr_encrypt_recursive icb plain alg key i))] = | false | null | true | if length plain = 0 then () else gctr_encrypt_recursive_length icb (tail plain) alg key (i + 1) | {
"checked_file": "Vale.AES.GCTR_BE.fst.checked",
"dependencies": [
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.TypesNative_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.Types_helpers.fsti.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.GCM_helpers_BE.fsti.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.AES.GCTR_BE.fst"
} | [
"lemma",
""
] | [
"Vale.Def.Types_s.quad32",
"Vale.AES.GCTR_BE_s.gctr_plain_internal",
"Vale.AES.AES_common_s.algorithm",
"Vale.AES.AES_BE_s.aes_key_word",
"Prims.int",
"Prims.op_Equality",
"FStar.Seq.Base.length",
"Prims.bool",
"Vale.AES.GCTR_BE.gctr_encrypt_recursive_length",
"FStar.Seq.Properties.tail",
"Prims.op_Addition",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"Prims.nat",
"Vale.AES.GCTR_BE_s.gctr_encrypt_recursive",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | module Vale.AES.GCTR_BE
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.Arch.Types
open FStar.Mul
open FStar.Seq
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GCM_helpers_BE
open FStar.Math.Lemmas
open Vale.Lib.Seqs
open Vale.AES.Types_helpers
let gctr_encrypt_block_offset (icb:quad32) (plain:quad32) (alg:algorithm) (key:seq nat32) (i:int) =
()
let gctr_partial_opaque_init alg plain cipher key icb =
gctr_partial_reveal ();
()
#restart-solver
let lemma_gctr_partial_append alg b1 b2 p1 c1 p2 c2 key icb1 icb2 =
gctr_partial_reveal ();
()
let gctr_partial_opaque_ignores_postfix alg bound plain plain' cipher cipher' key icb =
gctr_partial_reveal ();
// OBSERVE:
assert (forall i . 0 <= i /\ i < bound ==> index plain i == index (slice plain 0 bound) i);
assert (forall i . 0 <= i /\ i < bound ==> index plain' i == index (slice plain' 0 bound) i);
assert (forall i . 0 <= i /\ i < bound ==> index cipher i == index (slice cipher 0 bound) i);
assert (forall i . 0 <= i /\ i < bound ==> index cipher' i == index (slice cipher' 0 bound) i);
()
let rec gctr_encrypt_recursive_length (icb:quad32) (plain:gctr_plain_internal)
(alg:algorithm) (key:aes_key_word alg) (i:int) : Lemma
(requires True)
(ensures length (gctr_encrypt_recursive icb plain alg key i) == length plain)
(decreases %[length plain])
[SMTPat (length (gctr_encrypt_recursive icb plain alg key i))] | false | false | Vale.AES.GCTR_BE.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gctr_encrypt_recursive_length
(icb: quad32)
(plain: gctr_plain_internal)
(alg: algorithm)
(key: aes_key_word alg)
(i: int)
: Lemma (requires True)
(ensures length (gctr_encrypt_recursive icb plain alg key i) == length plain)
(decreases %[length plain])
[SMTPat (length (gctr_encrypt_recursive icb plain alg key i))] | [
"recursion"
] | Vale.AES.GCTR_BE.gctr_encrypt_recursive_length | {
"file_name": "vale/code/crypto/aes/Vale.AES.GCTR_BE.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
icb: Vale.Def.Types_s.quad32 ->
plain: Vale.AES.GCTR_BE_s.gctr_plain_internal ->
alg: Vale.AES.AES_common_s.algorithm ->
key: Vale.AES.AES_BE_s.aes_key_word alg ->
i: Prims.int
-> FStar.Pervasives.Lemma
(ensures
FStar.Seq.Base.length (Vale.AES.GCTR_BE_s.gctr_encrypt_recursive icb plain alg key i) ==
FStar.Seq.Base.length plain)
(decreases FStar.Seq.Base.length plain)
[
SMTPat (FStar.Seq.Base.length (Vale.AES.GCTR_BE_s.gctr_encrypt_recursive icb plain alg key i
))
] | {
"end_col": 69,
"end_line": 47,
"start_col": 2,
"start_line": 46
} |
FStar.Pervasives.Lemma | val lemma_ishr_ixor_32 (x y: nat32) (k: nat)
: Lemma (ensures ishr #pow2_32 (ixor x y) k == ixor (ishr x k) (ishr y k)) | [
{
"abbrev": false,
"full_module": "FStar.Seq.Properties",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.Types_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers_BE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lemma_ishr_ixor_32 (x y:nat32) (k:nat) : Lemma
(ensures ishr #pow2_32 (ixor x y) k == ixor (ishr x k) (ishr y k))
=
Vale.Def.TypesNative_s.reveal_ishr 32 x k;
Vale.Def.TypesNative_s.reveal_ishr 32 y k;
Vale.Def.TypesNative_s.reveal_ishr 32 (ixor x y) k;
Vale.Def.TypesNative_s.reveal_ixor 32 x y;
Vale.Def.TypesNative_s.reveal_ixor 32 (ishr x k) (ishr y k);
FStar.UInt.shift_right_logxor_lemma #32 x y k;
() | val lemma_ishr_ixor_32 (x y: nat32) (k: nat)
: Lemma (ensures ishr #pow2_32 (ixor x y) k == ixor (ishr x k) (ishr y k))
let lemma_ishr_ixor_32 (x y: nat32) (k: nat)
: Lemma (ensures ishr #pow2_32 (ixor x y) k == ixor (ishr x k) (ishr y k)) = | false | null | true | Vale.Def.TypesNative_s.reveal_ishr 32 x k;
Vale.Def.TypesNative_s.reveal_ishr 32 y k;
Vale.Def.TypesNative_s.reveal_ishr 32 (ixor x y) k;
Vale.Def.TypesNative_s.reveal_ixor 32 x y;
Vale.Def.TypesNative_s.reveal_ixor 32 (ishr x k) (ishr y k);
FStar.UInt.shift_right_logxor_lemma #32 x y k;
() | {
"checked_file": "Vale.AES.GCTR_BE.fst.checked",
"dependencies": [
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.TypesNative_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.Types_helpers.fsti.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.GCM_helpers_BE.fsti.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.AES.GCTR_BE.fst"
} | [
"lemma"
] | [
"Vale.Def.Types_s.nat32",
"Prims.nat",
"Prims.unit",
"FStar.UInt.shift_right_logxor_lemma",
"Vale.Def.TypesNative_s.reveal_ixor",
"Vale.Def.Types_s.ishr",
"Vale.Def.Words_s.pow2_32",
"Vale.Def.TypesNative_s.reveal_ishr",
"Vale.Def.Types_s.ixor",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"Vale.Def.Words_s.natN",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module Vale.AES.GCTR_BE
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.Arch.Types
open FStar.Mul
open FStar.Seq
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GCM_helpers_BE
open FStar.Math.Lemmas
open Vale.Lib.Seqs
open Vale.AES.Types_helpers
let gctr_encrypt_block_offset (icb:quad32) (plain:quad32) (alg:algorithm) (key:seq nat32) (i:int) =
()
let gctr_partial_opaque_init alg plain cipher key icb =
gctr_partial_reveal ();
()
#restart-solver
let lemma_gctr_partial_append alg b1 b2 p1 c1 p2 c2 key icb1 icb2 =
gctr_partial_reveal ();
()
let gctr_partial_opaque_ignores_postfix alg bound plain plain' cipher cipher' key icb =
gctr_partial_reveal ();
// OBSERVE:
assert (forall i . 0 <= i /\ i < bound ==> index plain i == index (slice plain 0 bound) i);
assert (forall i . 0 <= i /\ i < bound ==> index plain' i == index (slice plain' 0 bound) i);
assert (forall i . 0 <= i /\ i < bound ==> index cipher i == index (slice cipher 0 bound) i);
assert (forall i . 0 <= i /\ i < bound ==> index cipher' i == index (slice cipher' 0 bound) i);
()
let rec gctr_encrypt_recursive_length (icb:quad32) (plain:gctr_plain_internal)
(alg:algorithm) (key:aes_key_word alg) (i:int) : Lemma
(requires True)
(ensures length (gctr_encrypt_recursive icb plain alg key i) == length plain)
(decreases %[length plain])
[SMTPat (length (gctr_encrypt_recursive icb plain alg key i))]
=
if length plain = 0 then ()
else gctr_encrypt_recursive_length icb (tail plain) alg key (i + 1)
//TODO: Check if ever being used
#reset-options "--z3rlimit 40"
let gctr_encrypt_length (icb:quad32) (plain:gctr_plain)
(alg:algorithm) (key:aes_key_word alg) :
Lemma(length (gctr_encrypt icb plain alg key) == length plain)
[SMTPat (length (gctr_encrypt icb plain alg key))]
=
reveal_opaque (`%be_bytes_to_seq_quad32) be_bytes_to_seq_quad32;
gctr_encrypt_reveal ();
let num_extra = (length plain) % 16 in
let result = gctr_encrypt icb plain alg key in
if num_extra = 0 then (
let plain_quads = be_bytes_to_seq_quad32 plain in
gctr_encrypt_recursive_length icb plain_quads alg key 0
) else (
let full_bytes_len = (length plain) - num_extra in
let full_blocks, final_block = split plain full_bytes_len in
let full_quads = be_bytes_to_seq_quad32 full_blocks in
let final_quad = be_bytes_to_quad32 (pad_to_128_bits final_block) in
let cipher_quads = gctr_encrypt_recursive icb full_quads alg key 0 in
let final_cipher_quad = gctr_encrypt_block icb final_quad alg key (full_bytes_len / 16) in
let cipher_bytes_full = seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE cipher_quads) in
let final_cipher_bytes = slice (be_quad32_to_bytes final_cipher_quad) 0 num_extra in
gctr_encrypt_recursive_length icb full_quads alg key 0;
assert (length result == length cipher_bytes_full + length final_cipher_bytes);
assert (length cipher_quads == length full_quads);
assert (length cipher_bytes_full == 16 * length cipher_quads);
assert (16 * length full_quads == length full_blocks);
assert (length cipher_bytes_full == length full_blocks);
()
)
#reset-options
let rec gctr_indexed_helper (icb:quad32) (plain:gctr_plain_internal)
(alg:algorithm) (key:aes_key_word alg) (i:int) : Lemma
(requires True)
(ensures (let cipher = gctr_encrypt_recursive icb plain alg key i in
length cipher == length plain /\
(forall j . {:pattern index cipher j} 0 <= j /\ j < length plain ==>
index cipher j == quad32_xor (index plain j) (aes_encrypt_word alg key (inc32 icb (i + j)) ))))
(decreases %[length plain])
=
if length plain = 0 then ()
else
let tl = tail plain in
let cipher = gctr_encrypt_recursive icb plain alg key i in
let r_cipher = gctr_encrypt_recursive icb tl alg key (i+1) in
let helper (j:int) :
Lemma ((0 <= j /\ j < length plain) ==> (index cipher j == quad32_xor (index plain j) (aes_encrypt_word alg key (inc32 icb (i + j)) )))
=
aes_encrypt_word_reveal ();
if 0 < j && j < length plain then (
gctr_indexed_helper icb tl alg key (i+1);
assert(index r_cipher (j-1) == quad32_xor (index tl (j-1)) (aes_encrypt_word alg key (inc32 icb (i + 1 + j - 1)) )) // OBSERVE
) else ()
in
FStar.Classical.forall_intro helper
let gctr_indexed (icb:quad32) (plain:gctr_plain_internal)
(alg:algorithm) (key:aes_key_word alg) (cipher:seq quad32) : Lemma
(requires length cipher == length plain /\
(forall i . {:pattern index cipher i} 0 <= i /\ i < length cipher ==>
index cipher i == quad32_xor (index plain i) (aes_encrypt_word alg key (inc32 icb i) )))
(ensures cipher == gctr_encrypt_recursive icb plain alg key 0)
=
gctr_indexed_helper icb plain alg key 0;
let c = gctr_encrypt_recursive icb plain alg key 0 in
assert(equal cipher c) // OBSERVE: Invoke extensionality lemmas
let gctr_partial_completed (alg:algorithm) (plain cipher:seq quad32) (key:seq nat32) (icb:quad32) =
gctr_indexed icb plain alg key cipher;
()
let gctr_partial_opaque_completed (alg:algorithm) (plain cipher:seq quad32) (key:seq nat32) (icb:quad32) : Lemma
(requires
is_aes_key_word alg key /\
length plain == length cipher /\
length plain < pow2_32 /\
gctr_partial alg (length cipher) plain cipher key icb
)
(ensures cipher == gctr_encrypt_recursive icb plain alg key 0)
=
gctr_partial_reveal ();
gctr_partial_completed alg plain cipher key icb
let gctr_partial_to_full_basic (icb:quad32) (plain:seq quad32) (alg:algorithm) (key:seq nat32) (cipher:seq quad32) =
gctr_encrypt_reveal ();
let p = seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE plain) in
assert (length p % 16 == 0);
let plain_quads = be_bytes_to_seq_quad32 p in
let cipher_quads = gctr_encrypt_recursive icb plain_quads alg key 0 in
let cipher_bytes = seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE cipher_quads) in
be_bytes_to_seq_quad32_to_bytes plain;
()
let step1 (p:seq quad32) (num_bytes:nat{ num_bytes < 16 * length p }) : Lemma
(let num_extra = num_bytes % 16 in
let num_blocks = num_bytes / 16 in
let full_blocks, final_block = split (slice (seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE p)) 0 num_bytes) (num_blocks * 16) in
let full_quads_BE = be_bytes_to_seq_quad32 full_blocks in
let p_prefix = slice p 0 num_blocks in
p_prefix == full_quads_BE)
=
let num_extra = num_bytes % 16 in
let num_blocks = num_bytes / 16 in
let full_blocks, final_block = split (slice (seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE p)) 0 num_bytes) (num_blocks * 16) in
let full_quads_BE = be_bytes_to_seq_quad32 full_blocks in
let p_prefix = slice p 0 num_blocks in
assert (length full_blocks == num_blocks * 16);
assert (full_blocks == slice (slice (seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE p)) 0 num_bytes) 0 (num_blocks * 16));
assert (full_blocks == slice (seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE p)) 0 (num_blocks * 16));
slice_commutes_be_seq_quad32_to_bytes0 p num_blocks;
assert (full_blocks == seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE (slice p 0 num_blocks)));
be_bytes_to_seq_quad32_to_bytes (slice p 0 num_blocks);
assert (full_quads_BE == (slice p 0 num_blocks));
()
#reset-options "--smtencoding.elim_box true --z3rlimit 30"
let lemma_slice_orig_index (#a:Type) (s s':seq a) (m n:nat) : Lemma
(requires length s == length s' /\ m <= n /\ n <= length s /\ slice s m n == slice s' m n)
(ensures (forall (i:int).{:pattern (index s i) \/ (index s' i)} m <= i /\ i < n ==> index s i == index s' i))
=
let aux (i:nat{m <= i /\ i < n}) : Lemma (index s i == index s' i) =
lemma_index_slice s m n (i - m);
lemma_index_slice s' m n (i - m)
in Classical.forall_intro aux
let lemma_ishr_ixor_32 (x y:nat32) (k:nat) : Lemma
(ensures ishr #pow2_32 (ixor x y) k == ixor (ishr x k) (ishr y k)) | false | false | Vale.AES.GCTR_BE.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lemma_ishr_ixor_32 (x y: nat32) (k: nat)
: Lemma (ensures ishr #pow2_32 (ixor x y) k == ixor (ishr x k) (ishr y k)) | [] | Vale.AES.GCTR_BE.lemma_ishr_ixor_32 | {
"file_name": "vale/code/crypto/aes/Vale.AES.GCTR_BE.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Vale.Def.Types_s.nat32 -> y: Vale.Def.Types_s.nat32 -> k: Prims.nat
-> FStar.Pervasives.Lemma
(ensures
Vale.Def.Types_s.ishr (Vale.Def.Types_s.ixor x y) k ==
Vale.Def.Types_s.ixor (Vale.Def.Types_s.ishr x k) (Vale.Def.Types_s.ishr y k)) | {
"end_col": 4,
"end_line": 189,
"start_col": 2,
"start_line": 183
} |
FStar.Pervasives.Lemma | val nat32_xor_bytewise_2 (k k' x x' m: nat32) (s s' t t': four nat8)
: Lemma
(requires
k == four_to_nat 8 s /\ k' == four_to_nat 8 s' /\ x == four_to_nat 8 t /\
x' == four_to_nat 8 t' /\ ixor k m == x /\ ixor k' m == x' /\ s.hi3 == s'.hi3 /\
s.hi2 == s'.hi2) (ensures t.hi3 == t'.hi3 /\ t.hi2 == t'.hi2) | [
{
"abbrev": false,
"full_module": "FStar.Seq.Properties",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.Types_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers_BE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let nat32_xor_bytewise_2 (k k' x x' m:nat32) (s s' t t':four nat8) : Lemma
(requires
k == four_to_nat 8 s /\
k' == four_to_nat 8 s' /\
x == four_to_nat 8 t /\
x' == four_to_nat 8 t' /\
ixor k m == x /\
ixor k' m == x' /\
s.hi3 == s'.hi3 /\ s.hi2 == s'.hi2
)
(ensures t.hi3 == t'.hi3 /\ t.hi2 == t'.hi2)
=
let Mkfour s0 s1 s2 s3 = s in
let Mkfour s0' s1' s2' s3' = s' in
let Mkfour t0 t1 t2 t3 = t in
let Mkfour t0' t1' t2' t3' = t' in
nat32_xor_bytewise_2_helper3 k k' s s';
lemma_ishr_32 k 16;
lemma_ishr_32 k' 16;
lemma_ishr_32 x 16;
lemma_ishr_32 x' 16;
lemma_ishr_ixor_32 k m 16;
lemma_ishr_ixor_32 k' m 16;
nat32_xor_bytewise_2_helper2 x x' t t';
() | val nat32_xor_bytewise_2 (k k' x x' m: nat32) (s s' t t': four nat8)
: Lemma
(requires
k == four_to_nat 8 s /\ k' == four_to_nat 8 s' /\ x == four_to_nat 8 t /\
x' == four_to_nat 8 t' /\ ixor k m == x /\ ixor k' m == x' /\ s.hi3 == s'.hi3 /\
s.hi2 == s'.hi2) (ensures t.hi3 == t'.hi3 /\ t.hi2 == t'.hi2)
let nat32_xor_bytewise_2 (k k' x x' m: nat32) (s s' t t': four nat8)
: Lemma
(requires
k == four_to_nat 8 s /\ k' == four_to_nat 8 s' /\ x == four_to_nat 8 t /\
x' == four_to_nat 8 t' /\ ixor k m == x /\ ixor k' m == x' /\ s.hi3 == s'.hi3 /\
s.hi2 == s'.hi2) (ensures t.hi3 == t'.hi3 /\ t.hi2 == t'.hi2) = | false | null | true | let Mkfour s0 s1 s2 s3 = s in
let Mkfour s0' s1' s2' s3' = s' in
let Mkfour t0 t1 t2 t3 = t in
let Mkfour t0' t1' t2' t3' = t' in
nat32_xor_bytewise_2_helper3 k k' s s';
lemma_ishr_32 k 16;
lemma_ishr_32 k' 16;
lemma_ishr_32 x 16;
lemma_ishr_32 x' 16;
lemma_ishr_ixor_32 k m 16;
lemma_ishr_ixor_32 k' m 16;
nat32_xor_bytewise_2_helper2 x x' t t';
() | {
"checked_file": "Vale.AES.GCTR_BE.fst.checked",
"dependencies": [
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.TypesNative_s.fst.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.Types_helpers.fsti.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.GCM_helpers_BE.fsti.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.AES.GCTR_BE.fst"
} | [
"lemma"
] | [
"Vale.Def.Types_s.nat32",
"Vale.Def.Words_s.four",
"Vale.Def.Types_s.nat8",
"Prims.unit",
"Vale.AES.GCTR_BE.nat32_xor_bytewise_2_helper2",
"Vale.AES.GCTR_BE.lemma_ishr_ixor_32",
"Vale.AES.Types_helpers.lemma_ishr_32",
"Vale.AES.GCTR_BE.nat32_xor_bytewise_2_helper3",
"Vale.Def.Words_s.nat8",
"Prims.l_and",
"Prims.eq2",
"Vale.Def.Words_s.natN",
"Vale.Def.Words_s.pow2_32",
"Vale.Def.Words.Four_s.four_to_nat",
"Vale.Def.Types_s.ixor",
"Vale.Def.Words_s.__proj__Mkfour__item__hi3",
"Vale.Def.Words_s.__proj__Mkfour__item__hi2",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module Vale.AES.GCTR_BE
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.Arch.Types
open FStar.Mul
open FStar.Seq
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GCM_helpers_BE
open FStar.Math.Lemmas
open Vale.Lib.Seqs
open Vale.AES.Types_helpers
let gctr_encrypt_block_offset (icb:quad32) (plain:quad32) (alg:algorithm) (key:seq nat32) (i:int) =
()
let gctr_partial_opaque_init alg plain cipher key icb =
gctr_partial_reveal ();
()
#restart-solver
let lemma_gctr_partial_append alg b1 b2 p1 c1 p2 c2 key icb1 icb2 =
gctr_partial_reveal ();
()
let gctr_partial_opaque_ignores_postfix alg bound plain plain' cipher cipher' key icb =
gctr_partial_reveal ();
// OBSERVE:
assert (forall i . 0 <= i /\ i < bound ==> index plain i == index (slice plain 0 bound) i);
assert (forall i . 0 <= i /\ i < bound ==> index plain' i == index (slice plain' 0 bound) i);
assert (forall i . 0 <= i /\ i < bound ==> index cipher i == index (slice cipher 0 bound) i);
assert (forall i . 0 <= i /\ i < bound ==> index cipher' i == index (slice cipher' 0 bound) i);
()
let rec gctr_encrypt_recursive_length (icb:quad32) (plain:gctr_plain_internal)
(alg:algorithm) (key:aes_key_word alg) (i:int) : Lemma
(requires True)
(ensures length (gctr_encrypt_recursive icb plain alg key i) == length plain)
(decreases %[length plain])
[SMTPat (length (gctr_encrypt_recursive icb plain alg key i))]
=
if length plain = 0 then ()
else gctr_encrypt_recursive_length icb (tail plain) alg key (i + 1)
//TODO: Check if ever being used
#reset-options "--z3rlimit 40"
let gctr_encrypt_length (icb:quad32) (plain:gctr_plain)
(alg:algorithm) (key:aes_key_word alg) :
Lemma(length (gctr_encrypt icb plain alg key) == length plain)
[SMTPat (length (gctr_encrypt icb plain alg key))]
=
reveal_opaque (`%be_bytes_to_seq_quad32) be_bytes_to_seq_quad32;
gctr_encrypt_reveal ();
let num_extra = (length plain) % 16 in
let result = gctr_encrypt icb plain alg key in
if num_extra = 0 then (
let plain_quads = be_bytes_to_seq_quad32 plain in
gctr_encrypt_recursive_length icb plain_quads alg key 0
) else (
let full_bytes_len = (length plain) - num_extra in
let full_blocks, final_block = split plain full_bytes_len in
let full_quads = be_bytes_to_seq_quad32 full_blocks in
let final_quad = be_bytes_to_quad32 (pad_to_128_bits final_block) in
let cipher_quads = gctr_encrypt_recursive icb full_quads alg key 0 in
let final_cipher_quad = gctr_encrypt_block icb final_quad alg key (full_bytes_len / 16) in
let cipher_bytes_full = seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE cipher_quads) in
let final_cipher_bytes = slice (be_quad32_to_bytes final_cipher_quad) 0 num_extra in
gctr_encrypt_recursive_length icb full_quads alg key 0;
assert (length result == length cipher_bytes_full + length final_cipher_bytes);
assert (length cipher_quads == length full_quads);
assert (length cipher_bytes_full == 16 * length cipher_quads);
assert (16 * length full_quads == length full_blocks);
assert (length cipher_bytes_full == length full_blocks);
()
)
#reset-options
let rec gctr_indexed_helper (icb:quad32) (plain:gctr_plain_internal)
(alg:algorithm) (key:aes_key_word alg) (i:int) : Lemma
(requires True)
(ensures (let cipher = gctr_encrypt_recursive icb plain alg key i in
length cipher == length plain /\
(forall j . {:pattern index cipher j} 0 <= j /\ j < length plain ==>
index cipher j == quad32_xor (index plain j) (aes_encrypt_word alg key (inc32 icb (i + j)) ))))
(decreases %[length plain])
=
if length plain = 0 then ()
else
let tl = tail plain in
let cipher = gctr_encrypt_recursive icb plain alg key i in
let r_cipher = gctr_encrypt_recursive icb tl alg key (i+1) in
let helper (j:int) :
Lemma ((0 <= j /\ j < length plain) ==> (index cipher j == quad32_xor (index plain j) (aes_encrypt_word alg key (inc32 icb (i + j)) )))
=
aes_encrypt_word_reveal ();
if 0 < j && j < length plain then (
gctr_indexed_helper icb tl alg key (i+1);
assert(index r_cipher (j-1) == quad32_xor (index tl (j-1)) (aes_encrypt_word alg key (inc32 icb (i + 1 + j - 1)) )) // OBSERVE
) else ()
in
FStar.Classical.forall_intro helper
let gctr_indexed (icb:quad32) (plain:gctr_plain_internal)
(alg:algorithm) (key:aes_key_word alg) (cipher:seq quad32) : Lemma
(requires length cipher == length plain /\
(forall i . {:pattern index cipher i} 0 <= i /\ i < length cipher ==>
index cipher i == quad32_xor (index plain i) (aes_encrypt_word alg key (inc32 icb i) )))
(ensures cipher == gctr_encrypt_recursive icb plain alg key 0)
=
gctr_indexed_helper icb plain alg key 0;
let c = gctr_encrypt_recursive icb plain alg key 0 in
assert(equal cipher c) // OBSERVE: Invoke extensionality lemmas
let gctr_partial_completed (alg:algorithm) (plain cipher:seq quad32) (key:seq nat32) (icb:quad32) =
gctr_indexed icb plain alg key cipher;
()
let gctr_partial_opaque_completed (alg:algorithm) (plain cipher:seq quad32) (key:seq nat32) (icb:quad32) : Lemma
(requires
is_aes_key_word alg key /\
length plain == length cipher /\
length plain < pow2_32 /\
gctr_partial alg (length cipher) plain cipher key icb
)
(ensures cipher == gctr_encrypt_recursive icb plain alg key 0)
=
gctr_partial_reveal ();
gctr_partial_completed alg plain cipher key icb
let gctr_partial_to_full_basic (icb:quad32) (plain:seq quad32) (alg:algorithm) (key:seq nat32) (cipher:seq quad32) =
gctr_encrypt_reveal ();
let p = seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE plain) in
assert (length p % 16 == 0);
let plain_quads = be_bytes_to_seq_quad32 p in
let cipher_quads = gctr_encrypt_recursive icb plain_quads alg key 0 in
let cipher_bytes = seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE cipher_quads) in
be_bytes_to_seq_quad32_to_bytes plain;
()
let step1 (p:seq quad32) (num_bytes:nat{ num_bytes < 16 * length p }) : Lemma
(let num_extra = num_bytes % 16 in
let num_blocks = num_bytes / 16 in
let full_blocks, final_block = split (slice (seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE p)) 0 num_bytes) (num_blocks * 16) in
let full_quads_BE = be_bytes_to_seq_quad32 full_blocks in
let p_prefix = slice p 0 num_blocks in
p_prefix == full_quads_BE)
=
let num_extra = num_bytes % 16 in
let num_blocks = num_bytes / 16 in
let full_blocks, final_block = split (slice (seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE p)) 0 num_bytes) (num_blocks * 16) in
let full_quads_BE = be_bytes_to_seq_quad32 full_blocks in
let p_prefix = slice p 0 num_blocks in
assert (length full_blocks == num_blocks * 16);
assert (full_blocks == slice (slice (seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE p)) 0 num_bytes) 0 (num_blocks * 16));
assert (full_blocks == slice (seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE p)) 0 (num_blocks * 16));
slice_commutes_be_seq_quad32_to_bytes0 p num_blocks;
assert (full_blocks == seq_nat32_to_seq_nat8_BE (seq_four_to_seq_BE (slice p 0 num_blocks)));
be_bytes_to_seq_quad32_to_bytes (slice p 0 num_blocks);
assert (full_quads_BE == (slice p 0 num_blocks));
()
#reset-options "--smtencoding.elim_box true --z3rlimit 30"
let lemma_slice_orig_index (#a:Type) (s s':seq a) (m n:nat) : Lemma
(requires length s == length s' /\ m <= n /\ n <= length s /\ slice s m n == slice s' m n)
(ensures (forall (i:int).{:pattern (index s i) \/ (index s' i)} m <= i /\ i < n ==> index s i == index s' i))
=
let aux (i:nat{m <= i /\ i < n}) : Lemma (index s i == index s' i) =
lemma_index_slice s m n (i - m);
lemma_index_slice s' m n (i - m)
in Classical.forall_intro aux
let lemma_ishr_ixor_32 (x y:nat32) (k:nat) : Lemma
(ensures ishr #pow2_32 (ixor x y) k == ixor (ishr x k) (ishr y k))
=
Vale.Def.TypesNative_s.reveal_ishr 32 x k;
Vale.Def.TypesNative_s.reveal_ishr 32 y k;
Vale.Def.TypesNative_s.reveal_ishr 32 (ixor x y) k;
Vale.Def.TypesNative_s.reveal_ixor 32 x y;
Vale.Def.TypesNative_s.reveal_ixor 32 (ishr x k) (ishr y k);
FStar.UInt.shift_right_logxor_lemma #32 x y k;
()
let nat32_xor_bytewise_1_helper1 (x0 x0':nat8) (x1 x1':nat24) (x x':nat32) : Lemma
(requires
x == 0x1000000 * x0 + x1 /\
x' == 0x1000000 * x0' + x1' /\
x / 0x1000000 == x' / 0x1000000
)
(ensures x0 == x0')
=
()
let nat32_xor_bytewise_2_helper1 (x0 x0' x1 x1':nat16) (x x':nat32) : Lemma
(requires
x == 0x10000 * x0 + x1 /\
x' == 0x10000 * x0' + x1' /\
x / 0x10000 == x' / 0x10000
)
(ensures x0 == x0')
=
()
let nat32_xor_bytewise_3_helper1 (x0 x0':nat24) (x1 x1':nat8) (x x':nat32) : Lemma
(requires
x == 0x100 * x0 + x1 /\
x' == 0x100 * x0' + x1' /\
x / 0x100 == x' / 0x100
)
(ensures x0 == x0')
=
()
let nat32_xor_bytewise_1_helper2 (x x':nat32) (t t':four nat8) : Lemma
(requires
x == four_to_nat 8 t /\
x' == four_to_nat 8 t' /\
x / 0x1000000 == x' / 0x1000000
)
(ensures t.hi3 == t'.hi3)
=
let Mkfour t0 t1 t2 t3 = t in
let Mkfour t0' t1' t2' t3' = t' in
let t012 = t0 + 0x100 * t1 + 0x10000 * t2 in
let t012' = t0' + 0x100 * t1' + 0x10000 * t2' in
assert_norm (four_to_nat 8 t == four_to_nat_unfold 8 t );
assert_norm (four_to_nat 8 t' == four_to_nat_unfold 8 t');
nat32_xor_bytewise_1_helper1 t3 t3' t012 t012' x x';
()
let nat32_xor_bytewise_2_helper2 (x x':nat32) (t t':four nat8) : Lemma
(requires
x == four_to_nat 8 t /\
x' == four_to_nat 8 t' /\
x / 0x10000 == x' / 0x10000
)
(ensures t.hi3 == t'.hi3 /\ t.hi2 == t'.hi2)
=
let Mkfour t0 t1 t2 t3 = t in
let Mkfour t0' t1' t2' t3' = t' in
let t01 = t0 + 0x100 * t1 in
let t23 = t2 + 0x100 * t3 in
let t01' = t0' + 0x100 * t1' in
let t23' = t2' + 0x100 * t3' in
assert_norm (four_to_nat 8 t == four_to_nat_unfold 8 t );
assert_norm (four_to_nat 8 t' == four_to_nat_unfold 8 t');
nat32_xor_bytewise_2_helper1 t23 t23' t01 t01' x x';
()
let nat32_xor_bytewise_3_helper2 (x x':nat32) (t t':four nat8) : Lemma
(requires
x == four_to_nat 8 t /\
x' == four_to_nat 8 t' /\
x / 0x100 == x' / 0x100
)
(ensures t.hi3 == t'.hi3 /\ t.hi2 == t'.hi2 /\ t.lo1 == t'.lo1)
=
let Mkfour t0 t1 t2 t3 = t in
let Mkfour t0' t1' t2' t3' = t' in
let t123 = t1 + 0x100 * t2 + 0x10000 * t3 in
let t123' = t1' + 0x100 * t2' + 0x10000 * t3' in
assert_norm (four_to_nat 8 t == four_to_nat_unfold 8 t );
assert_norm (four_to_nat 8 t' == four_to_nat_unfold 8 t');
nat32_xor_bytewise_3_helper1 t123 t123' t0 t0' x x';
()
let nat32_xor_bytewise_1_helper3 (k k':nat32) (s s':four nat8) : Lemma
(requires
k == four_to_nat 8 s /\
k' == four_to_nat 8 s' /\
s.hi3 == s'.hi3
)
(ensures k / 0x1000000 == k' / 0x1000000)
=
let Mkfour _ _ _ _ = s in
let Mkfour _ _ _ _ = s' in
assert_norm (four_to_nat 8 s == four_to_nat_unfold 8 s );
assert_norm (four_to_nat 8 s' == four_to_nat_unfold 8 s');
()
let nat32_xor_bytewise_2_helper3 (k k':nat32) (s s':four nat8) : Lemma
(requires
k == four_to_nat 8 s /\
k' == four_to_nat 8 s' /\
s.hi3 == s'.hi3 /\ s.hi2 == s'.hi2
)
(ensures k / 0x10000 == k' / 0x10000)
=
let Mkfour _ _ _ _ = s in
let Mkfour _ _ _ _ = s' in
assert_norm (four_to_nat 8 s == four_to_nat_unfold 8 s );
assert_norm (four_to_nat 8 s' == four_to_nat_unfold 8 s');
()
let nat32_xor_bytewise_3_helper3 (k k':nat32) (s s':four nat8) : Lemma
(requires
k == four_to_nat 8 s /\
k' == four_to_nat 8 s' /\
s.hi3 == s'.hi3 /\ s.hi2 == s'.hi2 /\ s.lo1 == s'.lo1
)
(ensures k / 0x100 == k' / 0x100)
=
let Mkfour _ _ _ _ = s in
let Mkfour _ _ _ _ = s' in
assert_norm (four_to_nat 8 s == four_to_nat_unfold 8 s );
assert_norm (four_to_nat 8 s' == four_to_nat_unfold 8 s');
()
let nat32_xor_bytewise_1 (k k' x x' m:nat32) (s s' t t':four nat8) : Lemma
(requires
k == four_to_nat 8 s /\
k' == four_to_nat 8 s' /\
x == four_to_nat 8 t /\
x' == four_to_nat 8 t' /\
ixor k m == x /\
ixor k' m == x' /\
s.hi3 == s'.hi3
)
(ensures t.hi3 == t'.hi3)
=
let Mkfour s0 s1 s2 s3 = s in
let Mkfour s0' s1' s2' s3' = s' in
let Mkfour t0 t1 t2 t3 = t in
let Mkfour t0' t1' t2' t3' = t' in
nat32_xor_bytewise_1_helper3 k k' s s';
lemma_ishr_32 k 24;
lemma_ishr_32 k' 24;
lemma_ishr_32 x 24;
lemma_ishr_32 x' 24;
lemma_ishr_ixor_32 k m 24;
lemma_ishr_ixor_32 k' m 24;
assert_norm (pow2 24 == pow2_24);
nat32_xor_bytewise_1_helper2 x x' t t';
()
let nat32_xor_bytewise_2 (k k' x x' m:nat32) (s s' t t':four nat8) : Lemma
(requires
k == four_to_nat 8 s /\
k' == four_to_nat 8 s' /\
x == four_to_nat 8 t /\
x' == four_to_nat 8 t' /\
ixor k m == x /\
ixor k' m == x' /\
s.hi3 == s'.hi3 /\ s.hi2 == s'.hi2
) | false | false | Vale.AES.GCTR_BE.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val nat32_xor_bytewise_2 (k k' x x' m: nat32) (s s' t t': four nat8)
: Lemma
(requires
k == four_to_nat 8 s /\ k' == four_to_nat 8 s' /\ x == four_to_nat 8 t /\
x' == four_to_nat 8 t' /\ ixor k m == x /\ ixor k' m == x' /\ s.hi3 == s'.hi3 /\
s.hi2 == s'.hi2) (ensures t.hi3 == t'.hi3 /\ t.hi2 == t'.hi2) | [] | Vale.AES.GCTR_BE.nat32_xor_bytewise_2 | {
"file_name": "vale/code/crypto/aes/Vale.AES.GCTR_BE.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
k: Vale.Def.Types_s.nat32 ->
k': Vale.Def.Types_s.nat32 ->
x: Vale.Def.Types_s.nat32 ->
x': Vale.Def.Types_s.nat32 ->
m: Vale.Def.Types_s.nat32 ->
s: Vale.Def.Words_s.four Vale.Def.Types_s.nat8 ->
s': Vale.Def.Words_s.four Vale.Def.Types_s.nat8 ->
t: Vale.Def.Words_s.four Vale.Def.Types_s.nat8 ->
t': Vale.Def.Words_s.four Vale.Def.Types_s.nat8
-> FStar.Pervasives.Lemma
(requires
k == Vale.Def.Words.Four_s.four_to_nat 8 s /\ k' == Vale.Def.Words.Four_s.four_to_nat 8 s' /\
x == Vale.Def.Words.Four_s.four_to_nat 8 t /\ x' == Vale.Def.Words.Four_s.four_to_nat 8 t' /\
Vale.Def.Types_s.ixor k m == x /\ Vale.Def.Types_s.ixor k' m == x' /\
Mkfour?.hi3 s == Mkfour?.hi3 s' /\ Mkfour?.hi2 s == Mkfour?.hi2 s')
(ensures Mkfour?.hi3 t == Mkfour?.hi3 t' /\ Mkfour?.hi2 t == Mkfour?.hi2 t') | {
"end_col": 4,
"end_line": 367,
"start_col": 3,
"start_line": 354
} |