Dataset schema — one row per extracted F* top-level definition; the length and class figures are the dataset viewer's per-column summaries. The data rows below list these fields in this order, separated by `|`.

| field | type / summary |
|---|---|
| file_name | string, 5–52 chars |
| name | string, 4–95 chars |
| original_source_type | string, 0–23k chars |
| source_type | string, 9–23k chars |
| source_definition | string, 9–57.9k chars |
| source | dict |
| source_range | dict |
| file_context | string, 0–721k chars |
| dependencies | dict |
| opens_and_abbrevs | list, 2–94 items |
| vconfig | dict |
| interleaved | bool, 1 class |
| verbose_type | string, 1–7.42k chars |
| effect | string, 118 classes |
| effect_flags | sequence, 0–2 items |
| mutual_with | sequence, 0–11 items |
| ideal_premises | sequence, 0–236 items |
| proof_features | sequence, 0–1 items |
| is_simple_lemma | bool, 2 classes |
| is_div | bool, 2 classes |
| is_proof | bool, 2 classes |
| is_simply_typed | bool, 2 classes |
| is_type | bool, 2 classes |
| partial_definition | string, 5–3.99k chars |
| completed_definiton | string, 1–1.63M chars |
| isa_cross_project_example | bool, 1 class |
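For readers who want to consume rows with this schema programmatically, the sketch below shows one way to iterate them with the Hugging Face `datasets` library and pull out a few commonly used fields (`name`, `source_definition`, `effect`, `is_proof`, `vconfig`). This is a minimal sketch, not part of the dataset itself: the repository id `your-org/fstar-definitions` is a placeholder, and it assumes the rows are published as a standard `datasets`-loadable split.

```python
# Minimal sketch: iterating rows that follow the schema above.
# Assumptions (not from the original document):
#   - the rows are published as a Hugging Face dataset;
#   - "your-org/fstar-definitions" is a placeholder repository id.
from datasets import load_dataset

ds = load_dataset("your-org/fstar-definitions", split="train")

for row in ds.select(range(3)):  # inspect the first few rows
    print(row["file_name"], "::", row["name"])
    print("  effect:", row["effect"], "| is_proof:", row["is_proof"])
    # source_definition holds the full F* definition as text
    print(" ", row["source_definition"][:120].replace("\n", " "), "...")
    # vconfig is a dict of the solver options the definition was checked with
    print("  z3rlimit:", row["vconfig"]["z3rlimit"])
```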
Vale.X64.MemoryAdapters.fsti | Vale.X64.MemoryAdapters.as_vale_stack | val as_vale_stack (st: BS.machine_stack) : SI.vale_stack | val as_vale_stack (st: BS.machine_stack) : SI.vale_stack | let as_vale_stack (st:BS.machine_stack)
: SI.vale_stack
= IB.coerce st | {
"file_name": "vale/code/arch/x64/interop/Vale.X64.MemoryAdapters.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 16,
"end_line": 44,
"start_col": 0,
"start_line": 42
} | module Vale.X64.MemoryAdapters
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
open Vale.Arch.MachineHeap_s
module BS = Vale.X64.Machine_Semantics_s
module BV = LowStar.BufferView
module HS = FStar.HyperStack
module ME = Vale.X64.Memory
module SI = Vale.X64.Stack_i
module IB = Vale.Interop.Base
module VS = Vale.X64.State
module V = Vale.X64.Decls
module Map16 = Vale.Lib.Map16
val as_vale_buffer (#src #t:base_typ) (i:IB.buf_t src t) : GTot (ME.buffer t)
val as_vale_immbuffer (#src #t:base_typ) (i:IB.ibuf_t src t) : GTot (ME.buffer t)
val stack_eq : squash (BS.machine_stack == SI.vale_stack)
val as_mem (h:ME.vale_heap) : GTot IB.interop_heap
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
val lemma_heap_impl : squash (heap_impl == vale_full_heap)
val create_initial_vale_heap (ih:IB.interop_heap) : GTot vale_heap
val create_initial_vale_full_heap (ih:IB.interop_heap) (mt:memTaint_t) : Ghost vale_full_heap
(requires True)
(ensures fun h ->
h == coerce (heap_create_impl ih mt) /\
ME.mem_inv h /\
ME.is_initial_heap h.vf_layout h.vf_heap /\
ME.get_heaplet_id h.vf_heap == None /\
h.vf_heap == create_initial_vale_heap ih
) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Arch.MachineHeap_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Vale.X64.MemoryAdapters.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Base",
"short_module": "IB"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Base",
"short_module": "IB"
},
{
"abbrev": true,
"full_module": "Vale.X64.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView",
"short_module": "BV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": false,
"full_module": "Vale.Arch.MachineHeap_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | st: Vale.X64.Machine_Semantics_s.machine_stack -> Vale.X64.Stack_i.vale_stack | Prims.Tot | [
"total"
] | [] | [
"Vale.X64.Machine_Semantics_s.machine_stack",
"Vale.Interop.Base.coerce",
"Vale.X64.Stack_i.vale_stack"
] | [] | false | false | false | true | false | let as_vale_stack (st: BS.machine_stack) : SI.vale_stack =
| IB.coerce st | false |
Vale.X64.MemoryAdapters.fsti | Vale.X64.MemoryAdapters.coerce | val coerce (#b #a: Type) (x: a{a == b}) : b | val coerce (#b #a: Type) (x: a{a == b}) : b | let coerce (#b #a:Type) (x:a{a == b}) : b = x | {
"file_name": "vale/code/arch/x64/interop/Vale.X64.MemoryAdapters.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 52,
"end_line": 25,
"start_col": 7,
"start_line": 25
} | module Vale.X64.MemoryAdapters
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
open Vale.Arch.MachineHeap_s
module BS = Vale.X64.Machine_Semantics_s
module BV = LowStar.BufferView
module HS = FStar.HyperStack
module ME = Vale.X64.Memory
module SI = Vale.X64.Stack_i
module IB = Vale.Interop.Base
module VS = Vale.X64.State
module V = Vale.X64.Decls
module Map16 = Vale.Lib.Map16
val as_vale_buffer (#src #t:base_typ) (i:IB.buf_t src t) : GTot (ME.buffer t)
val as_vale_immbuffer (#src #t:base_typ) (i:IB.ibuf_t src t) : GTot (ME.buffer t)
val stack_eq : squash (BS.machine_stack == SI.vale_stack)
val as_mem (h:ME.vale_heap) : GTot IB.interop_heap | {
"checked_file": "/",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Arch.MachineHeap_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Vale.X64.MemoryAdapters.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Base",
"short_module": "IB"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.X64.Decls",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "Vale.X64.State",
"short_module": "VS"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Base",
"short_module": "IB"
},
{
"abbrev": true,
"full_module": "Vale.X64.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView",
"short_module": "BV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": false,
"full_module": "Vale.Arch.MachineHeap_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: a{a == b} -> b | Prims.Tot | [
"total"
] | [] | [
"Prims.eq2"
] | [] | false | false | false | false | false | let coerce (#b #a: Type) (x: a{a == b}) : b =
| x | false |
Hacl.Impl.Frodo.KEM.Encaps.fst | Hacl.Impl.Frodo.KEM.Encaps.frodo_mul_add_sa_plus_e | val frodo_mul_add_sa_plus_e:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h bp_matrix /\
disjoint bp_matrix seed_a /\ disjoint bp_matrix ep_matrix /\ disjoint bp_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc bp_matrix) h0 h1 /\
as_matrix h1 bp_matrix ==
S.frodo_mul_add_sa_plus_e a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix)) | val frodo_mul_add_sa_plus_e:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h bp_matrix /\
disjoint bp_matrix seed_a /\ disjoint bp_matrix ep_matrix /\ disjoint bp_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc bp_matrix) h0 h1 /\
as_matrix h1 bp_matrix ==
S.frodo_mul_add_sa_plus_e a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix)) | let frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix =
push_frame ();
let a_matrix = matrix_create (params_n a) (params_n a) in
frodo_gen_matrix gen_a (params_n a) seed_a a_matrix;
matrix_mul sp_matrix a_matrix bp_matrix;
matrix_add bp_matrix ep_matrix;
pop_frame () | {
"file_name": "code/frodo/Hacl.Impl.Frodo.KEM.Encaps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 53,
"start_col": 0,
"start_line": 47
} | module Hacl.Impl.Frodo.KEM.Encaps
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open LowStar.Buffer
open Lib.IntTypes
open Lib.Buffer
open Hacl.Impl.Matrix
open Hacl.Impl.Frodo.Params
open Hacl.Impl.Frodo.KEM
open Hacl.Impl.Frodo.Encode
open Hacl.Impl.Frodo.Pack
open Hacl.Impl.Frodo.Sample
open Hacl.Frodo.Random
module ST = FStar.HyperStack.ST
module LSeq = Lib.Sequence
module LB = Lib.ByteSequence
module FP = Spec.Frodo.Params
module S = Spec.Frodo.KEM.Encaps
module M = Spec.Matrix
module KG = Hacl.Impl.Frodo.KEM.KeyGen
#set-options "--z3rlimit 100 --fuel 0 --ifuel 0"
inline_for_extraction noextract
val frodo_mul_add_sa_plus_e:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h bp_matrix /\
disjoint bp_matrix seed_a /\ disjoint bp_matrix ep_matrix /\ disjoint bp_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc bp_matrix) h0 h1 /\
as_matrix h1 bp_matrix ==
S.frodo_mul_add_sa_plus_e a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix)) | {
"checked_file": "/",
"dependencies": [
"Spec.Matrix.fst.checked",
"Spec.Frodo.Params.fst.checked",
"Spec.Frodo.KEM.Encaps.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Matrix.fst.checked",
"Hacl.Impl.Frodo.Sample.fst.checked",
"Hacl.Impl.Frodo.Params.fst.checked",
"Hacl.Impl.Frodo.Pack.fst.checked",
"Hacl.Impl.Frodo.KEM.KeyGen.fst.checked",
"Hacl.Impl.Frodo.KEM.fst.checked",
"Hacl.Impl.Frodo.Encode.fst.checked",
"Hacl.Frodo.Random.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Frodo.KEM.Encaps.fst"
} | [
{
"abbrev": true,
"full_module": "Hacl.Impl.Frodo.KEM.KeyGen",
"short_module": "KG"
},
{
"abbrev": true,
"full_module": "Spec.Matrix",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.KEM.Encaps",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.Params",
"short_module": "FP"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "LB"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Frodo.Random",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Sample",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Pack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Encode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Params",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Matrix",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Frodo.Params.frodo_alg ->
gen_a: Spec.Frodo.Params.frodo_gen_a{Hacl.Impl.Frodo.Params.is_supported gen_a} ->
seed_a: Hacl.Impl.Matrix.lbytes Hacl.Impl.Frodo.Params.bytes_seed_a ->
sp_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
(Hacl.Impl.Frodo.Params.params_n a) ->
ep_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
(Hacl.Impl.Frodo.Params.params_n a) ->
bp_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
(Hacl.Impl.Frodo.Params.params_n a)
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Spec.Frodo.Params.frodo_alg",
"Spec.Frodo.Params.frodo_gen_a",
"Prims.b2t",
"Hacl.Impl.Frodo.Params.is_supported",
"Hacl.Impl.Matrix.lbytes",
"Hacl.Impl.Frodo.Params.bytes_seed_a",
"Hacl.Impl.Matrix.matrix_t",
"Hacl.Impl.Frodo.Params.params_nbar",
"Hacl.Impl.Frodo.Params.params_n",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Matrix.matrix_add",
"Hacl.Impl.Matrix.matrix_mul",
"Hacl.Impl.Frodo.Params.frodo_gen_matrix",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.MUT",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U16",
"Lib.IntTypes.SEC",
"Lib.IntTypes.mul",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Hacl.Impl.Matrix.matrix_create",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix =
| push_frame ();
let a_matrix = matrix_create (params_n a) (params_n a) in
frodo_gen_matrix gen_a (params_n a) seed_a a_matrix;
matrix_mul sp_matrix a_matrix bp_matrix;
matrix_add bp_matrix ep_matrix;
pop_frame () | false |
Hacl.Impl.Ed25519.Ladder.fst | Hacl.Impl.Ed25519.Ladder.point_mul_g_mk_q1234 | val point_mul_g_mk_q1234: out:point -> bscalar:lbuffer uint64 4ul -> q1:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h out /\ live h q1 /\
disjoint out bscalar /\ disjoint out q1 /\
BD.bn_v h bscalar < pow2 256 /\
F51.linv (as_seq h q1) /\ refl (as_seq h q1) == g_aff)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
(let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 (BD.bn_v h0 bscalar) in
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_four_fw S.mk_ed25519_comm_monoid
g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4)) | val point_mul_g_mk_q1234: out:point -> bscalar:lbuffer uint64 4ul -> q1:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h out /\ live h q1 /\
disjoint out bscalar /\ disjoint out q1 /\
BD.bn_v h bscalar < pow2 256 /\
F51.linv (as_seq h q1) /\ refl (as_seq h q1) == g_aff)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
(let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 (BD.bn_v h0 bscalar) in
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_four_fw S.mk_ed25519_comm_monoid
g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4)) | let point_mul_g_mk_q1234 out bscalar q1 =
push_frame ();
let q2 = mk_ext_g_pow2_64 () in
let q3 = mk_ext_g_pow2_128 () in
let q4 = mk_ext_g_pow2_192 () in
ext_g_pow2_64_lseq_lemma ();
ext_g_pow2_128_lseq_lemma ();
ext_g_pow2_192_lseq_lemma ();
point_mul_g_noalloc out bscalar q1 q2 q3 q4;
pop_frame () | {
"file_name": "code/ed25519/Hacl.Impl.Ed25519.Ladder.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 215,
"start_col": 0,
"start_line": 206
} | module Hacl.Impl.Ed25519.Ladder
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum25519
module F51 = Hacl.Impl.Ed25519.Field51
module BSeq = Lib.ByteSequence
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BE = Hacl.Impl.Exponentiation
module ME = Hacl.Impl.MultiExponentiation
module PT = Hacl.Impl.PrecompTable
module SPT256 = Hacl.Spec.PrecompBaseTable256
module BD = Hacl.Bignum.Definitions
module SD = Hacl.Spec.Bignum.Definitions
module S = Spec.Ed25519
open Hacl.Impl.Ed25519.PointConstants
include Hacl.Impl.Ed25519.Group
include Hacl.Ed25519.PrecompTable
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
inline_for_extraction noextract
let table_inv_w4 : BE.table_inv_t U64 20ul 16ul =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 4ul in
[@inline_let] let table_len = 16ul in
BE.table_inv_precomp len ctx_len k l table_len
inline_for_extraction noextract
let table_inv_w5 : BE.table_inv_t U64 20ul 32ul =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 5ul in
[@inline_let] let table_len = 32ul in
assert_norm (pow2 (v l) = v table_len);
BE.table_inv_precomp len ctx_len k l table_len
inline_for_extraction noextract
val convert_scalar: scalar:lbuffer uint8 32ul -> bscalar:lbuffer uint64 4ul ->
Stack unit
(requires fun h -> live h scalar /\ live h bscalar /\ disjoint scalar bscalar)
(ensures fun h0 _ h1 -> modifies (loc bscalar) h0 h1 /\
BD.bn_v h1 bscalar == BSeq.nat_from_bytes_le (as_seq h0 scalar))
let convert_scalar scalar bscalar =
let h0 = ST.get () in
Hacl.Spec.Bignum.Convert.bn_from_bytes_le_lemma #U64 32 (as_seq h0 scalar);
Hacl.Bignum.Convert.mk_bn_from_bytes_le true 32ul scalar bscalar
inline_for_extraction noextract
val point_mul_noalloc:
out:point
-> bscalar:lbuffer uint64 4ul
-> q:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h q /\ live h out /\
disjoint q out /\ disjoint q bscalar /\ disjoint out bscalar /\
F51.point_inv_t h q /\ F51.inv_ext_point (as_seq h q) /\
BD.bn_v h bscalar < pow2 256)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.point_inv_t h1 out /\ F51.inv_ext_point (as_seq h1 out) /\
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_fw S.mk_ed25519_comm_monoid
(S.to_aff_point (F51.point_eval h0 q)) 256 (BD.bn_v h0 bscalar) 4)
let point_mul_noalloc out bscalar q =
BE.lexp_fw_consttime 20ul 0ul mk_ed25519_concrete_ops
4ul (null uint64) q 4ul 256ul bscalar out
let point_mul out scalar q =
let h0 = ST.get () in
SE.exp_fw_lemma S.mk_ed25519_concrete_ops
(F51.point_eval h0 q) 256 (BSeq.nat_from_bytes_le (as_seq h0 scalar)) 4;
push_frame ();
let bscalar = create 4ul (u64 0) in
convert_scalar scalar bscalar;
point_mul_noalloc out bscalar q;
pop_frame ()
val precomp_get_consttime: BE.pow_a_to_small_b_st U64 20ul 0ul mk_ed25519_concrete_ops 4ul 16ul
(BE.table_inv_precomp 20ul 0ul mk_ed25519_concrete_ops 4ul 16ul)
[@CInline]
let precomp_get_consttime ctx a table bits_l tmp =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 4ul in
[@inline_let] let table_len = 16ul in
BE.lprecomp_get_consttime len ctx_len k l table_len ctx a table bits_l tmp
inline_for_extraction noextract
val point_mul_g_noalloc: out:point -> bscalar:lbuffer uint64 4ul
-> q1:point -> q2:point
-> q3:point -> q4:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h out /\ live h q1 /\
live h q2 /\ live h q3 /\ live h q4 /\
disjoint out bscalar /\ disjoint out q1 /\ disjoint out q2 /\
disjoint out q3 /\ disjoint out q4 /\
disjoint q1 q2 /\ disjoint q1 q3 /\ disjoint q1 q4 /\
disjoint q2 q3 /\ disjoint q2 q4 /\ disjoint q3 q4 /\
BD.bn_v h bscalar < pow2 256 /\
F51.linv (as_seq h q1) /\ refl (as_seq h q1) == g_aff /\
F51.linv (as_seq h q2) /\ refl (as_seq h q2) == g_pow2_64 /\
F51.linv (as_seq h q3) /\ refl (as_seq h q3) == g_pow2_128 /\
F51.linv (as_seq h q4) /\ refl (as_seq h q4) == g_pow2_192)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
(let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 (BD.bn_v h0 bscalar) in
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_four_fw S.mk_ed25519_comm_monoid
g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4))
let point_mul_g_noalloc out bscalar q1 q2 q3 q4 =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 4ul in
[@inline_let] let table_len = 16ul in
[@inline_let] let bLen = 1ul in
[@inline_let] let bBits = 64ul in
let h0 = ST.get () in
recall_contents precomp_basepoint_table_w4 precomp_basepoint_table_lseq_w4;
let h1 = ST.get () in
precomp_basepoint_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q1) (as_seq h1 precomp_basepoint_table_w4));
recall_contents precomp_g_pow2_64_table_w4 precomp_g_pow2_64_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_64_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q2) (as_seq h1 precomp_g_pow2_64_table_w4));
recall_contents precomp_g_pow2_128_table_w4 precomp_g_pow2_128_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_128_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q3) (as_seq h1 precomp_g_pow2_128_table_w4));
recall_contents precomp_g_pow2_192_table_w4 precomp_g_pow2_192_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_192_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q4) (as_seq h1 precomp_g_pow2_192_table_w4));
let r1 = sub bscalar 0ul 1ul in
let r2 = sub bscalar 1ul 1ul in
let r3 = sub bscalar 2ul 1ul in
let r4 = sub bscalar 3ul 1ul in
SPT256.lemma_decompose_nat256_as_four_u64_lbignum (as_seq h0 bscalar);
ME.mk_lexp_four_fw_tables len ctx_len k l table_len
table_inv_w4 table_inv_w4 table_inv_w4 table_inv_w4
precomp_get_consttime
precomp_get_consttime
precomp_get_consttime
precomp_get_consttime
(null uint64) q1 bLen bBits r1 q2 r2 q3 r3 q4 r4
(to_const precomp_basepoint_table_w4)
(to_const precomp_g_pow2_64_table_w4)
(to_const precomp_g_pow2_128_table_w4)
(to_const precomp_g_pow2_192_table_w4)
out;
LowStar.Ignore.ignore q2; // q2, q3, q4 are unused variables
LowStar.Ignore.ignore q3;
LowStar.Ignore.ignore q4
inline_for_extraction noextract
val point_mul_g_mk_q1234: out:point -> bscalar:lbuffer uint64 4ul -> q1:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h out /\ live h q1 /\
disjoint out bscalar /\ disjoint out q1 /\
BD.bn_v h bscalar < pow2 256 /\
F51.linv (as_seq h q1) /\ refl (as_seq h q1) == g_aff)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
(let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 (BD.bn_v h0 bscalar) in
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_four_fw S.mk_ed25519_comm_monoid
g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4)) | {
"checked_file": "/",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"Spec.Ed25519.Lemmas.fsti.checked",
"Spec.Ed25519.fst.checked",
"prims.fst.checked",
"LowStar.Ignore.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.PrecompBaseTable256.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"Hacl.Spec.Bignum.Convert.fst.checked",
"Hacl.Impl.PrecompTable.fsti.checked",
"Hacl.Impl.MultiExponentiation.fsti.checked",
"Hacl.Impl.Exponentiation.fsti.checked",
"Hacl.Impl.Ed25519.PointNegate.fst.checked",
"Hacl.Impl.Ed25519.PointConstants.fst.checked",
"Hacl.Impl.Ed25519.Group.fst.checked",
"Hacl.Impl.Ed25519.Field51.fst.checked",
"Hacl.Ed25519.PrecompTable.fsti.checked",
"Hacl.Bignum25519.fsti.checked",
"Hacl.Bignum.Definitions.fst.checked",
"Hacl.Bignum.Convert.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.Impl.Ed25519.Ladder.fst"
} | [
{
"abbrev": false,
"full_module": "Hacl.Ed25519.PrecompTable",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.Group",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.PointConstants",
"short_module": null
},
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "SD"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.PrecompBaseTable256",
"short_module": "SPT256"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.PrecompTable",
"short_module": "PT"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.MultiExponentiation",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Exponentiation",
"short_module": "BE"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
out: Hacl.Bignum25519.point ->
bscalar: Lib.Buffer.lbuffer Lib.IntTypes.uint64 4ul ->
q1: Hacl.Bignum25519.point
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Hacl.Bignum25519.point",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint64",
"FStar.UInt32.__uint_to_t",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Ed25519.Ladder.point_mul_g_noalloc",
"Hacl.Ed25519.PrecompTable.ext_g_pow2_192_lseq_lemma",
"Hacl.Ed25519.PrecompTable.ext_g_pow2_128_lseq_lemma",
"Hacl.Ed25519.PrecompTable.ext_g_pow2_64_lseq_lemma",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.MUT",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U64",
"Lib.IntTypes.SEC",
"FStar.UInt32.uint_to_t",
"FStar.UInt32.t",
"Hacl.Ed25519.PrecompTable.mk_ext_g_pow2_192",
"Hacl.Ed25519.PrecompTable.mk_ext_g_pow2_128",
"Hacl.Ed25519.PrecompTable.mk_ext_g_pow2_64",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let point_mul_g_mk_q1234 out bscalar q1 =
| push_frame ();
let q2 = mk_ext_g_pow2_64 () in
let q3 = mk_ext_g_pow2_128 () in
let q4 = mk_ext_g_pow2_192 () in
ext_g_pow2_64_lseq_lemma ();
ext_g_pow2_128_lseq_lemma ();
ext_g_pow2_192_lseq_lemma ();
point_mul_g_noalloc out bscalar q1 q2 q3 q4;
pop_frame () | false |
SteelLoops.fst | SteelLoops.sum_to_n_while | val sum_to_n_while (r: ref UInt32.t) : SteelT unit (vptr r) (fun _ -> vptr r) | val sum_to_n_while (r: ref UInt32.t) : SteelT unit (vptr r) (fun _ -> vptr r) | let sum_to_n_while (r:ref UInt32.t) : SteelT unit (vptr r) (fun _ -> vptr r) =
intro_exists (Ghost.hide true) (fun _ -> vptr r);
while_loop
(fun _ -> vptr r)
(fun _ ->
let _ = witness_exists () in
let n = read r in
FStar.UInt32.lt n 10ul
)
(fun _ ->
let n = read r in
write r (n `FStar.UInt32.add_mod` 1ul);
intro_exists (Ghost.hide true) (fun _ -> vptr r)
) | {
"file_name": "share/steel/tests/krml/SteelLoops.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 5,
"end_line": 39,
"start_col": 0,
"start_line": 26
} | module SteelLoops
open Steel.Effect.Atomic
open Steel.Effect
open Steel.Reference
open Steel.Loops
let sum_to_n_for (r:ref UInt32.t) : SteelT unit (vptr r) (fun _ -> vptr r) =
for_loop
0sz
10sz
(fun _ -> vptr r)
(fun _ -> let x = read r in write r (x `FStar.UInt32.add_mod` 1ul))
let sum_to_n_for_2 (r:ref UInt32.t) : Steel unit (vptr r) (fun _ -> vptr r)
(requires fun h -> sel r h == 0ul)
(ensures fun h0 _ h1 -> sel r h1 == 10ul)
=
for_loop_full
0sz
10sz
(fun _ -> vptr r)
(fun i v -> v == UInt32.uint_to_t i)
(fun _ -> let x = read r in write r (x `FStar.UInt32.add_mod` 1ul)) | {
"checked_file": "/",
"dependencies": [
"Steel.Reference.fsti.checked",
"Steel.Loops.fsti.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.SizeT.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "SteelLoops.fst"
} | [
{
"abbrev": false,
"full_module": "Steel.Loops",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Reference",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: Steel.Reference.ref FStar.UInt32.t -> Steel.Effect.SteelT Prims.unit | Steel.Effect.SteelT | [] | [] | [
"Steel.Reference.ref",
"FStar.UInt32.t",
"Steel.Loops.while_loop",
"FStar.Ghost.erased",
"Prims.bool",
"Steel.Reference.vptr",
"Steel.Effect.Common.vprop",
"Prims.unit",
"FStar.UInt32.lt",
"FStar.UInt32.__uint_to_t",
"Steel.Reference.read",
"Steel.Effect.Atomic.witness_exists",
"FStar.Ghost.hide",
"FStar.Set.set",
"Steel.Memory.iname",
"FStar.Set.empty",
"Steel.Reference.vptrp",
"Steel.FractionalPermission.full_perm",
"Steel.Effect.Atomic.intro_exists",
"Steel.Reference.write",
"FStar.UInt32.add_mod"
] | [] | false | true | false | false | false | let sum_to_n_while (r: ref UInt32.t) : SteelT unit (vptr r) (fun _ -> vptr r) =
| intro_exists (Ghost.hide true) (fun _ -> vptr r);
while_loop (fun _ -> vptr r)
(fun _ ->
let _ = witness_exists () in
let n = read r in
FStar.UInt32.lt n 10ul)
(fun _ ->
let n = read r in
write r (n `FStar.UInt32.add_mod` 1ul);
intro_exists (Ghost.hide true) (fun _ -> vptr r)) | false |
Hacl.Impl.Frodo.KEM.Encaps.fst | Hacl.Impl.Frodo.KEM.Encaps.crypto_kem_enc_ss | val crypto_kem_enc_ss:
a:FP.frodo_alg
-> k:lbytes (crypto_bytes a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> Stack unit
(requires fun h ->
live h k /\ live h ct /\ live h ss /\
disjoint ct ss /\ disjoint k ct /\ disjoint k ss)
(ensures fun h0 _ h1 -> modifies (loc ss) h0 h1 /\
as_seq h1 ss == S.crypto_kem_enc_ss a (as_seq h0 k) (as_seq h0 ct)) | val crypto_kem_enc_ss:
a:FP.frodo_alg
-> k:lbytes (crypto_bytes a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> Stack unit
(requires fun h ->
live h k /\ live h ct /\ live h ss /\
disjoint ct ss /\ disjoint k ct /\ disjoint k ss)
(ensures fun h0 _ h1 -> modifies (loc ss) h0 h1 /\
as_seq h1 ss == S.crypto_kem_enc_ss a (as_seq h0 k) (as_seq h0 ct)) | let crypto_kem_enc_ss a k ct ss =
push_frame ();
let ss_init_len = crypto_ciphertextbytes a +! crypto_bytes a in
let shake_input_ss = create ss_init_len (u8 0) in
concat2 (crypto_ciphertextbytes a) ct (crypto_bytes a) k shake_input_ss;
frodo_shake a ss_init_len shake_input_ss (crypto_bytes a) ss;
clear_words_u8 shake_input_ss;
pop_frame () | {
"file_name": "code/frodo/Hacl.Impl.Frodo.KEM.Encaps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 304,
"start_col": 0,
"start_line": 297
} | module Hacl.Impl.Frodo.KEM.Encaps
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open LowStar.Buffer
open Lib.IntTypes
open Lib.Buffer
open Hacl.Impl.Matrix
open Hacl.Impl.Frodo.Params
open Hacl.Impl.Frodo.KEM
open Hacl.Impl.Frodo.Encode
open Hacl.Impl.Frodo.Pack
open Hacl.Impl.Frodo.Sample
open Hacl.Frodo.Random
module ST = FStar.HyperStack.ST
module LSeq = Lib.Sequence
module LB = Lib.ByteSequence
module FP = Spec.Frodo.Params
module S = Spec.Frodo.KEM.Encaps
module M = Spec.Matrix
module KG = Hacl.Impl.Frodo.KEM.KeyGen
#set-options "--z3rlimit 100 --fuel 0 --ifuel 0"
inline_for_extraction noextract
val frodo_mul_add_sa_plus_e:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h bp_matrix /\
disjoint bp_matrix seed_a /\ disjoint bp_matrix ep_matrix /\ disjoint bp_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc bp_matrix) h0 h1 /\
as_matrix h1 bp_matrix ==
S.frodo_mul_add_sa_plus_e a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix =
push_frame ();
let a_matrix = matrix_create (params_n a) (params_n a) in
frodo_gen_matrix gen_a (params_n a) seed_a a_matrix;
matrix_mul sp_matrix a_matrix bp_matrix;
matrix_add bp_matrix ep_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c1:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> c1:lbytes (ct1bytes_len a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h c1 /\
disjoint seed_a c1 /\ disjoint ep_matrix c1 /\ disjoint sp_matrix c1)
(ensures fun h0 _ h1 -> modifies (loc c1) h0 h1 /\
as_seq h1 c1 ==
S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1 =
push_frame ();
let bp_matrix = matrix_create params_nbar (params_n a) in
frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix;
frodo_pack (params_logq a) bp_matrix c1;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e:
a:FP.frodo_alg
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h epp_matrix /\ live h v_matrix /\ live h sp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix epp_matrix /\ disjoint v_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e a (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix =
push_frame ();
let b_matrix = matrix_create (params_n a) params_nbar in
frodo_unpack (params_n a) params_nbar (params_logq a) b b_matrix;
matrix_mul sp_matrix b_matrix v_matrix;
matrix_add v_matrix epp_matrix;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e_plus_mu:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h mu /\ live h v_matrix /\
live h sp_matrix /\ live h epp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix sp_matrix /\
disjoint v_matrix mu /\ disjoint v_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e_plus_mu a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix =
push_frame ();
frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix;
let mu_encode = matrix_create params_nbar params_nbar in
frodo_key_encode (params_logq a) (params_extracted_bits a) params_nbar mu mu_encode;
matrix_add v_matrix mu_encode;
clear_matrix mu_encode;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c2:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> c2:lbytes (ct2bytes_len a)
-> Stack unit
(requires fun h ->
live h mu /\ live h b /\ live h sp_matrix /\
live h epp_matrix /\ live h c2 /\
disjoint mu c2 /\ disjoint b c2 /\
disjoint sp_matrix c2 /\ disjoint epp_matrix c2)
(ensures fun h0 _ h1 -> modifies (loc c2) h0 h1 /\
as_seq h1 c2 ==
S.crypto_kem_enc_ct_pack_c2 a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
#push-options "--z3rlimit 200"
let crypto_kem_enc_ct_pack_c2 a mu b sp_matrix epp_matrix c2 =
push_frame ();
let v_matrix = matrix_create params_nbar params_nbar in
frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix;
frodo_pack (params_logq a) v_matrix c2;
clear_matrix v_matrix;
pop_frame ()
#pop-options
inline_for_extraction noextract
val get_sp_ep_epp_matrices:
a:FP.frodo_alg
-> seed_se:lbytes (crypto_bytes a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h seed_se /\ live h sp_matrix /\
live h ep_matrix /\ live h epp_matrix /\
disjoint seed_se sp_matrix /\ disjoint seed_se ep_matrix /\
disjoint seed_se epp_matrix /\ disjoint sp_matrix ep_matrix /\
disjoint sp_matrix epp_matrix /\ disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1 /\
(as_matrix h1 sp_matrix, as_matrix h1 ep_matrix, as_matrix h1 epp_matrix) ==
S.get_sp_ep_epp_matrices a (as_seq h0 seed_se))
let get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix =
push_frame ();
[@inline_let] let s_bytes_len = secretmatrixbytes_len a in
let r = create (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) (u8 0) in
KG.frodo_shake_r a (u8 0x96) seed_se (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) r;
frodo_sample_matrix a params_nbar (params_n a) (sub r 0ul s_bytes_len) sp_matrix;
frodo_sample_matrix a params_nbar (params_n a) (sub r s_bytes_len s_bytes_len) ep_matrix;
frodo_sample_matrix a params_nbar params_nbar (sub r (2ul *! s_bytes_len) (2ul *! params_nbar *! params_nbar)) epp_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct0:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> b:lbytes (publicmatrixbytes_len a)
-> mu:lbytes (bytes_mu a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> ct:lbytes (crypto_ciphertextbytes a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h b /\ live h mu /\ live h ct /\
live h sp_matrix /\ live h ep_matrix /\ live h epp_matrix /\
disjoint ct seed_a /\ disjoint ct b /\ disjoint ct mu /\
disjoint ct sp_matrix /\ disjoint ct ep_matrix /\ disjoint ct epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc ct) h0 h1 /\
(let c1:LB.lbytes (FP.ct1bytes_len a) = S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_seq h0 sp_matrix) (as_seq h0 ep_matrix) in
let c2:LB.lbytes (FP.ct2bytes_len a) = S.crypto_kem_enc_ct_pack_c2 a (as_seq h0 mu) (as_seq h0 b) (as_seq h0 sp_matrix) (as_seq h0 epp_matrix) in
v (crypto_ciphertextbytes a) == FP.ct1bytes_len a + FP.ct2bytes_len a /\
as_seq h1 ct `Seq.equal` LSeq.concat #_ #(FP.ct1bytes_len a) #(FP.ct2bytes_len a) c1 c2))
let crypto_kem_enc_ct0 a gen_a seed_a b mu sp_matrix ep_matrix epp_matrix ct =
let c1 = sub ct 0ul (ct1bytes_len a) in
let c2 = sub ct (ct1bytes_len a) (ct2bytes_len a) in
let h0 = ST.get () in
crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1;
let h1 = ST.get () in
crypto_kem_enc_ct_pack_c2 a mu b sp_matrix epp_matrix c2;
let h2 = ST.get () in
LSeq.eq_intro
(LSeq.sub (as_seq h2 ct) 0 (v (ct1bytes_len a)))
(LSeq.sub (as_seq h1 ct) 0 (v (ct1bytes_len a)));
LSeq.lemma_concat2
(v (ct1bytes_len a)) (LSeq.sub (as_seq h1 ct) 0 (v (ct1bytes_len a)))
(v (ct2bytes_len a)) (LSeq.sub (as_seq h2 ct) (v (ct1bytes_len a)) (v (ct2bytes_len a))) (as_seq h2 ct)
inline_for_extraction noextract
val clear_matrix3:
a:FP.frodo_alg
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h sp_matrix /\ live h ep_matrix /\ live h epp_matrix /\
disjoint sp_matrix ep_matrix /\ disjoint sp_matrix epp_matrix /\
disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 ->
modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1)
let clear_matrix3 a sp_matrix ep_matrix epp_matrix =
clear_matrix sp_matrix;
clear_matrix ep_matrix;
clear_matrix epp_matrix
inline_for_extraction noextract
val crypto_kem_enc_ct:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu:lbytes (bytes_mu a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se:lbytes (crypto_bytes a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> Stack unit
(requires fun h ->
live h mu /\ live h pk /\ live h seed_se /\ live h ct /\
disjoint ct mu /\ disjoint ct pk /\ disjoint ct seed_se)
(ensures fun h0 _ h1 -> modifies (loc ct) h0 h1 /\
as_seq h1 ct == S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) (as_seq h0 seed_se))
#push-options "--z3rlimit 200"
let crypto_kem_enc_ct a gen_a mu pk seed_se ct =
push_frame ();
let h0 = ST.get () in
FP.expand_crypto_publickeybytes a;
let seed_a = sub pk 0ul bytes_seed_a in
let b = sub pk bytes_seed_a (publicmatrixbytes_len a) in
let sp_matrix = matrix_create params_nbar (params_n a) in
let ep_matrix = matrix_create params_nbar (params_n a) in
let epp_matrix = matrix_create params_nbar params_nbar in
get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix;
crypto_kem_enc_ct0 a gen_a seed_a b mu sp_matrix ep_matrix epp_matrix ct;
clear_matrix3 a sp_matrix ep_matrix epp_matrix;
let h1 = ST.get () in
LSeq.eq_intro
(as_seq h1 ct)
(S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) (as_seq h0 seed_se));
pop_frame ()
#pop-options
inline_for_extraction noextract
val crypto_kem_enc_ss:
a:FP.frodo_alg
-> k:lbytes (crypto_bytes a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> Stack unit
(requires fun h ->
live h k /\ live h ct /\ live h ss /\
disjoint ct ss /\ disjoint k ct /\ disjoint k ss)
(ensures fun h0 _ h1 -> modifies (loc ss) h0 h1 /\
as_seq h1 ss == S.crypto_kem_enc_ss a (as_seq h0 k) (as_seq h0 ct)) | {
"checked_file": "/",
"dependencies": [
"Spec.Matrix.fst.checked",
"Spec.Frodo.Params.fst.checked",
"Spec.Frodo.KEM.Encaps.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Matrix.fst.checked",
"Hacl.Impl.Frodo.Sample.fst.checked",
"Hacl.Impl.Frodo.Params.fst.checked",
"Hacl.Impl.Frodo.Pack.fst.checked",
"Hacl.Impl.Frodo.KEM.KeyGen.fst.checked",
"Hacl.Impl.Frodo.KEM.fst.checked",
"Hacl.Impl.Frodo.Encode.fst.checked",
"Hacl.Frodo.Random.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Frodo.KEM.Encaps.fst"
} | [
{
"abbrev": true,
"full_module": "Hacl.Impl.Frodo.KEM.KeyGen",
"short_module": "KG"
},
{
"abbrev": true,
"full_module": "Spec.Matrix",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.KEM.Encaps",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.Params",
"short_module": "FP"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "LB"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Frodo.Random",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Sample",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Pack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Encode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Params",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Matrix",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Frodo.Params.frodo_alg ->
k: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_bytes a) ->
ct: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_ciphertextbytes a) ->
ss: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_bytes a)
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Spec.Frodo.Params.frodo_alg",
"Hacl.Impl.Matrix.lbytes",
"Hacl.Impl.Frodo.Params.crypto_bytes",
"Hacl.Impl.Frodo.Params.crypto_ciphertextbytes",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Frodo.KEM.clear_words_u8",
"Hacl.Impl.Frodo.Params.frodo_shake",
"Lib.Buffer.concat2",
"Lib.Buffer.MUT",
"Lib.IntTypes.uint8",
"Lib.Buffer.lbuffer_t",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"Lib.Buffer.create",
"Lib.IntTypes.u8",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Lib.IntTypes.op_Plus_Bang",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let crypto_kem_enc_ss a k ct ss =
| push_frame ();
let ss_init_len = crypto_ciphertextbytes a +! crypto_bytes a in
let shake_input_ss = create ss_init_len (u8 0) in
concat2 (crypto_ciphertextbytes a) ct (crypto_bytes a) k shake_input_ss;
frodo_shake a ss_init_len shake_input_ss (crypto_bytes a) ss;
clear_words_u8 shake_input_ss;
pop_frame () | false |
LowParse.Repr.fsti | LowParse.Repr.strong_parser_kind | val strong_parser_kind : Type0 | let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
} | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 5,
"end_line": 49,
"start_col": 0,
"start_line": 46
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Type0 | Prims.Tot | [
"total"
] | [] | [
"LowParse.Spec.Base.parser_kind",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong"
] | [] | false | false | false | true | true | let strong_parser_kind =
| k: LP.parser_kind{let open LP in k.parser_kind_subkind == Some ParserStrong} | false |
|
LowParse.Repr.fsti | LowParse.Repr.preorder | val preorder : c: LowStar.ConstBuffer.const_buffer LowParse.Bytes.byte
-> FStar.Preorder.preorder (FStar.Seq.Base.seq LowParse.Bytes.byte) | let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 66,
"end_line": 51,
"start_col": 0,
"start_line": 51
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
} | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | c: LowStar.ConstBuffer.const_buffer LowParse.Bytes.byte
-> FStar.Preorder.preorder (FStar.Seq.Base.seq LowParse.Bytes.byte) | Prims.Tot | [
"total"
] | [] | [
"LowStar.ConstBuffer.const_buffer",
"LowParse.Bytes.byte",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"FStar.Preorder.preorder",
"FStar.Seq.Base.seq"
] | [] | false | false | false | true | false | let preorder (c: C.const_buffer LP.byte) =
| C.qbuf_pre (C.as_qbuf c) | false |
|
Hacl.Impl.Frodo.KEM.Encaps.fst | Hacl.Impl.Frodo.KEM.Encaps.frodo_mul_add_sb_plus_e | val frodo_mul_add_sb_plus_e:
a:FP.frodo_alg
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h epp_matrix /\ live h v_matrix /\ live h sp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix epp_matrix /\ disjoint v_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e a (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix)) | val frodo_mul_add_sb_plus_e:
a:FP.frodo_alg
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h epp_matrix /\ live h v_matrix /\ live h sp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix epp_matrix /\ disjoint v_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e a (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix)) | let frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix =
push_frame ();
let b_matrix = matrix_create (params_n a) params_nbar in
frodo_unpack (params_n a) params_nbar (params_logq a) b b_matrix;
matrix_mul sp_matrix b_matrix v_matrix;
matrix_add v_matrix epp_matrix;
pop_frame () | {
"file_name": "code/frodo/Hacl.Impl.Frodo.KEM.Encaps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 101,
"start_col": 0,
"start_line": 95
} | module Hacl.Impl.Frodo.KEM.Encaps
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open LowStar.Buffer
open Lib.IntTypes
open Lib.Buffer
open Hacl.Impl.Matrix
open Hacl.Impl.Frodo.Params
open Hacl.Impl.Frodo.KEM
open Hacl.Impl.Frodo.Encode
open Hacl.Impl.Frodo.Pack
open Hacl.Impl.Frodo.Sample
open Hacl.Frodo.Random
module ST = FStar.HyperStack.ST
module LSeq = Lib.Sequence
module LB = Lib.ByteSequence
module FP = Spec.Frodo.Params
module S = Spec.Frodo.KEM.Encaps
module M = Spec.Matrix
module KG = Hacl.Impl.Frodo.KEM.KeyGen
#set-options "--z3rlimit 100 --fuel 0 --ifuel 0"
inline_for_extraction noextract
val frodo_mul_add_sa_plus_e:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h bp_matrix /\
disjoint bp_matrix seed_a /\ disjoint bp_matrix ep_matrix /\ disjoint bp_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc bp_matrix) h0 h1 /\
as_matrix h1 bp_matrix ==
S.frodo_mul_add_sa_plus_e a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix =
push_frame ();
let a_matrix = matrix_create (params_n a) (params_n a) in
frodo_gen_matrix gen_a (params_n a) seed_a a_matrix;
matrix_mul sp_matrix a_matrix bp_matrix;
matrix_add bp_matrix ep_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c1:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> c1:lbytes (ct1bytes_len a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h c1 /\
disjoint seed_a c1 /\ disjoint ep_matrix c1 /\ disjoint sp_matrix c1)
(ensures fun h0 _ h1 -> modifies (loc c1) h0 h1 /\
as_seq h1 c1 ==
S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1 =
push_frame ();
let bp_matrix = matrix_create params_nbar (params_n a) in
frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix;
frodo_pack (params_logq a) bp_matrix c1;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e:
a:FP.frodo_alg
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h epp_matrix /\ live h v_matrix /\ live h sp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix epp_matrix /\ disjoint v_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e a (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix)) | {
"checked_file": "/",
"dependencies": [
"Spec.Matrix.fst.checked",
"Spec.Frodo.Params.fst.checked",
"Spec.Frodo.KEM.Encaps.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Matrix.fst.checked",
"Hacl.Impl.Frodo.Sample.fst.checked",
"Hacl.Impl.Frodo.Params.fst.checked",
"Hacl.Impl.Frodo.Pack.fst.checked",
"Hacl.Impl.Frodo.KEM.KeyGen.fst.checked",
"Hacl.Impl.Frodo.KEM.fst.checked",
"Hacl.Impl.Frodo.Encode.fst.checked",
"Hacl.Frodo.Random.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Frodo.KEM.Encaps.fst"
} | [
{
"abbrev": true,
"full_module": "Hacl.Impl.Frodo.KEM.KeyGen",
"short_module": "KG"
},
{
"abbrev": true,
"full_module": "Spec.Matrix",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.KEM.Encaps",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.Params",
"short_module": "FP"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "LB"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Frodo.Random",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Sample",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Pack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Encode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Params",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Matrix",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Frodo.Params.frodo_alg ->
b: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.publicmatrixbytes_len a) ->
sp_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
(Hacl.Impl.Frodo.Params.params_n a) ->
epp_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
Hacl.Impl.Frodo.Params.params_nbar ->
v_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
Hacl.Impl.Frodo.Params.params_nbar
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Spec.Frodo.Params.frodo_alg",
"Hacl.Impl.Matrix.lbytes",
"Hacl.Impl.Frodo.Params.publicmatrixbytes_len",
"Hacl.Impl.Matrix.matrix_t",
"Hacl.Impl.Frodo.Params.params_nbar",
"Hacl.Impl.Frodo.Params.params_n",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Matrix.matrix_add",
"Hacl.Impl.Matrix.matrix_mul",
"Hacl.Impl.Frodo.Pack.frodo_unpack",
"Hacl.Impl.Frodo.Params.params_logq",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.MUT",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U16",
"Lib.IntTypes.SEC",
"Lib.IntTypes.mul",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Hacl.Impl.Matrix.matrix_create",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix =
| push_frame ();
let b_matrix = matrix_create (params_n a) params_nbar in
frodo_unpack (params_n a) params_nbar (params_logq a) b b_matrix;
matrix_mul sp_matrix b_matrix v_matrix;
matrix_add v_matrix epp_matrix;
pop_frame () | false |
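The body above computes V = S'·B + E'': it unpacks the public matrix B from its byte encoding, multiplies by S', and adds E''. An illustrative caller sketch (assumed name call_mul_add_sb_plus_e, not from the source file; matrix_create returns zero-initialized fresh stack allocations, so the disjointness preconditions hold):
let call_mul_add_sb_plus_e (a:FP.frodo_alg) (b:lbytes (publicmatrixbytes_len a))
  : Stack unit (requires fun h -> live h b) (ensures fun _ _ _ -> True) =
  push_frame ();
  let sp_matrix  = matrix_create params_nbar (params_n a) in
  let epp_matrix = matrix_create params_nbar params_nbar in
  let v_matrix   = matrix_create params_nbar params_nbar in
  frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix;
  pop_frame ()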
LowParse.Repr.fsti | LowParse.Repr.to_slice | val to_slice (x: const_slice) : Tot (LP.slice (preorder x.base) (preorder x.base)) | val to_slice (x: const_slice) : Tot (LP.slice (preorder x.base) (preorder x.base)) | let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 44,
"end_line": 84,
"start_col": 0,
"start_line": 82
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: LowParse.Repr.const_slice
-> LowParse.Slice.slice (LowParse.Repr.preorder (MkSlice?.base x))
(LowParse.Repr.preorder (MkSlice?.base x)) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.const_slice",
"LowParse.Repr.slice_of_const_buffer",
"LowParse.Repr.__proj__MkSlice__item__base",
"LowParse.Repr.__proj__MkSlice__item__slice_len",
"LowParse.Slice.slice",
"LowParse.Repr.preorder"
] | [] | false | false | false | false | false | let to_slice (x: const_slice) : Tot (LP.slice (preorder x.base) (preorder x.base)) =
| slice_of_const_buffer x.base x.slice_len | false |
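The file_context above also defines of_slice, the inverse conversion from a mutable LowParse slice; a small illustrative composition (assumed name const_of_mut, not part of the record) shows how a mutable slice is routed through the const_slice layer and back to an LP.slice:
let const_of_mut (s:LP.slice mut_p mut_p) = to_slice (of_slice s)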
Vale.Lib.BufferViewHelpers.fst | Vale.Lib.BufferViewHelpers.lemma_uv_equal | val lemma_uv_equal (#src #dst: Type) (view: UV.view src dst) (b: DV.buffer src) (h0 h1: HS.mem)
: Lemma (requires (DV.length b % UV.View?.n view == 0 /\ DV.as_seq h0 b == DV.as_seq h1 b))
(ensures
(let bv = UV.mk_buffer b view in
UV.as_seq h0 bv == UV.as_seq h1 bv))
[SMTPat (UV.as_seq h0 (UV.mk_buffer b view)); SMTPat (UV.as_seq h1 (UV.mk_buffer b view))] | val lemma_uv_equal (#src #dst: Type) (view: UV.view src dst) (b: DV.buffer src) (h0 h1: HS.mem)
: Lemma (requires (DV.length b % UV.View?.n view == 0 /\ DV.as_seq h0 b == DV.as_seq h1 b))
(ensures
(let bv = UV.mk_buffer b view in
UV.as_seq h0 bv == UV.as_seq h1 bv))
[SMTPat (UV.as_seq h0 (UV.mk_buffer b view)); SMTPat (UV.as_seq h1 (UV.mk_buffer b view))] | let lemma_uv_equal
(#src:Type) (#dst:Type)
(view:UV.view src dst) (b:DV.buffer src) (h0 h1:HS.mem)
:Lemma (requires (DV.length b % UV.View?.n view == 0 /\ DV.as_seq h0 b == DV.as_seq h1 b))
(ensures (let bv = UV.mk_buffer b view in UV.as_seq h0 bv == UV.as_seq h1 bv))
[SMTPat (UV.as_seq h0 (UV.mk_buffer b view)); SMTPat (UV.as_seq h1 (UV.mk_buffer b view))]
= let uv = UV.mk_buffer b view in
let s0 = UV.as_seq h0 uv in
let s1 = UV.as_seq h1 uv in
let aux (i:nat{i < UV.length uv}) : Lemma (Seq.index s0 i == Seq.index s1 i) =
UV.as_seq_sel h0 uv i;
UV.as_seq_sel h1 uv i;
UV.get_sel h0 uv i;
UV.get_sel h1 uv i
in Classical.forall_intro aux;
Seq.lemma_eq_intro s0 s1 | {
"file_name": "vale/code/lib/util/Vale.Lib.BufferViewHelpers.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 28,
"end_line": 51,
"start_col": 0,
"start_line": 36
} | module Vale.Lib.BufferViewHelpers
open FStar.Mul
module MB = LowStar.Monotonic.Buffer
module BV = LowStar.BufferView
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
module HS = FStar.HyperStack
module ST = FStar.HyperStack.ST
open FStar.HyperStack.ST
open LowStar.Modifies
open LowStar.ModifiesPat
let lemma_dv_equal
(#src:Type)
(#rel #rrel:MB.srel src)
(#dst:Type)
(view:DV.view src dst)
(b:MB.mbuffer src rel rrel)
(h0 h1:HS.mem) : Lemma
(requires MB.as_seq h0 b == MB.as_seq h1 b)
(ensures (let dv = DV.mk_buffer_view b view in
DV.as_seq h0 dv == DV.as_seq h1 dv)) =
let dv = DV.mk_buffer_view b view in
let s0 = DV.as_seq h0 dv in
let s1 = DV.as_seq h1 dv in
let aux (i:nat{i < DV.length dv}) : Lemma (Seq.index s0 i == Seq.index s1 i) =
DV.as_seq_sel h0 dv i;
DV.as_seq_sel h1 dv i;
DV.get_sel h0 dv i;
DV.get_sel h1 dv i
in Classical.forall_intro aux;
Seq.lemma_eq_intro s0 s1 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ModifiesPat.fst.checked",
"LowStar.Modifies.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.BufferView.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Lib.BufferViewHelpers.fst"
} | [
{
"abbrev": false,
"full_module": "LowStar.ModifiesPat",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Modifies",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView",
"short_module": "BV"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "MB"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
view: LowStar.BufferView.Up.view src dst ->
b: LowStar.BufferView.Down.buffer src ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.BufferView.Down.length b % View?.n view == 0 /\
LowStar.BufferView.Down.as_seq h0 b == LowStar.BufferView.Down.as_seq h1 b)
(ensures
(let bv = LowStar.BufferView.Up.mk_buffer b view in
LowStar.BufferView.Up.as_seq h0 bv == LowStar.BufferView.Up.as_seq h1 bv))
[
SMTPat (LowStar.BufferView.Up.as_seq h0 (LowStar.BufferView.Up.mk_buffer b view));
SMTPat (LowStar.BufferView.Up.as_seq h1 (LowStar.BufferView.Up.mk_buffer b view))
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.BufferView.Up.view",
"LowStar.BufferView.Down.buffer",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.lemma_eq_intro",
"Prims.unit",
"FStar.Classical.forall_intro",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"LowStar.BufferView.Up.length",
"Prims.eq2",
"FStar.Seq.Base.index",
"Prims.l_True",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowStar.BufferView.Up.get_sel",
"LowStar.BufferView.Up.as_seq_sel",
"FStar.Seq.Properties.lseq",
"LowStar.BufferView.Up.as_seq",
"LowStar.BufferView.Up.buffer",
"LowStar.BufferView.Up.mk_buffer",
"Prims.l_and",
"Prims.int",
"Prims.op_Modulus",
"LowStar.BufferView.Down.length",
"LowStar.BufferView.Up.__proj__View__item__n",
"LowStar.BufferView.Down.as_seq",
"Prims.Cons",
"FStar.Pervasives.smt_pat"
] | [] | false | false | true | false | false | let lemma_uv_equal (#src #dst: Type) (view: UV.view src dst) (b: DV.buffer src) (h0 h1: HS.mem)
: Lemma (requires (DV.length b % UV.View?.n view == 0 /\ DV.as_seq h0 b == DV.as_seq h1 b))
(ensures
(let bv = UV.mk_buffer b view in
UV.as_seq h0 bv == UV.as_seq h1 bv))
[SMTPat (UV.as_seq h0 (UV.mk_buffer b view)); SMTPat (UV.as_seq h1 (UV.mk_buffer b view))] =
| let uv = UV.mk_buffer b view in
let s0 = UV.as_seq h0 uv in
let s1 = UV.as_seq h1 uv in
let aux (i: nat{i < UV.length uv}) : Lemma (Seq.index s0 i == Seq.index s1 i) =
UV.as_seq_sel h0 uv i;
UV.as_seq_sel h1 uv i;
UV.get_sel h0 uv i;
UV.get_sel h1 uv i
in
Classical.forall_intro aux;
Seq.lemma_eq_intro s0 s1 | false |
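The proof above follows a standard sequence-extensionality pattern: establish the equality pointwise, generalize it with Classical.forall_intro, and close with Seq.lemma_eq_intro. A minimal standalone sketch of the same pattern on plain sequences (assumed name seq_eq_of_pointwise, illustrative only):
let seq_eq_of_pointwise (#a:Type) (s0 s1:Seq.seq a)
  : Lemma (requires Seq.length s0 == Seq.length s1 /\
                    (forall (i:nat{i < Seq.length s0}). Seq.index s0 i == Seq.index s1 i))
          (ensures Seq.equal s0 s1)
  = Seq.lemma_eq_intro s0 s1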
LowParse.Repr.fsti | LowParse.Repr.live_slice | val live_slice : h: FStar.Monotonic.HyperStack.mem -> c: LowParse.Repr.const_slice -> Type0 | let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 19,
"end_line": 93,
"start_col": 0,
"start_line": 92
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: FStar.Monotonic.HyperStack.mem -> c: LowParse.Repr.const_slice -> Type0 | Prims.Tot | [
"total"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"LowParse.Repr.const_slice",
"LowStar.ConstBuffer.live",
"LowParse.Bytes.byte",
"LowParse.Repr.__proj__MkSlice__item__base"
] | [] | false | false | false | true | true | let live_slice (h: HS.mem) (c: const_slice) =
| C.live h c.base | false |
|
LowParse.Repr.fsti | LowParse.Repr.mut_p | val mut_p : LowStar.Monotonic.Buffer.srel LowParse.Bytes.byte | let mut_p = LowStar.Buffer.trivial_preorder LP.byte | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 51,
"end_line": 62,
"start_col": 0,
"start_line": 62
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
}) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | LowStar.Monotonic.Buffer.srel LowParse.Bytes.byte | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.trivial_preorder",
"LowParse.Bytes.byte"
] | [] | false | false | false | true | false | let mut_p =
| LowStar.Buffer.trivial_preorder LP.byte | false |
|
LowParse.Repr.fsti | LowParse.Repr.region_of | val region_of (#t: _) (p: repr_ptr t) : GTot HS.rid | val region_of (#t: _) (p: repr_ptr t) : GTot HS.rid | let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 70,
"end_line": 152,
"start_col": 0,
"start_line": 152
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by erased the `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Repr.repr_ptr t -> Prims.GTot FStar.Monotonic.HyperHeap.rid | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Repr.repr_ptr",
"LowStar.Monotonic.Buffer.frameOf",
"LowParse.Bytes.byte",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowParse.Repr.__proj__Ptr__item__b",
"LowStar.ConstBuffer.cast",
"FStar.Monotonic.HyperHeap.rid"
] | [] | false | false | false | false | false | let region_of #t (p: repr_ptr t) : GTot HS.rid =
| B.frameOf (C.cast p.b) | false |
Vale.Lib.BufferViewHelpers.fst | Vale.Lib.BufferViewHelpers.lemma_dv_equal | val lemma_dv_equal
(#src: Type)
(#rel #rrel: MB.srel src)
(#dst: Type)
(view: DV.view src dst)
(b: MB.mbuffer src rel rrel)
(h0 h1: HS.mem)
: Lemma (requires MB.as_seq h0 b == MB.as_seq h1 b)
(ensures
(let dv = DV.mk_buffer_view b view in
DV.as_seq h0 dv == DV.as_seq h1 dv)) | val lemma_dv_equal
(#src: Type)
(#rel #rrel: MB.srel src)
(#dst: Type)
(view: DV.view src dst)
(b: MB.mbuffer src rel rrel)
(h0 h1: HS.mem)
: Lemma (requires MB.as_seq h0 b == MB.as_seq h1 b)
(ensures
(let dv = DV.mk_buffer_view b view in
DV.as_seq h0 dv == DV.as_seq h1 dv)) | let lemma_dv_equal
(#src:Type)
(#rel #rrel:MB.srel src)
(#dst:Type)
(view:DV.view src dst)
(b:MB.mbuffer src rel rrel)
(h0 h1:HS.mem) : Lemma
(requires MB.as_seq h0 b == MB.as_seq h1 b)
(ensures (let dv = DV.mk_buffer_view b view in
DV.as_seq h0 dv == DV.as_seq h1 dv)) =
let dv = DV.mk_buffer_view b view in
let s0 = DV.as_seq h0 dv in
let s1 = DV.as_seq h1 dv in
let aux (i:nat{i < DV.length dv}) : Lemma (Seq.index s0 i == Seq.index s1 i) =
DV.as_seq_sel h0 dv i;
DV.as_seq_sel h1 dv i;
DV.get_sel h0 dv i;
DV.get_sel h1 dv i
in Classical.forall_intro aux;
Seq.lemma_eq_intro s0 s1 | {
"file_name": "vale/code/lib/util/Vale.Lib.BufferViewHelpers.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 28,
"end_line": 34,
"start_col": 0,
"start_line": 15
} | module Vale.Lib.BufferViewHelpers
open FStar.Mul
module MB = LowStar.Monotonic.Buffer
module BV = LowStar.BufferView
module DV = LowStar.BufferView.Down
module UV = LowStar.BufferView.Up
module HS = FStar.HyperStack
module ST = FStar.HyperStack.ST
open FStar.HyperStack.ST
open LowStar.Modifies
open LowStar.ModifiesPat | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Monotonic.Buffer.fsti.checked",
"LowStar.ModifiesPat.fst.checked",
"LowStar.Modifies.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.BufferView.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Lib.BufferViewHelpers.fst"
} | [
{
"abbrev": false,
"full_module": "LowStar.ModifiesPat",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Modifies",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView",
"short_module": "BV"
},
{
"abbrev": true,
"full_module": "LowStar.Monotonic.Buffer",
"short_module": "MB"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
view: LowStar.BufferView.Down.view src dst ->
b: LowStar.Monotonic.Buffer.mbuffer src rel rrel ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires LowStar.Monotonic.Buffer.as_seq h0 b == LowStar.Monotonic.Buffer.as_seq h1 b)
(ensures
(let dv = LowStar.BufferView.Down.mk_buffer_view b view in
LowStar.BufferView.Down.as_seq h0 dv == LowStar.BufferView.Down.as_seq h1 dv)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.BufferView.Down.view",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.lemma_eq_intro",
"Prims.unit",
"FStar.Classical.forall_intro",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"LowStar.BufferView.Down.length",
"Prims.eq2",
"FStar.Seq.Base.index",
"Prims.l_True",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowStar.BufferView.Down.get_sel",
"LowStar.BufferView.Down.as_seq_sel",
"FStar.Seq.Properties.lseq",
"LowStar.BufferView.Down.as_seq",
"LowStar.BufferView.Down.buffer",
"LowStar.BufferView.Down.mk_buffer_view",
"FStar.Seq.Base.seq",
"LowStar.Monotonic.Buffer.as_seq"
] | [] | false | false | true | false | false | let lemma_dv_equal
(#src: Type)
(#rel #rrel: MB.srel src)
(#dst: Type)
(view: DV.view src dst)
(b: MB.mbuffer src rel rrel)
(h0 h1: HS.mem)
: Lemma (requires MB.as_seq h0 b == MB.as_seq h1 b)
(ensures
(let dv = DV.mk_buffer_view b view in
DV.as_seq h0 dv == DV.as_seq h1 dv)) =
| let dv = DV.mk_buffer_view b view in
let s0 = DV.as_seq h0 dv in
let s1 = DV.as_seq h1 dv in
let aux (i: nat{i < DV.length dv}) : Lemma (Seq.index s0 i == Seq.index s1 i) =
DV.as_seq_sel h0 dv i;
DV.as_seq_sel h1 dv i;
DV.get_sel h0 dv i;
DV.get_sel h1 dv i
in
Classical.forall_intro aux;
Seq.lemma_eq_intro s0 s1 | false |
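Unlike lemma_uv_equal earlier in this dump, lemma_dv_equal carries no SMT patterns, so callers invoke it explicitly; an illustrative wrapper (assumed name dv_as_seq_frame, not from the source file) restates the same fact without the intermediate let-binding:
let dv_as_seq_frame (#src #dst:Type) (#rel #rrel:MB.srel src)
  (view:DV.view src dst) (b:MB.mbuffer src rel rrel) (h0 h1:HS.mem)
  : Lemma (requires MB.as_seq h0 b == MB.as_seq h1 b)
          (ensures  DV.as_seq h0 (DV.mk_buffer_view b view) == DV.as_seq h1 (DV.mk_buffer_view b view))
  = lemma_dv_equal view b h0 h1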
Hacl.Spec.FFDHE.Lemmas.fst | Hacl.Spec.FFDHE.Lemmas.ffdhe_p_lemma_len | val ffdhe_p_lemma_len: a:ffdhe_alg -> Lemma
(let ffdhe_p = get_ffdhe_params a in
let p = Mk_ffdhe_params?.ffdhe_p ffdhe_p in
Seq.index p 0 == 0xffuy) | val ffdhe_p_lemma_len: a:ffdhe_alg -> Lemma
(let ffdhe_p = get_ffdhe_params a in
let p = Mk_ffdhe_params?.ffdhe_p ffdhe_p in
Seq.index p 0 == 0xffuy) | let ffdhe_p_lemma_len a =
let ffdhe_p = get_ffdhe_params a in
let p = Mk_ffdhe_params?.ffdhe_p ffdhe_p in
allow_inversion ffdhe_alg;
match a with
| FFDHE2048 ->
assert (p == of_list list_ffdhe_p2048);
assert_norm (List.Tot.index list_ffdhe_p2048 0 == 0xffuy);
assert (Seq.index (Seq.seq_of_list list_ffdhe_p2048) 0 == 0xffuy)
| FFDHE3072 ->
assert (p == of_list list_ffdhe_p3072);
assert_norm (List.Tot.index list_ffdhe_p3072 0 == 0xffuy);
assert (Seq.index (Seq.seq_of_list list_ffdhe_p3072) 0 == 0xffuy)
| FFDHE4096 ->
assert (p == of_list list_ffdhe_p4096);
assert_norm (List.Tot.index list_ffdhe_p4096 0 == 0xffuy);
assert (Seq.index (Seq.seq_of_list list_ffdhe_p4096) 0 == 0xffuy)
| FFDHE6144 ->
assert (p == of_list list_ffdhe_p6144);
assert_norm (List.Tot.index list_ffdhe_p6144 0 == 0xffuy);
assert (Seq.index (Seq.seq_of_list list_ffdhe_p6144) 0 == 0xffuy)
| FFDHE8192 ->
assert (p == of_list list_ffdhe_p8192);
assert_norm (List.Tot.index list_ffdhe_p8192 0 == 0xffuy);
assert (Seq.index (Seq.seq_of_list list_ffdhe_p8192) 0 == 0xffuy) | {
"file_name": "code/ffdhe/Hacl.Spec.FFDHE.Lemmas.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 69,
"end_line": 43,
"start_col": 0,
"start_line": 18
} | module Hacl.Spec.FFDHE.Lemmas
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
open Lib.ByteSequence
open Spec.FFDHE
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
val ffdhe_p_lemma_len: a:ffdhe_alg -> Lemma
(let ffdhe_p = get_ffdhe_params a in
let p = Mk_ffdhe_params?.ffdhe_p ffdhe_p in
Seq.index p 0 == 0xffuy) | {
"checked_file": "/",
"dependencies": [
"Spec.FFDHE.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Spec.FFDHE.Lemmas.fst"
} | [
{
"abbrev": false,
"full_module": "Spec.FFDHE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteSequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec.FFDHE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec.FFDHE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a: Spec.FFDHE.ffdhe_alg
-> FStar.Pervasives.Lemma
(ensures
(let ffdhe_p = Spec.FFDHE.get_ffdhe_params a in
let p = Mk_ffdhe_params?.ffdhe_p ffdhe_p in
FStar.Seq.Base.index p 0 == 0xffuy)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Spec.FFDHE.ffdhe_alg",
"Prims._assert",
"Prims.eq2",
"FStar.UInt8.t",
"FStar.Seq.Base.index",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.PUB",
"FStar.Seq.Base.seq_of_list",
"Spec.FFDHE.list_ffdhe_p2048",
"FStar.UInt8.__uint_to_t",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"FStar.List.Tot.Base.index",
"Lib.Sequence.seq",
"Lib.IntTypes.pub_uint8",
"Prims.l_or",
"Prims.nat",
"FStar.Seq.Base.length",
"Spec.FFDHE.__proj__Mk_ffdhe_params__item__ffdhe_p_len",
"Prims.l_and",
"FStar.List.Tot.Base.length",
"FStar.Seq.Base.seq",
"Lib.Sequence.to_seq",
"Lib.Sequence.of_list",
"Spec.FFDHE.list_ffdhe_p3072",
"Spec.FFDHE.list_ffdhe_p4096",
"Spec.FFDHE.list_ffdhe_p6144",
"Spec.FFDHE.list_ffdhe_p8192",
"FStar.Pervasives.allow_inversion",
"Lib.Sequence.lseq",
"Spec.FFDHE.__proj__Mk_ffdhe_params__item__ffdhe_p",
"Spec.FFDHE.ffdhe_params_t",
"Spec.FFDHE.get_ffdhe_params"
] | [] | false | false | true | false | false | let ffdhe_p_lemma_len a =
| let ffdhe_p = get_ffdhe_params a in
let p = Mk_ffdhe_params?.ffdhe_p ffdhe_p in
allow_inversion ffdhe_alg;
match a with
| FFDHE2048 ->
assert (p == of_list list_ffdhe_p2048);
assert_norm (List.Tot.index list_ffdhe_p2048 0 == 0xffuy);
assert (Seq.index (Seq.seq_of_list list_ffdhe_p2048) 0 == 0xffuy)
| FFDHE3072 ->
assert (p == of_list list_ffdhe_p3072);
assert_norm (List.Tot.index list_ffdhe_p3072 0 == 0xffuy);
assert (Seq.index (Seq.seq_of_list list_ffdhe_p3072) 0 == 0xffuy)
| FFDHE4096 ->
assert (p == of_list list_ffdhe_p4096);
assert_norm (List.Tot.index list_ffdhe_p4096 0 == 0xffuy);
assert (Seq.index (Seq.seq_of_list list_ffdhe_p4096) 0 == 0xffuy)
| FFDHE6144 ->
assert (p == of_list list_ffdhe_p6144);
assert_norm (List.Tot.index list_ffdhe_p6144 0 == 0xffuy);
assert (Seq.index (Seq.seq_of_list list_ffdhe_p6144) 0 == 0xffuy)
| FFDHE8192 ->
assert (p == of_list list_ffdhe_p8192);
assert_norm (List.Tot.index list_ffdhe_p8192 0 == 0xffuy);
assert (Seq.index (Seq.seq_of_list list_ffdhe_p8192) 0 == 0xffuy) | false |
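Background only, not used by the proof (which simply normalizes the head of each hard-coded list): the ffdhe primes of RFC 7919 are designed with their 64 most significant bits all set to one, which is why the leading big-endian byte is 0xff. Stated as a worked inequality for a b-bit prime p:
  2^b - 2^{b-64} \le p < 2^b \implies \lfloor p / 2^{b-8} \rfloor = 2^8 - 1 = \mathtt{0xff}.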
LowParse.Repr.fsti | LowParse.Repr.value | val value (#t: _) (p: repr_ptr t) : GTot t | val value (#t: _) (p: repr_ptr t) : GTot t | let value #t (p:repr_ptr t) : GTot t = p.meta.v | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 47,
"end_line": 154,
"start_col": 0,
"start_line": 154
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Repr.repr_ptr t -> Prims.GTot t | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Repr.repr_ptr",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowParse.Repr.__proj__Ptr__item__meta"
] | [] | false | false | false | false | false | let value #t (p: repr_ptr t) : GTot t =
| p.meta.v | false |
LowParse.Repr.fsti | LowParse.Repr.repr_ptr_p | val repr_ptr_p : t: Type -> parser: LowParse.Spec.Base.parser k t -> Type | let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser } | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 68,
"end_line": 157,
"start_col": 0,
"start_line": 156
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: Type -> parser: LowParse.Spec.Base.parser k t -> Type | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Repr.repr_ptr",
"Prims.l_and",
"Prims.eq2",
"LowParse.Repr.__proj__Mkmeta__item__parser_kind",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowParse.Repr.__proj__Mkmeta__item__parser"
] | [] | false | false | false | false | true | let repr_ptr_p (t: Type) (#k: strong_parser_kind) (parser: LP.parser k t) =
| p: repr_ptr t {p.meta.parser_kind == k /\ p.meta.parser == parser} | false |
|
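repr_ptr_p t parser refines repr_ptr t so that the erased metadata records one fixed parser (and its kind). A hypothetical instantiation, using the module's LP abbreviation for LowParse.Low.Base; the assume vals and names below are illustrative only:
assume val example_kind : strong_parser_kind
assume val example_parser : LP.parser example_kind FStar.UInt16.t
(* the type of pointers whose wire format is governed by example_parser *)
let example_ptr = repr_ptr_p FStar.UInt16.t example_parser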
LowParse.Repr.fsti | LowParse.Repr.sub_ptr | val sub_ptr : p2: LowParse.Repr.repr_ptr 'a -> p1: LowParse.Repr.repr_ptr 'b -> Prims.logical | let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1 | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 66,
"end_line": 164,
"start_col": 0,
"start_line": 163
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p2: LowParse.Repr.repr_ptr 'a -> p1: LowParse.Repr.repr_ptr 'b -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.repr_ptr",
"Prims.l_Exists",
"FStar.UInt32.t",
"LowStar.ConstBuffer.const_sub_buffer",
"LowParse.Bytes.byte",
"LowParse.Repr.__proj__Ptr__item__b",
"Prims.logical"
] | [] | false | false | false | true | true | let sub_ptr (p2: repr_ptr 'a) (p1: repr_ptr 'b) =
| exists pos len. C.const_sub_buffer pos len (Ptr?.b p2) (Ptr?.b p1) | false |
|
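sub_ptr only asserts that some offset and length exist at which p2's buffer sits inside p1's; client proofs usually discharge that existential by exhibiting a concrete witness through the module's intro_sub_ptr lemma. A thin, hypothetical wrapper that makes the introduction step explicit:
let establish_sub_ptr (q:repr_ptr 'a) (p:repr_ptr 'b) (from to:uint_32)
  : Lemma
    (requires
      to >= from /\
      Ptr?.b q `C.const_sub_buffer from (to - from)` Ptr?.b p)
    (ensures q `sub_ptr` p)
  = intro_sub_ptr q p from to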
Hacl.Impl.Ed25519.Ladder.fst | Hacl.Impl.Ed25519.Ladder.point_mul_g_noalloc | val point_mul_g_noalloc: out:point -> bscalar:lbuffer uint64 4ul
-> q1:point -> q2:point
-> q3:point -> q4:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h out /\ live h q1 /\
live h q2 /\ live h q3 /\ live h q4 /\
disjoint out bscalar /\ disjoint out q1 /\ disjoint out q2 /\
disjoint out q3 /\ disjoint out q4 /\
disjoint q1 q2 /\ disjoint q1 q3 /\ disjoint q1 q4 /\
disjoint q2 q3 /\ disjoint q2 q4 /\ disjoint q3 q4 /\
BD.bn_v h bscalar < pow2 256 /\
F51.linv (as_seq h q1) /\ refl (as_seq h q1) == g_aff /\
F51.linv (as_seq h q2) /\ refl (as_seq h q2) == g_pow2_64 /\
F51.linv (as_seq h q3) /\ refl (as_seq h q3) == g_pow2_128 /\
F51.linv (as_seq h q4) /\ refl (as_seq h q4) == g_pow2_192)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
(let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 (BD.bn_v h0 bscalar) in
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_four_fw S.mk_ed25519_comm_monoid
g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4)) | val point_mul_g_noalloc: out:point -> bscalar:lbuffer uint64 4ul
-> q1:point -> q2:point
-> q3:point -> q4:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h out /\ live h q1 /\
live h q2 /\ live h q3 /\ live h q4 /\
disjoint out bscalar /\ disjoint out q1 /\ disjoint out q2 /\
disjoint out q3 /\ disjoint out q4 /\
disjoint q1 q2 /\ disjoint q1 q3 /\ disjoint q1 q4 /\
disjoint q2 q3 /\ disjoint q2 q4 /\ disjoint q3 q4 /\
BD.bn_v h bscalar < pow2 256 /\
F51.linv (as_seq h q1) /\ refl (as_seq h q1) == g_aff /\
F51.linv (as_seq h q2) /\ refl (as_seq h q2) == g_pow2_64 /\
F51.linv (as_seq h q3) /\ refl (as_seq h q3) == g_pow2_128 /\
F51.linv (as_seq h q4) /\ refl (as_seq h q4) == g_pow2_192)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
(let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 (BD.bn_v h0 bscalar) in
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_four_fw S.mk_ed25519_comm_monoid
g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4)) | let point_mul_g_noalloc out bscalar q1 q2 q3 q4 =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 4ul in
[@inline_let] let table_len = 16ul in
[@inline_let] let bLen = 1ul in
[@inline_let] let bBits = 64ul in
let h0 = ST.get () in
recall_contents precomp_basepoint_table_w4 precomp_basepoint_table_lseq_w4;
let h1 = ST.get () in
precomp_basepoint_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q1) (as_seq h1 precomp_basepoint_table_w4));
recall_contents precomp_g_pow2_64_table_w4 precomp_g_pow2_64_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_64_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q2) (as_seq h1 precomp_g_pow2_64_table_w4));
recall_contents precomp_g_pow2_128_table_w4 precomp_g_pow2_128_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_128_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q3) (as_seq h1 precomp_g_pow2_128_table_w4));
recall_contents precomp_g_pow2_192_table_w4 precomp_g_pow2_192_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_192_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q4) (as_seq h1 precomp_g_pow2_192_table_w4));
let r1 = sub bscalar 0ul 1ul in
let r2 = sub bscalar 1ul 1ul in
let r3 = sub bscalar 2ul 1ul in
let r4 = sub bscalar 3ul 1ul in
SPT256.lemma_decompose_nat256_as_four_u64_lbignum (as_seq h0 bscalar);
ME.mk_lexp_four_fw_tables len ctx_len k l table_len
table_inv_w4 table_inv_w4 table_inv_w4 table_inv_w4
precomp_get_consttime
precomp_get_consttime
precomp_get_consttime
precomp_get_consttime
(null uint64) q1 bLen bBits r1 q2 r2 q3 r3 q4 r4
(to_const precomp_basepoint_table_w4)
(to_const precomp_g_pow2_64_table_w4)
(to_const precomp_g_pow2_128_table_w4)
(to_const precomp_g_pow2_192_table_w4)
out;
LowStar.Ignore.ignore q2; // q2, q3, q4 are unused variables
LowStar.Ignore.ignore q3;
LowStar.Ignore.ignore q4 | {
"file_name": "code/ed25519/Hacl.Impl.Ed25519.Ladder.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 26,
"end_line": 187,
"start_col": 0,
"start_line": 136
} | module Hacl.Impl.Ed25519.Ladder
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum25519
module F51 = Hacl.Impl.Ed25519.Field51
module BSeq = Lib.ByteSequence
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BE = Hacl.Impl.Exponentiation
module ME = Hacl.Impl.MultiExponentiation
module PT = Hacl.Impl.PrecompTable
module SPT256 = Hacl.Spec.PrecompBaseTable256
module BD = Hacl.Bignum.Definitions
module SD = Hacl.Spec.Bignum.Definitions
module S = Spec.Ed25519
open Hacl.Impl.Ed25519.PointConstants
include Hacl.Impl.Ed25519.Group
include Hacl.Ed25519.PrecompTable
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
inline_for_extraction noextract
let table_inv_w4 : BE.table_inv_t U64 20ul 16ul =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 4ul in
[@inline_let] let table_len = 16ul in
BE.table_inv_precomp len ctx_len k l table_len
inline_for_extraction noextract
let table_inv_w5 : BE.table_inv_t U64 20ul 32ul =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 5ul in
[@inline_let] let table_len = 32ul in
assert_norm (pow2 (v l) = v table_len);
BE.table_inv_precomp len ctx_len k l table_len
inline_for_extraction noextract
val convert_scalar: scalar:lbuffer uint8 32ul -> bscalar:lbuffer uint64 4ul ->
Stack unit
(requires fun h -> live h scalar /\ live h bscalar /\ disjoint scalar bscalar)
(ensures fun h0 _ h1 -> modifies (loc bscalar) h0 h1 /\
BD.bn_v h1 bscalar == BSeq.nat_from_bytes_le (as_seq h0 scalar))
let convert_scalar scalar bscalar =
let h0 = ST.get () in
Hacl.Spec.Bignum.Convert.bn_from_bytes_le_lemma #U64 32 (as_seq h0 scalar);
Hacl.Bignum.Convert.mk_bn_from_bytes_le true 32ul scalar bscalar
inline_for_extraction noextract
val point_mul_noalloc:
out:point
-> bscalar:lbuffer uint64 4ul
-> q:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h q /\ live h out /\
disjoint q out /\ disjoint q bscalar /\ disjoint out bscalar /\
F51.point_inv_t h q /\ F51.inv_ext_point (as_seq h q) /\
BD.bn_v h bscalar < pow2 256)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.point_inv_t h1 out /\ F51.inv_ext_point (as_seq h1 out) /\
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_fw S.mk_ed25519_comm_monoid
(S.to_aff_point (F51.point_eval h0 q)) 256 (BD.bn_v h0 bscalar) 4)
let point_mul_noalloc out bscalar q =
BE.lexp_fw_consttime 20ul 0ul mk_ed25519_concrete_ops
4ul (null uint64) q 4ul 256ul bscalar out
let point_mul out scalar q =
let h0 = ST.get () in
SE.exp_fw_lemma S.mk_ed25519_concrete_ops
(F51.point_eval h0 q) 256 (BSeq.nat_from_bytes_le (as_seq h0 scalar)) 4;
push_frame ();
let bscalar = create 4ul (u64 0) in
convert_scalar scalar bscalar;
point_mul_noalloc out bscalar q;
pop_frame ()
val precomp_get_consttime: BE.pow_a_to_small_b_st U64 20ul 0ul mk_ed25519_concrete_ops 4ul 16ul
(BE.table_inv_precomp 20ul 0ul mk_ed25519_concrete_ops 4ul 16ul)
[@CInline]
let precomp_get_consttime ctx a table bits_l tmp =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 4ul in
[@inline_let] let table_len = 16ul in
BE.lprecomp_get_consttime len ctx_len k l table_len ctx a table bits_l tmp
inline_for_extraction noextract
val point_mul_g_noalloc: out:point -> bscalar:lbuffer uint64 4ul
-> q1:point -> q2:point
-> q3:point -> q4:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h out /\ live h q1 /\
live h q2 /\ live h q3 /\ live h q4 /\
disjoint out bscalar /\ disjoint out q1 /\ disjoint out q2 /\
disjoint out q3 /\ disjoint out q4 /\
disjoint q1 q2 /\ disjoint q1 q3 /\ disjoint q1 q4 /\
disjoint q2 q3 /\ disjoint q2 q4 /\ disjoint q3 q4 /\
BD.bn_v h bscalar < pow2 256 /\
F51.linv (as_seq h q1) /\ refl (as_seq h q1) == g_aff /\
F51.linv (as_seq h q2) /\ refl (as_seq h q2) == g_pow2_64 /\
F51.linv (as_seq h q3) /\ refl (as_seq h q3) == g_pow2_128 /\
F51.linv (as_seq h q4) /\ refl (as_seq h q4) == g_pow2_192)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
(let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 (BD.bn_v h0 bscalar) in
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_four_fw S.mk_ed25519_comm_monoid
g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4)) | {
"checked_file": "/",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"Spec.Ed25519.Lemmas.fsti.checked",
"Spec.Ed25519.fst.checked",
"prims.fst.checked",
"LowStar.Ignore.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.PrecompBaseTable256.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"Hacl.Spec.Bignum.Convert.fst.checked",
"Hacl.Impl.PrecompTable.fsti.checked",
"Hacl.Impl.MultiExponentiation.fsti.checked",
"Hacl.Impl.Exponentiation.fsti.checked",
"Hacl.Impl.Ed25519.PointNegate.fst.checked",
"Hacl.Impl.Ed25519.PointConstants.fst.checked",
"Hacl.Impl.Ed25519.Group.fst.checked",
"Hacl.Impl.Ed25519.Field51.fst.checked",
"Hacl.Ed25519.PrecompTable.fsti.checked",
"Hacl.Bignum25519.fsti.checked",
"Hacl.Bignum.Definitions.fst.checked",
"Hacl.Bignum.Convert.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.Impl.Ed25519.Ladder.fst"
} | [
{
"abbrev": false,
"full_module": "Hacl.Ed25519.PrecompTable",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.Group",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.PointConstants",
"short_module": null
},
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "SD"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.PrecompBaseTable256",
"short_module": "SPT256"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.PrecompTable",
"short_module": "PT"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.MultiExponentiation",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Exponentiation",
"short_module": "BE"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
out: Hacl.Bignum25519.point ->
bscalar: Lib.Buffer.lbuffer Lib.IntTypes.uint64 4ul ->
q1: Hacl.Bignum25519.point ->
q2: Hacl.Bignum25519.point ->
q3: Hacl.Bignum25519.point ->
q4: Hacl.Bignum25519.point
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Hacl.Bignum25519.point",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint64",
"FStar.UInt32.__uint_to_t",
"LowStar.Ignore.ignore",
"Prims.unit",
"Hacl.Impl.MultiExponentiation.mk_lexp_four_fw_tables",
"Lib.IntTypes.U64",
"Hacl.Impl.Ed25519.Ladder.table_inv_w4",
"Hacl.Impl.Ed25519.Ladder.precomp_get_consttime",
"Lib.Buffer.null",
"Lib.Buffer.MUT",
"Lib.Buffer.to_const",
"Lib.Buffer.CONST",
"Hacl.Ed25519.PrecompTable.precomp_basepoint_table_w4",
"Hacl.Ed25519.PrecompTable.precomp_g_pow2_64_table_w4",
"Hacl.Ed25519.PrecompTable.precomp_g_pow2_128_table_w4",
"Hacl.Ed25519.PrecompTable.precomp_g_pow2_192_table_w4",
"Hacl.Spec.PrecompBaseTable256.lemma_decompose_nat256_as_four_u64_lbignum",
"Lib.Buffer.as_seq",
"Lib.Buffer.lbuffer_t",
"Lib.IntTypes.int_t",
"Lib.IntTypes.SEC",
"FStar.UInt32.uint_to_t",
"FStar.UInt32.t",
"Lib.Buffer.sub",
"Prims._assert",
"Hacl.Ed25519.PrecompTable.precomp_g_pow2_192_table_lemma_w4",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"Lib.Buffer.recall_contents",
"Hacl.Ed25519.PrecompTable.precomp_g_pow2_192_table_lseq_w4",
"Hacl.Ed25519.PrecompTable.precomp_g_pow2_128_table_lemma_w4",
"Hacl.Ed25519.PrecompTable.precomp_g_pow2_128_table_lseq_w4",
"Hacl.Ed25519.PrecompTable.precomp_g_pow2_64_table_lemma_w4",
"Hacl.Ed25519.PrecompTable.precomp_g_pow2_64_table_lseq_w4",
"Hacl.Ed25519.PrecompTable.precomp_basepoint_table_lemma_w4",
"Hacl.Ed25519.PrecompTable.precomp_basepoint_table_lseq_w4",
"Hacl.Impl.Exponentiation.Definitions.concrete_ops",
"Hacl.Impl.Ed25519.Group.mk_ed25519_concrete_ops"
] | [] | false | true | false | false | false | let point_mul_g_noalloc out bscalar q1 q2 q3 q4 =
| [@@ inline_let ]let len = 20ul in
[@@ inline_let ]let ctx_len = 0ul in
[@@ inline_let ]let k = mk_ed25519_concrete_ops in
[@@ inline_let ]let l = 4ul in
[@@ inline_let ]let table_len = 16ul in
[@@ inline_let ]let bLen = 1ul in
[@@ inline_let ]let bBits = 64ul in
let h0 = ST.get () in
recall_contents precomp_basepoint_table_w4 precomp_basepoint_table_lseq_w4;
let h1 = ST.get () in
precomp_basepoint_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q1) (as_seq h1 precomp_basepoint_table_w4));
recall_contents precomp_g_pow2_64_table_w4 precomp_g_pow2_64_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_64_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q2) (as_seq h1 precomp_g_pow2_64_table_w4));
recall_contents precomp_g_pow2_128_table_w4 precomp_g_pow2_128_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_128_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q3) (as_seq h1 precomp_g_pow2_128_table_w4));
recall_contents precomp_g_pow2_192_table_w4 precomp_g_pow2_192_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_192_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q4) (as_seq h1 precomp_g_pow2_192_table_w4));
let r1 = sub bscalar 0ul 1ul in
let r2 = sub bscalar 1ul 1ul in
let r3 = sub bscalar 2ul 1ul in
let r4 = sub bscalar 3ul 1ul in
SPT256.lemma_decompose_nat256_as_four_u64_lbignum (as_seq h0 bscalar);
ME.mk_lexp_four_fw_tables len ctx_len k l table_len table_inv_w4 table_inv_w4 table_inv_w4
table_inv_w4 precomp_get_consttime precomp_get_consttime precomp_get_consttime
precomp_get_consttime (null uint64) q1 bLen bBits r1 q2 r2 q3 r3 q4 r4
(to_const precomp_basepoint_table_w4) (to_const precomp_g_pow2_64_table_w4)
(to_const precomp_g_pow2_128_table_w4) (to_const precomp_g_pow2_192_table_w4) out;
LowStar.Ignore.ignore q2;
LowStar.Ignore.ignore q3;
LowStar.Ignore.ignore q4 | false |
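The structure of point_mul_g_noalloc mirrors a simple scalar decomposition. With b = BD.bn_v h0 bscalar and (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 b, the identity exploited by LE.exp_four_fw is, written multiplicatively as in the commutative-monoid spec,
  b = b_0 + 2^{64} b_1 + 2^{128} b_2 + 2^{192} b_3, \qquad 0 \le b_i < 2^{64}
  g^{b} = g^{b_0} \cdot \big(g^{2^{64}}\big)^{b_1} \cdot \big(g^{2^{128}}\big)^{b_2} \cdot \big(g^{2^{192}}\big)^{b_3}
so the four bases g_aff, g_pow2_64, g_pow2_128 and g_pow2_192 each get their own 16-entry (2^4) table, and the four 64-bit exponents are processed in lockstep with a 4-bit fixed window.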
LowParse.Repr.fsti | LowParse.Repr.fp | val fp (#t: _) (p: repr_ptr t) : GTot B.loc | val fp (#t: _) (p: repr_ptr t) : GTot B.loc | let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 20,
"end_line": 227,
"start_col": 0,
"start_line": 225
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Repr.repr_ptr t -> Prims.GTot LowStar.Monotonic.Buffer.loc | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Repr.repr_ptr",
"LowStar.ConstBuffer.loc_buffer",
"LowParse.Bytes.byte",
"LowParse.Repr.__proj__Ptr__item__b",
"LowStar.Monotonic.Buffer.loc"
] | [] | false | false | false | false | false | let fp #t (p: repr_ptr t) : GTot B.loc =
| C.loc_buffer p.b | false |
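The valid/reveal_valid pair in the context above is a common interface pattern: keep the predicate abstract so clients treat it opaquely for framing, but provide a lemma that re-exposes the transparent definition (valid') when a proof needs to look inside. A miniature, interface-style sketch of the same pattern, with hypothetical names:
val small_pred (n:nat) : prop
val reveal_small_pred (_:unit)
  : Lemma (forall (n:nat). {:pattern (small_pred n)} small_pred n <==> n > 0)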
LowParse.Repr.fsti | LowParse.Repr.stable_repr_ptr | val stable_repr_ptr : t: Type -> Type | let stable_repr_ptr t= p:repr_ptr t { valid_if_live p } | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 55,
"end_line": 423,
"start_col": 0,
"start_line": 423
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by erased the `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: Type -> Type | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.repr_ptr",
"LowParse.Repr.valid_if_live"
] | [] | false | false | false | true | true | let stable_repr_ptr t =
| p: repr_ptr t {valid_if_live p} | false |
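The record above covers `stable_repr_ptr`, a `repr_ptr` that stays valid as long as its immutable backing buffer is live. A hedged usage sketch (the name `read_stable` is hypothetical; it relies only on `recall_stable_repr_ptr`, which is defined further down in LowParse.Repr.fsti):

(* Sketch only: recall validity from liveness, then read the concrete value. *)
let read_stable #t (r:stable_repr_ptr t)
  : Stack t
    (requires fun h -> C.live h r.b)
    (ensures  fun h0 _ h1 -> h0 == h1 /\ valid r h1)
  = recall_stable_repr_ptr r;
    r.vv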
|
LowParse.TestLib.Low.fst | LowParse.TestLib.Low.test_file_buffer | val test_file_buffer (t: testbuffer_t) (filename: string)
: ST unit (fun _ -> true) (fun _ _ _ -> true) | val test_file_buffer (t: testbuffer_t) (filename: string)
: ST unit (fun _ -> true) (fun _ _ _ -> true) | let test_file_buffer (t:testbuffer_t) (filename:string): ST unit (fun _ -> true) (fun _ _ _ -> true) =
push_frame();
let input = load_file_buffer filename in
(*test_buffer t filename input inputlen;*)
pop_frame() | {
"file_name": "src/lowparse/LowParse.TestLib.Low.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 13,
"end_line": 93,
"start_col": 0,
"start_line": 89
} | module LowParse.TestLib.Low
open FStar.HyperStack.ST
open FStar.HyperStack.IO
open FStar.Printf
open LowParse.Low.Base
module B = LowStar.Buffer
module IB = LowStar.ImmutableBuffer
module U32 = FStar.UInt32
module M = LowStar.Modifies
#reset-options "--using_facts_from '* -LowParse'"
#reset-options "--z3cliopt smt.arith.nl=false"
(** The type of a unit test. It takes an input buffer8, parses it,
and returns a newly formatted buffer8, or returns None if
parsing fails. *)
inline_for_extraction
type testbuffer_t = (#rrel: _) -> (#rel: _) -> (input: slice rrel rel) -> ST (option (slice rrel rel))
(requires(fun h -> live_slice h input))
(ensures(fun h0 y h1 ->
M.modifies M.loc_none h0 h1 /\ (
match y with
| None -> true
| Some out ->
B.unused_in out.base h0 /\
live_slice h1 out
)))
assume val load_file_buffer: (filename:string) -> ST (slice (srel_of_buffer_srel (IB.immutable_preorder _)) (srel_of_buffer_srel (IB.immutable_preorder _)))
(requires (fun h -> True))
(ensures (fun h out h' ->
M.modifies M.loc_none h h' /\ B.unused_in out.base h /\ live_slice h' out
))
assume val load_file_buffer_c: (filename:C.String.t) -> ST (slice (srel_of_buffer_srel (IB.immutable_preorder _)) (srel_of_buffer_srel (IB.immutable_preorder _)))
(requires (fun h -> True))
(ensures (fun h out h' ->
M.modifies M.loc_none h h' /\ B.unused_in out.base h /\ live_slice h' out
))
(* TODO: implement in LowStar.Buffer *)
module U32 = FStar.UInt32
(** Corresponds to memcmp for `eqtype` *)
(* dirty trick: the additional unit arg prevents F* and KaRaMeL from viewing preorder arguments as sources of polymorphism *)
assume
val beqb: unit -> (#rrel1: _) -> (#rel1: _) -> (#rrel2: _) -> (#rel2: _) -> b1:B.mbuffer byte (buffer_srel_of_srel rrel1) (buffer_srel_of_srel rel1) -> b2:B.mbuffer byte (buffer_srel_of_srel rrel2) (buffer_srel_of_srel rel2)
-> len:U32.t{U32.v len <= B.length b1 /\ U32.v len <= B.length b2}
-> Stack bool
(requires (fun h ->
B.live h b1 /\
B.live h b2
))
(ensures (fun h0 z h1 -> h1 == h0 /\
(z <==> Seq.equal (Seq.slice (B.as_seq h0 b1) 0 (U32.v len)) (Seq.slice (B.as_seq h0 b2) 0 (U32.v len)))))
(** Test one parser+formatter pair against an in-memory buffer of UInt8.t *)
inline_for_extraction
noextract
let test_buffer (t:testbuffer_t) (testname:string) (#rrel #rel: _) (input:slice rrel rel)
: ST unit
(requires (fun h -> live_slice h input))
(ensures (fun _ _ _ -> true)) =
push_frame();
print_string (sprintf "==== Testing buffer %s ====\n" testname);
let result = t input in
(match result with
| Some output -> (
if U32.lte output.len input.len then (
if beqb () input.base output.base output.len then
print_string "Formatted data matches original input data\n"
else (
print_string "FAIL: formatted data does not match original input data\n"
)
) else (
print_string "Invalid length return - it is longer than the input buffer!"
))
| _ -> print_string "Failed to parse\n"
);
print_string (sprintf "==== Finished %s ====\n" testname);
pop_frame()
(** Test one parser+formatter pair against a disk file, using buffer *)
inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Modifies.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.IO.fst.checked",
"C.String.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.TestLib.Low.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Modifies",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "IB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Printf",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.IO",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.TestLib",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.TestLib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [
"smt.arith.nl=false"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: LowParse.TestLib.Low.testbuffer_t -> filename: Prims.string -> FStar.HyperStack.ST.ST Prims.unit | FStar.HyperStack.ST.ST | [] | [] | [
"LowParse.TestLib.Low.testbuffer_t",
"Prims.string",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"LowParse.Slice.slice",
"LowParse.Slice.srel_of_buffer_srel",
"LowParse.Bytes.byte",
"LowStar.ImmutableBuffer.immutable_preorder",
"LowParse.TestLib.Low.load_file_buffer",
"FStar.HyperStack.ST.push_frame",
"FStar.Monotonic.HyperStack.mem",
"Prims.b2t"
] | [] | false | true | false | false | false | let test_file_buffer (t: testbuffer_t) (filename: string)
: ST unit (fun _ -> true) (fun _ _ _ -> true) =
| push_frame ();
let input = load_file_buffer filename in
pop_frame () | false |
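The record above is `test_file_buffer`, which loads a file into an immutable slice and, once the commented-out call is restored, runs a `testbuffer_t` over it. A hypothetical driver sketch (the name `run_file_tests` and the file names are made up) showing how it is intended to be invoked:

(* Sketch only: run the same unit test against several input files. *)
let run_file_tests (t:testbuffer_t)
  : ST unit (fun _ -> true) (fun _ _ _ -> true)
  = test_file_buffer t "sample1.bin";
    test_file_buffer t "sample2.bin"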
LowParse.Repr.fsti | LowParse.Repr.is_stable_in_region | val is_stable_in_region : p: LowParse.Repr.repr_ptr t -> Prims.logical | let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 36,
"end_line": 527,
"start_col": 0,
"start_line": 523
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by erased the `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Repr.repr_ptr t -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.repr_ptr",
"Prims.l_and",
"LowParse.Repr.valid_if_live",
"Prims.eq2",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowParse.Bytes.byte",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowParse.Repr.__proj__Ptr__item__b",
"LowStar.ConstBuffer.cast",
"LowStar.Monotonic.Buffer.region_lifetime_buf",
"Prims.logical"
] | [] | false | false | false | true | true | let is_stable_in_region #t (p: repr_ptr t) =
| let r = B.frameOf (C.cast p.b) in
valid_if_live p /\ B.frameOf (C.cast p.b) == r /\ B.region_lifetime_buf (C.cast p.b) | false |
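The record above defines `is_stable_in_region`, which packages `valid_if_live` with a region-lifetime guarantee for the underlying buffer. A small sanity lemma, given only as a hypothetical sketch (the name `is_stable_in_region_elim` is not part of the library, and the trivial proof assumes the definition unfolds for the SMT solver):

(* Sketch only: unpack the two guarantees bundled by is_stable_in_region. *)
let is_stable_in_region_elim #t (p:repr_ptr t)
  : Lemma (requires is_stable_in_region p)
          (ensures  valid_if_live p /\ B.region_lifetime_buf (C.cast p.b))
  = ()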
|
LowParse.TestLib.Low.fst | LowParse.TestLib.Low.test_buffer | val test_buffer (t: testbuffer_t) (testname: string) (#rrel #rel: _) (input: slice rrel rel)
: ST unit (requires (fun h -> live_slice h input)) (ensures (fun _ _ _ -> true)) | val test_buffer (t: testbuffer_t) (testname: string) (#rrel #rel: _) (input: slice rrel rel)
: ST unit (requires (fun h -> live_slice h input)) (ensures (fun _ _ _ -> true)) | let test_buffer (t:testbuffer_t) (testname:string) (#rrel #rel: _) (input:slice rrel rel)
: ST unit
(requires (fun h -> live_slice h input))
(ensures (fun _ _ _ -> true)) =
push_frame();
print_string (sprintf "==== Testing buffer %s ====\n" testname);
let result = t input in
(match result with
| Some output -> (
if U32.lte output.len input.len then (
if beqb () input.base output.base output.len then
print_string "Formatted data matches original input data\n"
else (
print_string "FAIL: formatted data does not match original input data\n"
)
) else (
print_string "Invalid length return - it is longer than the input buffer!"
))
| _ -> print_string "Failed to parse\n"
);
print_string (sprintf "==== Finished %s ====\n" testname);
pop_frame() | {
"file_name": "src/lowparse/LowParse.TestLib.Low.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 13,
"end_line": 84,
"start_col": 0,
"start_line": 63
} | module LowParse.TestLib.Low
open FStar.HyperStack.ST
open FStar.HyperStack.IO
open FStar.Printf
open LowParse.Low.Base
module B = LowStar.Buffer
module IB = LowStar.ImmutableBuffer
module U32 = FStar.UInt32
module M = LowStar.Modifies
#reset-options "--using_facts_from '* -LowParse'"
#reset-options "--z3cliopt smt.arith.nl=false"
(** The type of a unit test. It takes an input buffer8, parses it,
and returns a newly formatted buffer8, or returns None if
parsing fails. *)
inline_for_extraction
type testbuffer_t = (#rrel: _) -> (#rel: _) -> (input: slice rrel rel) -> ST (option (slice rrel rel))
(requires(fun h -> live_slice h input))
(ensures(fun h0 y h1 ->
M.modifies M.loc_none h0 h1 /\ (
match y with
| None -> true
| Some out ->
B.unused_in out.base h0 /\
live_slice h1 out
)))
assume val load_file_buffer: (filename:string) -> ST (slice (srel_of_buffer_srel (IB.immutable_preorder _)) (srel_of_buffer_srel (IB.immutable_preorder _)))
(requires (fun h -> True))
(ensures (fun h out h' ->
M.modifies M.loc_none h h' /\ B.unused_in out.base h /\ live_slice h' out
))
assume val load_file_buffer_c: (filename:C.String.t) -> ST (slice (srel_of_buffer_srel (IB.immutable_preorder _)) (srel_of_buffer_srel (IB.immutable_preorder _)))
(requires (fun h -> True))
(ensures (fun h out h' ->
M.modifies M.loc_none h h' /\ B.unused_in out.base h /\ live_slice h' out
))
(* TODO: implement in LowStar.Buffer *)
module U32 = FStar.UInt32
(** Corresponds to memcmp for `eqtype` *)
(* dirty trick: the additional unit arg prevents F* and KaRaMeL from viewing preorder arguments as sources of polymorphism *)
assume
val beqb: unit -> (#rrel1: _) -> (#rel1: _) -> (#rrel2: _) -> (#rel2: _) -> b1:B.mbuffer byte (buffer_srel_of_srel rrel1) (buffer_srel_of_srel rel1) -> b2:B.mbuffer byte (buffer_srel_of_srel rrel2) (buffer_srel_of_srel rel2)
-> len:U32.t{U32.v len <= B.length b1 /\ U32.v len <= B.length b2}
-> Stack bool
(requires (fun h ->
B.live h b1 /\
B.live h b2
))
(ensures (fun h0 z h1 -> h1 == h0 /\
(z <==> Seq.equal (Seq.slice (B.as_seq h0 b1) 0 (U32.v len)) (Seq.slice (B.as_seq h0 b2) 0 (U32.v len)))))
(** Test one parser+formatter pair against an in-memory buffer of UInt8.t *)
inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Modifies.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.IO.fst.checked",
"C.String.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.TestLib.Low.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.Modifies",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "IB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "LowParse.Low.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Printf",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.IO",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.TestLib",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.TestLib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [
"smt.arith.nl=false"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
t: LowParse.TestLib.Low.testbuffer_t ->
testname: Prims.string ->
input: LowParse.Slice.slice rrel rel
-> FStar.HyperStack.ST.ST Prims.unit | FStar.HyperStack.ST.ST | [] | [] | [
"LowParse.TestLib.Low.testbuffer_t",
"Prims.string",
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"FStar.HyperStack.IO.print_string",
"FStar.Printf.sprintf",
"FStar.UInt32.lte",
"LowParse.Slice.__proj__Mkslice__item__len",
"Prims.bool",
"LowParse.TestLib.Low.beqb",
"LowParse.Slice.__proj__Mkslice__item__base",
"FStar.Pervasives.Native.option",
"FStar.HyperStack.ST.push_frame",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Slice.live_slice",
"Prims.b2t"
] | [] | false | true | false | false | false | let test_buffer (t: testbuffer_t) (testname: string) (#rrel #rel: _) (input: slice rrel rel)
: ST unit (requires (fun h -> live_slice h input)) (ensures (fun _ _ _ -> true)) =
| push_frame ();
print_string (sprintf "==== Testing buffer %s ====\n" testname);
let result = t input in
(match result with
| Some output ->
(if U32.lte output.len input.len
then
(if beqb () input.base output.base output.len
then print_string "Formatted data matches original input data\n"
else (print_string "FAIL: formatted data does not match original input data\n"))
else (print_string "Invalid length return - it is longer than the input buffer!"))
| _ -> print_string "Failed to parse\n");
print_string (sprintf "==== Finished %s ====\n" testname);
pop_frame () | false |
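The record above is `test_buffer`, which runs one `testbuffer_t` against an in-memory slice and compares the re-formatted bytes with the original input. As a shape illustration only, here is a degenerate, hypothetical inhabitant of `testbuffer_t` (the name `failing_test` is made up) that always reports a parse failure; real instances are built from concrete parser/serializer pairs:

(* Sketch only: the simplest inhabitant of testbuffer_t, which never parses anything. *)
inline_for_extraction
let failing_test : testbuffer_t =
  fun #rrel #rel (input:slice rrel rel) -> None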
LowParse.Repr.fsti | LowParse.Repr.slice_of_const_buffer | val slice_of_const_buffer (b: C.const_buffer LP.byte) (len: uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b) | val slice_of_const_buffer (b: C.const_buffer LP.byte) (len: uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b) | let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
}) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 6,
"end_line": 60,
"start_col": 0,
"start_line": 54
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
b: LowStar.ConstBuffer.const_buffer LowParse.Bytes.byte ->
len: FStar.Integers.uint_32{FStar.UInt32.v len <= LowStar.ConstBuffer.length b}
-> LowParse.Slice.slice (LowParse.Repr.preorder b) (LowParse.Repr.preorder b) | Prims.Tot | [
"total"
] | [] | [
"LowStar.ConstBuffer.const_buffer",
"LowParse.Bytes.byte",
"FStar.Integers.uint_32",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.UInt32.v",
"LowStar.ConstBuffer.length",
"LowParse.Slice.Mkslice",
"LowParse.Repr.preorder",
"LowStar.ConstBuffer.cast",
"LowParse.Slice.slice"
] | [] | false | false | false | false | false | let slice_of_const_buffer (b: C.const_buffer LP.byte) (len: uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b) =
| let open LP in { base = C.cast b; len = len } | false |
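The record above extracts `slice_of_const_buffer`, which wraps a const pointer and a length into a LowParse slice. A usage sketch (hypothetical name `slice_of_const_slice`; it relies on the `const_slice` type defined a few lines later in LowParse.Repr.fsti and is essentially how `to_slice` in that file is written):

(* Sketch only: view an entire const_slice as a LowParse slice. *)
let slice_of_const_slice (cs:const_slice)
  : LP.slice (preorder cs.base) (preorder cs.base)
  = slice_of_const_buffer cs.base cs.slice_len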
LowParse.Repr.fsti | LowParse.Repr.of_slice | val of_slice (x: LP.slice mut_p mut_p) : Tot const_slice | val of_slice (x: LP.slice mut_p mut_p) : Tot const_slice | let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 17,
"end_line": 90,
"start_col": 0,
"start_line": 86
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: LowParse.Slice.slice LowParse.Repr.mut_p LowParse.Repr.mut_p -> LowParse.Repr.const_slice | Prims.Tot | [
"total"
] | [] | [
"LowParse.Slice.slice",
"LowParse.Repr.mut_p",
"LowParse.Repr.MkSlice",
"FStar.UInt32.t",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.length",
"LowParse.Bytes.byte",
"LowParse.Slice.buffer_srel_of_srel",
"LowParse.Slice.__proj__Mkslice__item__base",
"LowParse.Slice.__proj__Mkslice__item__len",
"LowStar.ConstBuffer.const_buffer",
"LowStar.ConstBuffer.of_buffer",
"LowParse.Repr.const_slice"
] | [] | false | false | false | true | false | let of_slice (x: LP.slice mut_p mut_p) : Tot const_slice =
| let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len | false |
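A hedged usage sketch for this entry's `of_slice` (editorial illustration, not part of the extracted dataset row): `of_slice` forgets mutability by viewing the slice's base buffer through LowStar.ConstBuffer, and `to_slice` re-exposes that const view as an `LP.slice`, so the LowParse validator/jumper API still applies to data reached through a `const_slice`. The module name and the helper `read_only_view` below are assumptions made for this example only.

module LowParse.Repr.Example  (* hypothetical example module *)
open LowParse.Repr
module LP = LowParse.Low.Base

(* Wrap a mutable LowParse slice as a read-only const_slice, then view it
   again as an LP.slice over the underlying const buffer; the resulting
   slice carries the const buffer's preorders, so no writes are possible. *)
let read_only_view (s:LP.slice mut_p mut_p) =
  to_slice (of_slice s)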
Hacl.Impl.Frodo.KEM.Encaps.fst | Hacl.Impl.Frodo.KEM.Encaps.crypto_kem_enc_seed_se_k | val crypto_kem_enc_seed_se_k:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> Stack unit
(requires fun h ->
live h mu /\ live h pk /\ live h seed_se_k /\
disjoint seed_se_k mu /\ disjoint seed_se_k pk)
(ensures fun h0 _ h1 -> modifies (loc seed_se_k) h0 h1 /\
as_seq h1 seed_se_k == S.crypto_kem_enc_seed_se_k a (as_seq h0 mu) (as_seq h0 pk)) | val crypto_kem_enc_seed_se_k:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> Stack unit
(requires fun h ->
live h mu /\ live h pk /\ live h seed_se_k /\
disjoint seed_se_k mu /\ disjoint seed_se_k pk)
(ensures fun h0 _ h1 -> modifies (loc seed_se_k) h0 h1 /\
as_seq h1 seed_se_k == S.crypto_kem_enc_seed_se_k a (as_seq h0 mu) (as_seq h0 pk)) | let crypto_kem_enc_seed_se_k a mu pk seed_se_k =
push_frame ();
let pkh_mu = create (bytes_pkhash a +! bytes_mu a) (u8 0) in
let h0 = ST.get () in
update_sub_f h0 pkh_mu 0ul (bytes_pkhash a)
(fun h -> FP.frodo_shake a (v (crypto_publickeybytes a)) (as_seq h0 pk) (v (bytes_pkhash a)))
(fun _ -> frodo_shake a (crypto_publickeybytes a) pk (bytes_pkhash a) (sub pkh_mu 0ul (bytes_pkhash a)));
let h1 = ST.get () in
update_sub pkh_mu (bytes_pkhash a) (bytes_mu a) mu;
let h2 = ST.get () in
LSeq.eq_intro
(LSeq.sub (as_seq h2 pkh_mu) 0 (v (bytes_pkhash a)))
(LSeq.sub (as_seq h1 pkh_mu) 0 (v (bytes_pkhash a)));
LSeq.lemma_concat2
(v (bytes_pkhash a)) (LSeq.sub (as_seq h1 pkh_mu) 0 (v (bytes_pkhash a)))
(v (bytes_mu a)) (as_seq h0 mu) (as_seq h2 pkh_mu);
//concat2 (bytes_pkhash a) pkh (bytes_mu a) mu pkh_mu;
frodo_shake a (bytes_pkhash a +! bytes_mu a) pkh_mu (2ul *! crypto_bytes a) seed_se_k;
pop_frame () | {
"file_name": "code/frodo/Hacl.Impl.Frodo.KEM.Encaps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 339,
"start_col": 0,
"start_line": 320
} | module Hacl.Impl.Frodo.KEM.Encaps
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open LowStar.Buffer
open Lib.IntTypes
open Lib.Buffer
open Hacl.Impl.Matrix
open Hacl.Impl.Frodo.Params
open Hacl.Impl.Frodo.KEM
open Hacl.Impl.Frodo.Encode
open Hacl.Impl.Frodo.Pack
open Hacl.Impl.Frodo.Sample
open Hacl.Frodo.Random
module ST = FStar.HyperStack.ST
module LSeq = Lib.Sequence
module LB = Lib.ByteSequence
module FP = Spec.Frodo.Params
module S = Spec.Frodo.KEM.Encaps
module M = Spec.Matrix
module KG = Hacl.Impl.Frodo.KEM.KeyGen
#set-options "--z3rlimit 100 --fuel 0 --ifuel 0"
inline_for_extraction noextract
val frodo_mul_add_sa_plus_e:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h bp_matrix /\
disjoint bp_matrix seed_a /\ disjoint bp_matrix ep_matrix /\ disjoint bp_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc bp_matrix) h0 h1 /\
as_matrix h1 bp_matrix ==
S.frodo_mul_add_sa_plus_e a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix =
push_frame ();
let a_matrix = matrix_create (params_n a) (params_n a) in
frodo_gen_matrix gen_a (params_n a) seed_a a_matrix;
matrix_mul sp_matrix a_matrix bp_matrix;
matrix_add bp_matrix ep_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c1:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> c1:lbytes (ct1bytes_len a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h c1 /\
disjoint seed_a c1 /\ disjoint ep_matrix c1 /\ disjoint sp_matrix c1)
(ensures fun h0 _ h1 -> modifies (loc c1) h0 h1 /\
as_seq h1 c1 ==
S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1 =
push_frame ();
let bp_matrix = matrix_create params_nbar (params_n a) in
frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix;
frodo_pack (params_logq a) bp_matrix c1;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e:
a:FP.frodo_alg
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h epp_matrix /\ live h v_matrix /\ live h sp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix epp_matrix /\ disjoint v_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e a (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix =
push_frame ();
let b_matrix = matrix_create (params_n a) params_nbar in
frodo_unpack (params_n a) params_nbar (params_logq a) b b_matrix;
matrix_mul sp_matrix b_matrix v_matrix;
matrix_add v_matrix epp_matrix;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e_plus_mu:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h mu /\ live h v_matrix /\
live h sp_matrix /\ live h epp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix sp_matrix /\
disjoint v_matrix mu /\ disjoint v_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e_plus_mu a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix =
push_frame ();
frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix;
let mu_encode = matrix_create params_nbar params_nbar in
frodo_key_encode (params_logq a) (params_extracted_bits a) params_nbar mu mu_encode;
matrix_add v_matrix mu_encode;
clear_matrix mu_encode;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c2:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> c2:lbytes (ct2bytes_len a)
-> Stack unit
(requires fun h ->
live h mu /\ live h b /\ live h sp_matrix /\
live h epp_matrix /\ live h c2 /\
disjoint mu c2 /\ disjoint b c2 /\
disjoint sp_matrix c2 /\ disjoint epp_matrix c2)
(ensures fun h0 _ h1 -> modifies (loc c2) h0 h1 /\
as_seq h1 c2 ==
S.crypto_kem_enc_ct_pack_c2 a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
#push-options "--z3rlimit 200"
let crypto_kem_enc_ct_pack_c2 a mu b sp_matrix epp_matrix c2 =
push_frame ();
let v_matrix = matrix_create params_nbar params_nbar in
frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix;
frodo_pack (params_logq a) v_matrix c2;
clear_matrix v_matrix;
pop_frame ()
#pop-options
inline_for_extraction noextract
val get_sp_ep_epp_matrices:
a:FP.frodo_alg
-> seed_se:lbytes (crypto_bytes a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h seed_se /\ live h sp_matrix /\
live h ep_matrix /\ live h epp_matrix /\
disjoint seed_se sp_matrix /\ disjoint seed_se ep_matrix /\
disjoint seed_se epp_matrix /\ disjoint sp_matrix ep_matrix /\
disjoint sp_matrix epp_matrix /\ disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1 /\
(as_matrix h1 sp_matrix, as_matrix h1 ep_matrix, as_matrix h1 epp_matrix) ==
S.get_sp_ep_epp_matrices a (as_seq h0 seed_se))
let get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix =
push_frame ();
[@inline_let] let s_bytes_len = secretmatrixbytes_len a in
let r = create (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) (u8 0) in
KG.frodo_shake_r a (u8 0x96) seed_se (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) r;
frodo_sample_matrix a params_nbar (params_n a) (sub r 0ul s_bytes_len) sp_matrix;
frodo_sample_matrix a params_nbar (params_n a) (sub r s_bytes_len s_bytes_len) ep_matrix;
frodo_sample_matrix a params_nbar params_nbar (sub r (2ul *! s_bytes_len) (2ul *! params_nbar *! params_nbar)) epp_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct0:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> b:lbytes (publicmatrixbytes_len a)
-> mu:lbytes (bytes_mu a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> ct:lbytes (crypto_ciphertextbytes a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h b /\ live h mu /\ live h ct /\
live h sp_matrix /\ live h ep_matrix /\ live h epp_matrix /\
disjoint ct seed_a /\ disjoint ct b /\ disjoint ct mu /\
disjoint ct sp_matrix /\ disjoint ct ep_matrix /\ disjoint ct epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc ct) h0 h1 /\
(let c1:LB.lbytes (FP.ct1bytes_len a) = S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_seq h0 sp_matrix) (as_seq h0 ep_matrix) in
let c2:LB.lbytes (FP.ct2bytes_len a) = S.crypto_kem_enc_ct_pack_c2 a (as_seq h0 mu) (as_seq h0 b) (as_seq h0 sp_matrix) (as_seq h0 epp_matrix) in
v (crypto_ciphertextbytes a) == FP.ct1bytes_len a + FP.ct2bytes_len a /\
as_seq h1 ct `Seq.equal` LSeq.concat #_ #(FP.ct1bytes_len a) #(FP.ct2bytes_len a) c1 c2))
let crypto_kem_enc_ct0 a gen_a seed_a b mu sp_matrix ep_matrix epp_matrix ct =
let c1 = sub ct 0ul (ct1bytes_len a) in
let c2 = sub ct (ct1bytes_len a) (ct2bytes_len a) in
let h0 = ST.get () in
crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1;
let h1 = ST.get () in
crypto_kem_enc_ct_pack_c2 a mu b sp_matrix epp_matrix c2;
let h2 = ST.get () in
LSeq.eq_intro
(LSeq.sub (as_seq h2 ct) 0 (v (ct1bytes_len a)))
(LSeq.sub (as_seq h1 ct) 0 (v (ct1bytes_len a)));
LSeq.lemma_concat2
(v (ct1bytes_len a)) (LSeq.sub (as_seq h1 ct) 0 (v (ct1bytes_len a)))
(v (ct2bytes_len a)) (LSeq.sub (as_seq h2 ct) (v (ct1bytes_len a)) (v (ct2bytes_len a))) (as_seq h2 ct)
inline_for_extraction noextract
val clear_matrix3:
a:FP.frodo_alg
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h sp_matrix /\ live h ep_matrix /\ live h epp_matrix /\
disjoint sp_matrix ep_matrix /\ disjoint sp_matrix epp_matrix /\
disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 ->
modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1)
let clear_matrix3 a sp_matrix ep_matrix epp_matrix =
clear_matrix sp_matrix;
clear_matrix ep_matrix;
clear_matrix epp_matrix
inline_for_extraction noextract
val crypto_kem_enc_ct:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu:lbytes (bytes_mu a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se:lbytes (crypto_bytes a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> Stack unit
(requires fun h ->
live h mu /\ live h pk /\ live h seed_se /\ live h ct /\
disjoint ct mu /\ disjoint ct pk /\ disjoint ct seed_se)
(ensures fun h0 _ h1 -> modifies (loc ct) h0 h1 /\
as_seq h1 ct == S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) (as_seq h0 seed_se))
#push-options "--z3rlimit 200"
let crypto_kem_enc_ct a gen_a mu pk seed_se ct =
push_frame ();
let h0 = ST.get () in
FP.expand_crypto_publickeybytes a;
let seed_a = sub pk 0ul bytes_seed_a in
let b = sub pk bytes_seed_a (publicmatrixbytes_len a) in
let sp_matrix = matrix_create params_nbar (params_n a) in
let ep_matrix = matrix_create params_nbar (params_n a) in
let epp_matrix = matrix_create params_nbar params_nbar in
get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix;
crypto_kem_enc_ct0 a gen_a seed_a b mu sp_matrix ep_matrix epp_matrix ct;
clear_matrix3 a sp_matrix ep_matrix epp_matrix;
let h1 = ST.get () in
LSeq.eq_intro
(as_seq h1 ct)
(S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) (as_seq h0 seed_se));
pop_frame ()
#pop-options
inline_for_extraction noextract
val crypto_kem_enc_ss:
a:FP.frodo_alg
-> k:lbytes (crypto_bytes a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> Stack unit
(requires fun h ->
live h k /\ live h ct /\ live h ss /\
disjoint ct ss /\ disjoint k ct /\ disjoint k ss)
(ensures fun h0 _ h1 -> modifies (loc ss) h0 h1 /\
as_seq h1 ss == S.crypto_kem_enc_ss a (as_seq h0 k) (as_seq h0 ct))
let crypto_kem_enc_ss a k ct ss =
push_frame ();
let ss_init_len = crypto_ciphertextbytes a +! crypto_bytes a in
let shake_input_ss = create ss_init_len (u8 0) in
concat2 (crypto_ciphertextbytes a) ct (crypto_bytes a) k shake_input_ss;
frodo_shake a ss_init_len shake_input_ss (crypto_bytes a) ss;
clear_words_u8 shake_input_ss;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_seed_se_k:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> Stack unit
(requires fun h ->
live h mu /\ live h pk /\ live h seed_se_k /\
disjoint seed_se_k mu /\ disjoint seed_se_k pk)
(ensures fun h0 _ h1 -> modifies (loc seed_se_k) h0 h1 /\
as_seq h1 seed_se_k == S.crypto_kem_enc_seed_se_k a (as_seq h0 mu) (as_seq h0 pk)) | {
"checked_file": "/",
"dependencies": [
"Spec.Matrix.fst.checked",
"Spec.Frodo.Params.fst.checked",
"Spec.Frodo.KEM.Encaps.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Matrix.fst.checked",
"Hacl.Impl.Frodo.Sample.fst.checked",
"Hacl.Impl.Frodo.Params.fst.checked",
"Hacl.Impl.Frodo.Pack.fst.checked",
"Hacl.Impl.Frodo.KEM.KeyGen.fst.checked",
"Hacl.Impl.Frodo.KEM.fst.checked",
"Hacl.Impl.Frodo.Encode.fst.checked",
"Hacl.Frodo.Random.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Frodo.KEM.Encaps.fst"
} | [
{
"abbrev": true,
"full_module": "Hacl.Impl.Frodo.KEM.KeyGen",
"short_module": "KG"
},
{
"abbrev": true,
"full_module": "Spec.Matrix",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.KEM.Encaps",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.Params",
"short_module": "FP"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "LB"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Frodo.Random",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Sample",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Pack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Encode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Params",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Matrix",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Frodo.Params.frodo_alg ->
mu: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.bytes_mu a) ->
pk: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_publickeybytes a) ->
seed_se_k: Hacl.Impl.Matrix.lbytes (2ul *! Hacl.Impl.Frodo.Params.crypto_bytes a)
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Spec.Frodo.Params.frodo_alg",
"Hacl.Impl.Matrix.lbytes",
"Hacl.Impl.Frodo.Params.bytes_mu",
"Hacl.Impl.Frodo.Params.crypto_publickeybytes",
"Lib.IntTypes.op_Star_Bang",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"FStar.UInt32.__uint_to_t",
"Hacl.Impl.Frodo.Params.crypto_bytes",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Frodo.Params.frodo_shake",
"Lib.IntTypes.op_Plus_Bang",
"Hacl.Impl.Frodo.Params.bytes_pkhash",
"Lib.Sequence.lemma_concat2",
"Lib.IntTypes.uint_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"Lib.IntTypes.v",
"Lib.Sequence.sub",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"Lib.IntTypes.uint8",
"Lib.Sequence.eq_intro",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"Lib.Buffer.update_sub",
"Lib.Buffer.update_sub_f",
"Spec.Frodo.Params.frodo_shake",
"Lib.Sequence.lseq",
"Lib.Buffer.lbuffer_t",
"Lib.IntTypes.int_t",
"Lib.Buffer.sub",
"Lib.IntTypes.add",
"Lib.Buffer.create",
"Lib.IntTypes.u8",
"Lib.Buffer.lbuffer",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let crypto_kem_enc_seed_se_k a mu pk seed_se_k =
| push_frame ();
let pkh_mu = create (bytes_pkhash a +! bytes_mu a) (u8 0) in
let h0 = ST.get () in
update_sub_f h0
pkh_mu
0ul
(bytes_pkhash a)
(fun h -> FP.frodo_shake a (v (crypto_publickeybytes a)) (as_seq h0 pk) (v (bytes_pkhash a)))
(fun _ ->
frodo_shake a (crypto_publickeybytes a) pk (bytes_pkhash a) (sub pkh_mu 0ul (bytes_pkhash a)));
let h1 = ST.get () in
update_sub pkh_mu (bytes_pkhash a) (bytes_mu a) mu;
let h2 = ST.get () in
LSeq.eq_intro (LSeq.sub (as_seq h2 pkh_mu) 0 (v (bytes_pkhash a)))
(LSeq.sub (as_seq h1 pkh_mu) 0 (v (bytes_pkhash a)));
LSeq.lemma_concat2 (v (bytes_pkhash a))
(LSeq.sub (as_seq h1 pkh_mu) 0 (v (bytes_pkhash a)))
(v (bytes_mu a))
(as_seq h0 mu)
(as_seq h2 pkh_mu);
frodo_shake a (bytes_pkhash a +! bytes_mu a) pkh_mu (2ul *! crypto_bytes a) seed_se_k;
pop_frame () | false |
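The entry above hashes the public key to `pkh`, materializes `pkh || mu` in the temporary buffer `pkh_mu`, and hashes that to obtain the `2 * crypto_bytes a` bytes of `seed_se_k`; `Lib.Sequence.lemma_concat2` is what ties the buffer contents to the spec-level concatenation. Below is a hedged, spec-level sketch of that data flow only: it is not the verbatim `Spec.Frodo.KEM.Encaps.crypto_kem_enc_seed_se_k`, the name `sketch_seed_se_k` is invented for this note, the `FP.*` constants are assumed to be the nat-valued spec counterparts of the machine-integer lengths used in the Low* code, and it may need the module's usual SMT options to verify.

module Spec.Frodo.Example  (* hypothetical example module *)
open FStar.Mul
module FP = Spec.Frodo.Params
module LSeq = Lib.Sequence
module LB = Lib.ByteSequence

(* pkh       = SHAKE(pk)        of length bytes_pkhash a;
   seed_se_k = SHAKE(pkh || mu) of length 2 * crypto_bytes a. *)
let sketch_seed_se_k (a:FP.frodo_alg)
    (mu:LB.lbytes (FP.bytes_mu a)) (pk:LB.lbytes (FP.crypto_publickeybytes a)) =
  let pkh = FP.frodo_shake a (FP.crypto_publickeybytes a) pk (FP.bytes_pkhash a) in
  FP.frodo_shake a (FP.bytes_pkhash a + FP.bytes_mu a)
    (LSeq.concat pkh mu) (2 * FP.crypto_bytes a)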
LowParse.Repr.fsti | LowParse.Repr.field_accessor_comp | val field_accessor_comp
(#k1 #k2 #k3: strong_parser_kind)
(#t1 #t2 #t3: Type)
(#p1: LP.parser k1 t1)
(#p2: LP.parser k2 t2)
(#p3: LP.parser k3 t3)
(f12: field_accessor p1 p2)
(f23: field_accessor p2 p3)
: field_accessor p1 p3 | val field_accessor_comp
(#k1 #k2 #k3: strong_parser_kind)
(#t1 #t2 #t3: Type)
(#p1: LP.parser k1 t1)
(#p2: LP.parser k2 t2)
(#p3: LP.parser k3 t3)
(f12: field_accessor p1 p2)
(f23: field_accessor p2 p3)
: field_accessor p1 p3 | let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3' | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 31,
"end_line": 625,
"start_col": 0,
"start_line": 613
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f12: LowParse.Repr.field_accessor p1 p2 -> f23: LowParse.Repr.field_accessor p2 p3
-> LowParse.Repr.field_accessor p1 p3 | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Repr.field_accessor",
"LowParse.Low.Base.Spec.clens",
"LowParse.Low.Base.Spec.gaccessor",
"LowParse.Low.Base.accessor",
"LowParse.Low.Base.jumper",
"LowParse.SLow.Base.parser32",
"LowParse.Repr.FieldAccessor",
"LowParse.Low.Base.Spec.clens_compose",
"LowParse.Low.Base.Spec.gaccessor_compose",
"LowParse.Low.Base.accessor_compose"
] | [] | false | false | false | false | false | let field_accessor_comp
(#k1 #k2 #k3: strong_parser_kind)
(#t1 #t2 #t3: Type)
(#p1: LP.parser k1 t1)
(#p2: LP.parser k2 t2)
(#p3: LP.parser k3 t3)
(f12: field_accessor p1 p2)
(f23: field_accessor p2 p3)
: field_accessor p1 p3 =
| [@@ inline_let ]let FieldAccessor acc12 j2 p2' = f12 in
[@@ inline_let ]let FieldAccessor acc23 j3 p3' = f23 in
[@@ inline_let ]let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3' | false |
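A brief usage note for `field_accessor_comp` (editorial illustration, not part of the extracted dataset row): composing an accessor from `t1` into a nested `t2` with an accessor from `t2` into `t3` yields a direct accessor from `t1` to `t3` that keeps `t3`'s jumper and `parser32`. The wrapper below only shows the call shape; the module name and `compose_twice` are invented for this example.

module LowParse.Repr.ComposeExample  (* hypothetical example module *)
open LowParse.Repr
module LP = LowParse.Low.Base

(* Chain two field accessors into one; the composite is itself a
   field_accessor and can be composed further in the same way. *)
inline_for_extraction noextract
let compose_twice
    (#k1 #k2 #k3:strong_parser_kind) (#t1 #t2 #t3:Type)
    (#p1:LP.parser k1 t1) (#p2:LP.parser k2 t2) (#p3:LP.parser k3 t3)
    (f12:field_accessor p1 p2) (f23:field_accessor p2 p3)
  : field_accessor p1 p3
  = field_accessor_comp f12 f23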
LowParse.Repr.fsti | LowParse.Repr.stable_region_repr_ptr | val stable_region_repr_ptr : r: FStar.HyperStack.ST.drgn -> t: Type -> Type | let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
} | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 533,
"start_col": 0,
"start_line": 529
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
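(* A minimal usage sketch: callers must handle the `None` case, which
   signals that the serialized form of `x` does not fit in `b` after `from`:

     match mk_from_serialize parser32 serializer32 size32 b from x with
     | None   -> ... // output slice too small
     | Some p -> ... // valid p h /\ value p == x
*)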
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
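(* A minimal usage sketch, assuming a jumper `jump_t : LP.jumper p.meta.parser`
   for the parser recorded in `p.meta`:

     let n = length p jump_t in   // n == p.meta.len
*)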
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
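(* A stable pointer is typically obtained by copying a transient one into an
   immutable buffer (see `stash`, later in this module); a sketch of the
   intended life cycle:

     let r' = stash rgn r r.length in   // r' is valid_if_live
     ... arbitrary heap updates elsewhere ...
     recall_stable_repr_ptr r'          // validity recovered while r'.b is live
*)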
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: FStar.HyperStack.ST.drgn -> t: Type -> Type | Prims.Tot | [
"total"
] | [] | [
"FStar.HyperStack.ST.drgn",
"LowParse.Repr.repr_ptr",
"Prims.l_and",
"LowParse.Repr.is_stable_in_region",
"Prims.eq2",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowParse.Bytes.byte",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowParse.Repr.__proj__Ptr__item__b",
"LowStar.ConstBuffer.cast",
"FStar.HyperStack.ST.rid_of_drgn"
] | [] | false | false | false | true | true | let stable_region_repr_ptr (r: ST.drgn) (t: Type) =
| p: repr_ptr t {is_stable_in_region p /\ B.frameOf (C.cast p.b) == ST.rid_of_drgn r} | false |
|
LowParse.Repr.fsti | LowParse.Repr.slice_of_repr_ptr | val slice_of_repr_ptr (#t: _) (p: repr_ptr t) : GTot (LP.slice (preorder p.b) (preorder p.b)) | val slice_of_repr_ptr (#t: _) (p: repr_ptr t) : GTot (LP.slice (preorder p.b) (preorder p.b)) | let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 40,
"end_line": 161,
"start_col": 0,
"start_line": 159
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser } | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Repr.repr_ptr t
-> Prims.GTot
(LowParse.Slice.slice (LowParse.Repr.preorder (Ptr?.b p)) (LowParse.Repr.preorder (Ptr?.b p))) | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Repr.repr_ptr",
"LowParse.Repr.slice_of_const_buffer",
"LowParse.Repr.__proj__Ptr__item__b",
"LowParse.Repr.__proj__Mkmeta__item__len",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowParse.Slice.slice",
"LowParse.Repr.preorder"
] | [] | false | false | false | false | false | let slice_of_repr_ptr #t (p: repr_ptr t) : GTot (LP.slice (preorder p.b) (preorder p.b)) =
| slice_of_const_buffer p.b p.meta.len | false |
LowParse.Repr.fsti | LowParse.Repr.valid' | val valid' : p: LowParse.Repr.repr_ptr t -> h: FStar.Monotonic.HyperStack.mem -> Prims.GTot Prims.logical | let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 28,
"end_line": 212,
"start_col": 0,
"start_line": 210
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Repr.repr_ptr t -> h: FStar.Monotonic.HyperStack.mem -> Prims.GTot Prims.logical | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Repr.repr_ptr",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Repr.valid_slice",
"LowParse.Repr.preorder",
"LowParse.Repr.__proj__Ptr__item__b",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowParse.Slice.slice",
"LowParse.Repr.slice_of_const_buffer",
"LowParse.Repr.__proj__Mkmeta__item__len",
"Prims.logical"
] | [] | false | false | false | false | true | let valid' (#t: Type) (p: repr_ptr t) (h: HS.mem) =
| let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h | false |
|
LowParse.Repr.fsti | LowParse.Repr.index | val index : b: LowParse.Repr.const_slice -> Type0 | let index (b:const_slice)= i:uint_32{ i <= b.slice_len } | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 56,
"end_line": 710,
"start_col": 0,
"start_line": 710
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
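(* A minimal usage sketch, assuming a live dynamic region `rgn : ST.drgn`
   and a currently-valid, transient `r : repr_ptr t`:

     let r' = stash rgn r r.length in
     // r' : stable_region_repr_ptr rgn t, with r'.meta == r.meta;
     // it can later be revived with recall_stable_region_repr_ptr rgn r'
     // whenever the region rgn is live.
*)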
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
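(* A minimal usage sketch (the names `parse_msg`, `parse_payload`,
   `accessor_payload`, `jump_payload`, `payload_p32` are assumed, not
   defined in this module):

     unfold let payload_field : field_accessor parse_msg parse_payload =
       FieldAccessor accessor_payload jump_payload payload_p32

     let q = get_field payload_field p in
     // q `sub_ptr` p  /\  value q == payload_field.cl.LP.clens_get (value p)
*)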
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
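(* A minimal usage sketch (the names `parse_msg`, `version_t`,
   `accessor_version` and `read_version` are assumed, not defined here),
   for a leaf field that has a `LP.leaf_reader`:

     unfold let version_field : field_reader parse_msg version_t =
       FieldReader accessor_version read_version

     let v = read_field version_field p in
     // v == version_field.cl.LP.clens_get (value p)
*)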
(*** Positional representation types ***) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: LowParse.Repr.const_slice -> Type0 | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.const_slice",
"FStar.Integers.uint_32",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"LowParse.Repr.__proj__MkSlice__item__slice_len"
] | [] | false | false | false | true | true | let index (b: const_slice) =
| i: uint_32{i <= b.slice_len} | false |
|
LowParse.Repr.fsti | LowParse.Repr.temp_slice_of_repr_ptr | val temp_slice_of_repr_ptr (#t: _) (p: repr_ptr t) : Tot (LP.slice (preorder p.b) (preorder p.b)) | val temp_slice_of_repr_ptr (#t: _) (p: repr_ptr t) : Tot (LP.slice (preorder p.b) (preorder p.b)) | let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 38,
"end_line": 184,
"start_col": 0,
"start_line": 182
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Repr.repr_ptr t
-> LowParse.Slice.slice (LowParse.Repr.preorder (Ptr?.b p)) (LowParse.Repr.preorder (Ptr?.b p)) | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.repr_ptr",
"LowParse.Repr.slice_of_const_buffer",
"LowParse.Repr.__proj__Ptr__item__b",
"LowParse.Repr.__proj__Ptr__item__length",
"LowParse.Slice.slice",
"LowParse.Repr.preorder"
] | [] | false | false | false | false | false | let temp_slice_of_repr_ptr #t (p: repr_ptr t) : Tot (LP.slice (preorder p.b) (preorder p.b)) =
| slice_of_const_buffer p.b p.length | false |
LowParse.Repr.fsti | LowParse.Repr.value_pos | val value_pos (#t #b: _) (r: repr_pos t b) : GTot t | val value_pos (#t #b: _) (r: repr_pos t b) : GTot t | let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 56,
"end_line": 722,
"start_col": 0,
"start_line": 722
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
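(* Reading of the invariant above, not part of the original file: for any
   `m : meta t`, the `meta_ok` field records that the stored bytes are exactly
   the wire image of the stored value, i.e.

     LowParse.Spec.Base.parse m.parser m.repr_bytes == Some (m.v, U32.v m.len)
*)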
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
///  * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
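(* Illustrative sketch, not part of the original file and not checked by F*;
   `msg` and `msg_parser` are hypothetical. A protocol module would typically
   fix the parser once and work with the instantiated type:

     let msg_ptr = repr_ptr_p msg msg_parser
     let msg_value (p:msg_ptr) : GTot msg = value p    // i.e. p.meta.v
*)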
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
///   Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
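(* Illustrative proof pattern, not part of the original file and not checked by
   F*: a client calls `reveal_valid ()` once when it needs the concrete meaning
   of `valid`, and framing across writes disjoint from `fp p` is then discharged
   automatically through the SMT pattern of `frame_valid`:

     let client #t (p:repr_ptr t) (out:B.buffer LP.byte) ... =
       reveal_valid ();
       ... B.upd out 0ul 0uy ...     // touches only out, disjoint from fp p
       ... p remains valid here by frame_valid ...
*)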
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
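(* Illustrative usage sketch, not part of the original file and not checked by
   F*; `msg_parser32`, `pos` and `pos'` are hypothetical. Once a validator or
   jumper has established `LP.valid_pos parser h (to_slice b) pos pos'`, a
   repr_ptr over exactly those bytes is obtained with:

     let p = mk_from_const_slice msg_parser32 b pos pos' in
     ... value p ...                 // the parsed value of b.[pos, pos')
*)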
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
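(* Illustrative usage sketch, not part of the original file and not checked by
   F*; all names are hypothetical. Serializing a value `x` into an output slice
   and obtaining a repr_ptr for the freshly written bytes:

     match mk_from_serialize msg_parser32 msg_serializer32 msg_size32 out 0ul x with
     | None   -> ...                 (* output slice too small *)
     | Some p -> ...                 (* valid p and value p == x *)
*)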
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
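(* Illustrative sketch, not part of the original file and not checked by F*:
   a typical lifecycle for a long-lived representation is to copy it into a
   region with `stash` and, much later, re-establish its validity from region
   liveness alone:

     let keep (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len}) =
       let s = stash rgn r len in               // immutable, stable copy in rgn
       ...
       recall_stable_region_repr_ptr rgn s;     // valid s holds again
       ... s ...
*)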
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
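(* Illustrative sketch, not part of the original file and not checked by F*;
   every name below is hypothetical. A message module defines an `unfold`
   field_accessor instance from its LowParse accessor, jumper and parser32, and
   projects sub-reprs with `get_field`:

     unfold let msg_payload : field_accessor msg_parser payload_parser =
       FieldAccessor payload_accessor payload_jumper payload_parser32
     let get_payload (p:repr_ptr_p msg msg_parser) = get_field msg_payload p
*)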
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
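(* Illustrative sketch, not part of the original file and not checked by F*;
   hypothetical names. When a field is a base value with a leaf reader, a
   field_reader instance returns it directly without materializing a sub-repr:

     unfold let msg_version : field_reader msg_parser U32.t =
       FieldReader version_accessor version_leaf_reader
     let get_version (p:repr_ptr_p msg msg_parser) = read_field msg_version p
*)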
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: LowParse.Repr.repr_pos t b -> Prims.GTot t | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowParse.Repr.__proj__Pos__item__meta"
] | [] | false | false | false | false | false | let value_pos #t #b (r: repr_pos t b) : GTot t =
| r.meta.v | false |
LowParse.Repr.fsti | LowParse.Repr.valid_repr_pos | val valid_repr_pos : r: LowParse.Repr.repr_pos t b -> h: FStar.Monotonic.HyperStack.mem -> Prims.logical | let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 19,
"end_line": 747,
"start_col": 0,
"start_line": 745
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
///  * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
///   Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
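(* Illustrative sketch, not part of the original file and not checked by F*;
   `some_parser` is hypothetical. A positional repr is an offset into its
   enclosing `const_slice`, and its specification is inherited from the pointer
   view through `as_ptr_spec`:

     let pos_value #t #b (r:repr_pos_p t b some_parser) : GTot t
       = value (as_ptr_spec r)                  // i.e. r.meta.v
*)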
(*** Validity ***) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: LowParse.Repr.repr_pos t b -> h: FStar.Monotonic.HyperStack.mem -> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowParse.Repr.valid",
"LowParse.Repr.as_ptr_spec",
"LowStar.ConstBuffer.live",
"LowParse.Bytes.byte",
"LowParse.Repr.__proj__MkSlice__item__base",
"Prims.logical"
] | [] | false | false | false | false | true | let valid_repr_pos (#t: Type) (#b: const_slice) (r: repr_pos t b) (h: HS.mem) =
| valid (as_ptr_spec r) h /\ C.live h b.base | false |
|
LowParse.Repr.fsti | LowParse.Repr.valid_slice | val valid_slice : slice: LowParse.Slice.slice r s -> meta: LowParse.Repr.meta t -> h: FStar.Monotonic.HyperStack.mem
-> Prims.logical | let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 67,
"end_line": 207,
"start_col": 0,
"start_line": 205
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | slice: LowParse.Slice.slice r s -> meta: LowParse.Repr.meta t -> h: FStar.Monotonic.HyperStack.mem
-> Prims.logical | Prims.Tot | [
"total"
] | [] | [
"LowParse.Slice.srel",
"LowParse.Bytes.byte",
"LowParse.Slice.slice",
"LowParse.Repr.meta",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowParse.Low.Base.Spec.valid_content_pos",
"LowParse.Repr.__proj__Mkmeta__item__parser_kind",
"LowParse.Repr.__proj__Mkmeta__item__parser",
"FStar.UInt32.__uint_to_t",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowParse.Repr.__proj__Mkmeta__item__len",
"Prims.eq2",
"FStar.Seq.Base.seq",
"LowParse.Repr.__proj__Mkmeta__item__repr_bytes",
"LowParse.Low.Base.Spec.bytes_of_slice_from_to",
"Prims.logical"
] | [] | false | false | false | false | true | let valid_slice (#t: Type) (#r #s: _) (slice: LP.slice r s) (meta: meta t) (h: HS.mem) =
| LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len | false |
|
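Since `valid_slice` is an `unfold` conjunction, a hypothesis of this form splits directly into its two LowParse facts. A minimal, unchecked sketch assuming the definitions quoted above; the lemma name is illustrative only:

let valid_slice_elim (#t:Type) (#r #s:_) (sl:LP.slice r s) (m:meta t) (h:HS.mem)
  : Lemma (requires valid_slice sl m h)
          (ensures  LP.valid_content_pos m.parser h sl 0ul m.v m.len /\
                    m.repr_bytes == LP.bytes_of_slice_from_to h sl 0ul m.len)
  = () // valid_slice is `unfold`, so both conjuncts are immediate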
LowParse.Repr.fsti | LowParse.Repr.fp_pos | val fp_pos (#t: _) (#b: const_slice) (r: repr_pos t b) : GTot B.loc | val fp_pos (#t: _) (#b: const_slice) (r: repr_pos t b) : GTot B.loc | let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 22,
"end_line": 753,
"start_col": 0,
"start_line": 751
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: LowParse.Repr.repr_pos t b -> Prims.GTot LowStar.Monotonic.Buffer.loc | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos",
"LowParse.Repr.fp",
"LowParse.Repr.as_ptr_spec",
"LowStar.Monotonic.Buffer.loc"
] | [] | false | false | false | false | false | let fp_pos #t (#b: const_slice) (r: repr_pos t b) : GTot B.loc =
| fp (as_ptr_spec r) | false |
LowParse.Repr.fsti | LowParse.Repr.slice_as_seq | val slice_as_seq : h: FStar.Monotonic.HyperStack.mem -> c: LowParse.Repr.const_slice
-> Prims.GTot (FStar.Seq.Base.seq LowParse.Bytes.byte) | let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 55,
"end_line": 96,
"start_col": 0,
"start_line": 95
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: FStar.Monotonic.HyperStack.mem -> c: LowParse.Repr.const_slice
-> Prims.GTot (FStar.Seq.Base.seq LowParse.Bytes.byte) | Prims.GTot | [
"sometrivial"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"LowParse.Repr.const_slice",
"FStar.Seq.Base.slice",
"LowParse.Bytes.byte",
"LowStar.ConstBuffer.as_seq",
"LowParse.Repr.__proj__MkSlice__item__base",
"FStar.UInt32.v",
"LowParse.Repr.__proj__MkSlice__item__slice_len",
"FStar.Seq.Base.seq"
] | [] | false | false | false | false | false | let slice_as_seq (h: HS.mem) (c: const_slice) =
| Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len) | false |
|
LowParse.Repr.fsti | LowParse.Repr.to_bytes | val to_bytes (#t: _) (p: repr_ptr t) (len: uint_32)
: Stack FStar.Bytes.bytes
(requires fun h -> valid p h /\ len == p.meta.len)
(ensures
fun h x h' ->
B.modifies B.loc_none h h' /\ FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len) | val to_bytes (#t: _) (p: repr_ptr t) (len: uint_32)
: Stack FStar.Bytes.bytes
(requires fun h -> valid p h /\ len == p.meta.len)
(ensures
fun h x h' ->
B.modifies B.loc_none h h' /\ FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len) | let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 42,
"end_line": 386,
"start_col": 0,
"start_line": 374
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Repr.repr_ptr t -> len: FStar.Integers.uint_32
-> FStar.HyperStack.ST.Stack FStar.Bytes.bytes | FStar.HyperStack.ST.Stack | [] | [] | [
"LowParse.Repr.repr_ptr",
"FStar.Integers.uint_32",
"FStar.Bytes.of_buffer",
"LowStar.ConstBuffer.qbuf_pre",
"LowParse.Bytes.byte",
"LowStar.ConstBuffer.as_qbuf",
"LowParse.Repr.__proj__Ptr__item__b",
"LowStar.ConstBuffer.cast",
"FStar.Bytes.bytes",
"Prims.b2t",
"Prims.op_Equality",
"FStar.UInt.uint_t",
"FStar.Bytes.length",
"FStar.UInt32.v",
"Prims.unit",
"LowParse.Repr.reveal_valid",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowParse.Repr.valid",
"Prims.eq2",
"LowParse.Repr.__proj__Mkmeta__item__len",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"FStar.Seq.Base.seq",
"FStar.Bytes.reveal",
"LowParse.Repr.__proj__Mkmeta__item__repr_bytes",
"FStar.UInt32.t",
"FStar.Bytes.len"
] | [] | false | true | false | false | false | let to_bytes #t (p: repr_ptr t) (len: uint_32)
: Stack FStar.Bytes.bytes
(requires fun h -> valid p h /\ len == p.meta.len)
(ensures
fun h x h' ->
B.modifies B.loc_none h h' /\ FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len) =
| reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b) | false |
Hacl.Impl.Ed25519.Ladder.fst | Hacl.Impl.Ed25519.Ladder.point_mul_g | val point_mul_g:
out:point
-> scalar:lbuffer uint8 32ul ->
Stack unit
(requires fun h ->
live h scalar /\ live h out /\ disjoint out scalar)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.point_inv_t h1 out /\ F51.inv_ext_point (as_seq h1 out) /\
S.to_aff_point (F51.point_eval h1 out) ==
S.to_aff_point (S.point_mul_g (as_seq h0 scalar))) | val point_mul_g:
out:point
-> scalar:lbuffer uint8 32ul ->
Stack unit
(requires fun h ->
live h scalar /\ live h out /\ disjoint out scalar)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.point_inv_t h1 out /\ F51.inv_ext_point (as_seq h1 out) /\
S.to_aff_point (F51.point_eval h1 out) ==
S.to_aff_point (S.point_mul_g (as_seq h0 scalar))) | let point_mul_g out scalar =
push_frame ();
let h0 = ST.get () in
let bscalar = create 4ul (u64 0) in
convert_scalar scalar bscalar;
let q1 = create 20ul (u64 0) in
make_g q1;
point_mul_g_mk_q1234 out bscalar q1;
lemma_exp_four_fw_local (as_seq h0 scalar);
pop_frame () | {
"file_name": "code/ed25519/Hacl.Impl.Ed25519.Ladder.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 248,
"start_col": 0,
"start_line": 239
} | module Hacl.Impl.Ed25519.Ladder
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum25519
module F51 = Hacl.Impl.Ed25519.Field51
module BSeq = Lib.ByteSequence
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BE = Hacl.Impl.Exponentiation
module ME = Hacl.Impl.MultiExponentiation
module PT = Hacl.Impl.PrecompTable
module SPT256 = Hacl.Spec.PrecompBaseTable256
module BD = Hacl.Bignum.Definitions
module SD = Hacl.Spec.Bignum.Definitions
module S = Spec.Ed25519
open Hacl.Impl.Ed25519.PointConstants
include Hacl.Impl.Ed25519.Group
include Hacl.Ed25519.PrecompTable
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
inline_for_extraction noextract
let table_inv_w4 : BE.table_inv_t U64 20ul 16ul =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 4ul in
[@inline_let] let table_len = 16ul in
BE.table_inv_precomp len ctx_len k l table_len
inline_for_extraction noextract
let table_inv_w5 : BE.table_inv_t U64 20ul 32ul =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 5ul in
[@inline_let] let table_len = 32ul in
assert_norm (pow2 (v l) = v table_len);
BE.table_inv_precomp len ctx_len k l table_len
inline_for_extraction noextract
val convert_scalar: scalar:lbuffer uint8 32ul -> bscalar:lbuffer uint64 4ul ->
Stack unit
(requires fun h -> live h scalar /\ live h bscalar /\ disjoint scalar bscalar)
(ensures fun h0 _ h1 -> modifies (loc bscalar) h0 h1 /\
BD.bn_v h1 bscalar == BSeq.nat_from_bytes_le (as_seq h0 scalar))
let convert_scalar scalar bscalar =
let h0 = ST.get () in
Hacl.Spec.Bignum.Convert.bn_from_bytes_le_lemma #U64 32 (as_seq h0 scalar);
Hacl.Bignum.Convert.mk_bn_from_bytes_le true 32ul scalar bscalar
inline_for_extraction noextract
val point_mul_noalloc:
out:point
-> bscalar:lbuffer uint64 4ul
-> q:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h q /\ live h out /\
disjoint q out /\ disjoint q bscalar /\ disjoint out bscalar /\
F51.point_inv_t h q /\ F51.inv_ext_point (as_seq h q) /\
BD.bn_v h bscalar < pow2 256)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.point_inv_t h1 out /\ F51.inv_ext_point (as_seq h1 out) /\
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_fw S.mk_ed25519_comm_monoid
(S.to_aff_point (F51.point_eval h0 q)) 256 (BD.bn_v h0 bscalar) 4)
let point_mul_noalloc out bscalar q =
BE.lexp_fw_consttime 20ul 0ul mk_ed25519_concrete_ops
4ul (null uint64) q 4ul 256ul bscalar out
let point_mul out scalar q =
let h0 = ST.get () in
SE.exp_fw_lemma S.mk_ed25519_concrete_ops
(F51.point_eval h0 q) 256 (BSeq.nat_from_bytes_le (as_seq h0 scalar)) 4;
push_frame ();
let bscalar = create 4ul (u64 0) in
convert_scalar scalar bscalar;
point_mul_noalloc out bscalar q;
pop_frame ()
val precomp_get_consttime: BE.pow_a_to_small_b_st U64 20ul 0ul mk_ed25519_concrete_ops 4ul 16ul
(BE.table_inv_precomp 20ul 0ul mk_ed25519_concrete_ops 4ul 16ul)
[@CInline]
let precomp_get_consttime ctx a table bits_l tmp =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 4ul in
[@inline_let] let table_len = 16ul in
BE.lprecomp_get_consttime len ctx_len k l table_len ctx a table bits_l tmp
inline_for_extraction noextract
val point_mul_g_noalloc: out:point -> bscalar:lbuffer uint64 4ul
-> q1:point -> q2:point
-> q3:point -> q4:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h out /\ live h q1 /\
live h q2 /\ live h q3 /\ live h q4 /\
disjoint out bscalar /\ disjoint out q1 /\ disjoint out q2 /\
disjoint out q3 /\ disjoint out q4 /\
disjoint q1 q2 /\ disjoint q1 q3 /\ disjoint q1 q4 /\
disjoint q2 q3 /\ disjoint q2 q4 /\ disjoint q3 q4 /\
BD.bn_v h bscalar < pow2 256 /\
F51.linv (as_seq h q1) /\ refl (as_seq h q1) == g_aff /\
F51.linv (as_seq h q2) /\ refl (as_seq h q2) == g_pow2_64 /\
F51.linv (as_seq h q3) /\ refl (as_seq h q3) == g_pow2_128 /\
F51.linv (as_seq h q4) /\ refl (as_seq h q4) == g_pow2_192)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
(let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 (BD.bn_v h0 bscalar) in
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_four_fw S.mk_ed25519_comm_monoid
g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4))
let point_mul_g_noalloc out bscalar q1 q2 q3 q4 =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 4ul in
[@inline_let] let table_len = 16ul in
[@inline_let] let bLen = 1ul in
[@inline_let] let bBits = 64ul in
let h0 = ST.get () in
recall_contents precomp_basepoint_table_w4 precomp_basepoint_table_lseq_w4;
let h1 = ST.get () in
precomp_basepoint_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q1) (as_seq h1 precomp_basepoint_table_w4));
recall_contents precomp_g_pow2_64_table_w4 precomp_g_pow2_64_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_64_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q2) (as_seq h1 precomp_g_pow2_64_table_w4));
recall_contents precomp_g_pow2_128_table_w4 precomp_g_pow2_128_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_128_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q3) (as_seq h1 precomp_g_pow2_128_table_w4));
recall_contents precomp_g_pow2_192_table_w4 precomp_g_pow2_192_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_192_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q4) (as_seq h1 precomp_g_pow2_192_table_w4));
let r1 = sub bscalar 0ul 1ul in
let r2 = sub bscalar 1ul 1ul in
let r3 = sub bscalar 2ul 1ul in
let r4 = sub bscalar 3ul 1ul in
SPT256.lemma_decompose_nat256_as_four_u64_lbignum (as_seq h0 bscalar);
ME.mk_lexp_four_fw_tables len ctx_len k l table_len
table_inv_w4 table_inv_w4 table_inv_w4 table_inv_w4
precomp_get_consttime
precomp_get_consttime
precomp_get_consttime
precomp_get_consttime
(null uint64) q1 bLen bBits r1 q2 r2 q3 r3 q4 r4
(to_const precomp_basepoint_table_w4)
(to_const precomp_g_pow2_64_table_w4)
(to_const precomp_g_pow2_128_table_w4)
(to_const precomp_g_pow2_192_table_w4)
out;
LowStar.Ignore.ignore q2; // q2, q3, q4 are unused variables
LowStar.Ignore.ignore q3;
LowStar.Ignore.ignore q4
inline_for_extraction noextract
val point_mul_g_mk_q1234: out:point -> bscalar:lbuffer uint64 4ul -> q1:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h out /\ live h q1 /\
disjoint out bscalar /\ disjoint out q1 /\
BD.bn_v h bscalar < pow2 256 /\
F51.linv (as_seq h q1) /\ refl (as_seq h q1) == g_aff)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
(let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 (BD.bn_v h0 bscalar) in
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_four_fw S.mk_ed25519_comm_monoid
g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4))
let point_mul_g_mk_q1234 out bscalar q1 =
push_frame ();
let q2 = mk_ext_g_pow2_64 () in
let q3 = mk_ext_g_pow2_128 () in
let q4 = mk_ext_g_pow2_192 () in
ext_g_pow2_64_lseq_lemma ();
ext_g_pow2_128_lseq_lemma ();
ext_g_pow2_192_lseq_lemma ();
point_mul_g_noalloc out bscalar q1 q2 q3 q4;
pop_frame ()
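(* Editorial note, not part of the original source: point_mul_g splits the
   256-bit scalar b into four 64-bit limbs via
   SPT256.decompose_nat256_as_four_u64, so b = b0 + 2^64*b1 + 2^128*b2 + 2^192*b3,
   and computes [b0]B + [b1]([2^64]B) + [b2]([2^128]B) + [b3]([2^192]B) as a
   single four-fold fixed-window exponentiation (LE.exp_four_fw, window 4) over
   the precomputed tables recalled in point_mul_g_noalloc. lemma_exp_four_fw_local
   below reconciles this with the spec-level S.point_mul_g. *)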
val lemma_exp_four_fw_local: b:BSeq.lbytes 32 ->
Lemma (let bn = BSeq.nat_from_bytes_le b in
let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 bn in
let cm = S.mk_ed25519_comm_monoid in
LE.exp_four_fw cm g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4 ==
S.to_aff_point (S.point_mul_g b))
let lemma_exp_four_fw_local b =
let bn = BSeq.nat_from_bytes_le b in
let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 bn in
let cm = S.mk_ed25519_comm_monoid in
let res = LE.exp_four_fw cm g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4 in
assert (res == SPT256.exp_as_exp_four_nat256_precomp cm g_aff bn);
SPT256.lemma_point_mul_base_precomp4 cm g_aff bn;
assert (res == LE.pow cm g_aff bn);
SE.exp_fw_lemma S.mk_ed25519_concrete_ops g_c 256 bn 4;
LE.exp_fw_lemma cm g_aff 256 bn 4;
assert (S.to_aff_point (S.point_mul_g b) == LE.pow cm g_aff bn) | {
"checked_file": "/",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"Spec.Ed25519.Lemmas.fsti.checked",
"Spec.Ed25519.fst.checked",
"prims.fst.checked",
"LowStar.Ignore.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.PrecompBaseTable256.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"Hacl.Spec.Bignum.Convert.fst.checked",
"Hacl.Impl.PrecompTable.fsti.checked",
"Hacl.Impl.MultiExponentiation.fsti.checked",
"Hacl.Impl.Exponentiation.fsti.checked",
"Hacl.Impl.Ed25519.PointNegate.fst.checked",
"Hacl.Impl.Ed25519.PointConstants.fst.checked",
"Hacl.Impl.Ed25519.Group.fst.checked",
"Hacl.Impl.Ed25519.Field51.fst.checked",
"Hacl.Ed25519.PrecompTable.fsti.checked",
"Hacl.Bignum25519.fsti.checked",
"Hacl.Bignum.Definitions.fst.checked",
"Hacl.Bignum.Convert.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.Impl.Ed25519.Ladder.fst"
} | [
{
"abbrev": false,
"full_module": "Hacl.Ed25519.PrecompTable",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.Group",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.PointConstants",
"short_module": null
},
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "SD"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.PrecompBaseTable256",
"short_module": "SPT256"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.PrecompTable",
"short_module": "PT"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.MultiExponentiation",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Exponentiation",
"short_module": "BE"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | out: Hacl.Bignum25519.point -> scalar: Lib.Buffer.lbuffer Lib.IntTypes.uint8 32ul
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Hacl.Bignum25519.point",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"FStar.UInt32.__uint_to_t",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Ed25519.Ladder.lemma_exp_four_fw_local",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"Hacl.Impl.Ed25519.Ladder.point_mul_g_mk_q1234",
"Hacl.Impl.Ed25519.PointConstants.make_g",
"Lib.Buffer.lbuffer_t",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U64",
"Lib.IntTypes.SEC",
"FStar.UInt32.uint_to_t",
"FStar.UInt32.t",
"Lib.Buffer.create",
"Lib.IntTypes.uint64",
"Lib.IntTypes.u64",
"Hacl.Impl.Ed25519.Ladder.convert_scalar",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let point_mul_g out scalar =
| push_frame ();
let h0 = ST.get () in
let bscalar = create 4ul (u64 0) in
convert_scalar scalar bscalar;
let q1 = create 20ul (u64 0) in
make_g q1;
point_mul_g_mk_q1234 out bscalar q1;
lemma_exp_four_fw_local (as_seq h0 scalar);
pop_frame () | false |
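A minimal caller sketch for the definition above, assuming only the val shown in this record; the name point_mul_g_example and its trivial postcondition are illustrative additions, not part of the source:
// Hypothetical caller sketch (not from the source): allocate the 20-limb
// extended output point on the stack and multiply the base point by scalar.
let point_mul_g_example (scalar:lbuffer uint8 32ul) :
  Stack unit
  (requires fun h -> live h scalar)
  (ensures fun _ _ _ -> True)
  =
  push_frame ();
  let out = create 20ul (u64 0) in // extended point: 4 coordinates x 5 limbs
  point_mul_g out scalar;          // out <- scalar * B, per the ensures clause above
  pop_frame ()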
LowParse.Repr.fsti | LowParse.Repr.length | val length (#t: _) (p: repr_ptr t) (j: LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h -> valid p h)
(ensures fun h n h' -> B.modifies B.loc_none h h' /\ n == p.meta.len) | val length (#t: _) (p: repr_ptr t) (j: LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h -> valid p h)
(ensures fun h n h' -> B.modifies B.loc_none h h' /\ n == p.meta.len) | let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 11,
"end_line": 371,
"start_col": 0,
"start_line": 361
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Repr.repr_ptr t -> j: LowParse.Low.Base.jumper (Mkmeta?.parser (Ptr?.meta p))
-> FStar.HyperStack.ST.Stack FStar.UInt32.t | FStar.HyperStack.ST.Stack | [] | [] | [
"LowParse.Repr.repr_ptr",
"LowParse.Low.Base.jumper",
"LowParse.Repr.__proj__Mkmeta__item__parser_kind",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowParse.Repr.__proj__Mkmeta__item__parser",
"LowParse.Repr.preorder",
"LowParse.Repr.__proj__Ptr__item__b",
"FStar.UInt32.__uint_to_t",
"FStar.UInt32.t",
"LowParse.Slice.slice",
"LowParse.Repr.temp_slice_of_repr_ptr",
"Prims.unit",
"LowParse.Repr.reveal_valid",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Repr.valid",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.eq2",
"LowParse.Repr.__proj__Mkmeta__item__len"
] | [] | false | true | false | false | false | let length #t (p: repr_ptr t) (j: LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h -> valid p h)
(ensures fun h n h' -> B.modifies B.loc_none h h' /\ n == p.meta.len) =
| reveal_valid ();
let s = temp_slice_of_repr_ptr p in
j s 0ul | false |
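A sketch of how length composes with the to_bytes destructor from the earlier record, assuming only their stated contracts; the name length_then_bytes and its combined specification are illustrative and have not been re-verified:
// Editorial sketch: compute the representation length with a jumper, then copy
// out the backing bytes; relies only on the specs of `length` and `to_bytes`.
let length_then_bytes #t (p:repr_ptr t) (j:LP.jumper p.meta.parser)
  : Stack FStar.Bytes.bytes
    (requires fun h -> valid p h)
    (ensures fun h b h' ->
      B.modifies B.loc_none h h' /\
      FStar.Bytes.reveal b == p.meta.repr_bytes)
  = let n = length p j in // n == p.meta.len, no memory modification
    to_bytes p n          // copies the p.meta.len bytes backing p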
EverParse3d.InputStream.Base.fst | EverParse3d.InputStream.Base.length_all | val length_all: #t: _ -> #input_stream_inst t -> x: t -> GTot nat | val length_all: #t: _ -> #input_stream_inst t -> x: t -> GTot nat | let length_all #t (#_: input_stream_inst t) (x: t) : GTot nat = U64.v (len_all x) | {
"file_name": "src/3d/prelude/EverParse3d.InputStream.Base.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 81,
"end_line": 232,
"start_col": 0,
"start_line": 232
} | module EverParse3d.InputStream.Base
module U8 = FStar.UInt8
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module B = LowStar.Buffer
module LPE = EverParse3d.ErrorCode
module LP = LowParse.Low.Base
noextract
inline_for_extraction
class input_stream_inst (t: Type) : Type = {
live: t -> HS.mem -> Tot prop;
footprint: (x: t) -> Ghost B.loc
(requires True)
(ensures (fun y -> B.address_liveness_insensitive_locs `B.loc_includes` y));
perm_footprint: (x: t) -> Ghost B.loc
(requires True)
(ensures (fun y -> footprint x `B.loc_includes` y));
live_not_unused_in:
(x: t) ->
(h: HS.mem) ->
Lemma
(requires (live x h))
(ensures (B.loc_not_unused_in h `B.loc_includes` footprint x));
len_all: (x: t) -> GTot LPE.pos_t;
get_all: (x: t) -> Ghost (Seq.seq U8.t)
(requires True)
(ensures (fun y -> Seq.length y == U64.v (len_all x)));
get_remaining: (x: t) -> (h: HS.mem) -> Ghost (Seq.seq U8.t)
(requires (live x h))
(ensures (fun y -> Seq.length y <= U64.v (len_all x)));
get_read: (x: t) -> (h: HS.mem) -> Ghost (Seq.seq U8.t)
(requires (live x h))
(ensures (fun y -> get_all x `Seq.equal` (y `Seq.append` get_remaining x h)));
preserved:
(x: t) ->
(l: B.loc) ->
(h: HS.mem) ->
(h' : HS.mem) ->
Lemma
(requires (live x h /\ B.modifies l h h' /\ B.loc_disjoint (footprint x) l))
(ensures (
live x h' /\
get_remaining x h' == get_remaining x h /\
get_read x h' == get_read x h
));
tlen: t -> Type0;
extra_t: Type0;
has:
(# [EverParse3d.Util.solve_from_ctx () ] extra_t ) ->
(x: t) ->
(len: tlen x) ->
(pos: LPE.pos_t) ->
(n: U64.t) ->
HST.Stack bool
(requires (fun h ->
live x h /\
U64.v pos == Seq.length (get_read x h)
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
(res == true <==> Seq.length (get_remaining x h) >= U64.v n)
));
read:
(# [EverParse3d.Util.solve_from_ctx ()] extra_t ) ->
(t': Type0) ->
(k: LP.parser_kind) ->
(p: LP.parser k t') ->
(r: LP.leaf_reader p) ->
(x: t) ->
(pos: LPE.pos_t) ->
(n: U64.t) ->
HST.Stack t'
(requires (fun h ->
live x h /\
U64.v pos == Seq.length (get_read x h) /\
k.LP.parser_kind_subkind == Some LP.ParserStrong /\
k.LP.parser_kind_high == Some k.LP.parser_kind_low /\
k.LP.parser_kind_low == U64.v n /\
U64.v n > 0 /\
U64.v n < 4294967296 /\
Some? (LP.parse p (get_remaining x h))
))
(ensures (fun h dst' h' ->
let s = get_remaining x h in
B.modifies (perm_footprint x) h h' /\
Seq.length s >= U64.v n /\
LP.parse p (Seq.slice s 0 (U64.v n)) == Some (dst', U64.v n) /\
LP.parse p s == Some (dst', U64.v n) /\
live x h' /\
get_remaining x h' `Seq.equal` Seq.slice s (U64.v n) (Seq.length s)
));
skip:
(# [EverParse3d.Util.solve_from_ctx ()] extra_t ) ->
(x: t) ->
(pos: LPE.pos_t) ->
(n: U64.t) ->
HST.Stack unit
(requires (fun h ->
live x h /\
U64.v pos == Seq.length (get_read x h) /\
Seq.length (get_remaining x h) >= U64.v n
))
(ensures (fun h _ h' ->
let s = get_remaining x h in
B.modifies (perm_footprint x) h h' /\
live x h' /\
get_remaining x h' `Seq.equal` Seq.slice s (U64.v n) (Seq.length s)
));
skip_if_success:
(# [EverParse3d.Util.solve_from_ctx ()] extra_t ) ->
(x: t) ->
(pos: LPE.pos_t) ->
(res: U64.t) ->
HST.Stack unit
(requires (fun h ->
live x h /\
(LPE.is_success res ==> (
U64.v pos == Seq.length (get_read x h)) /\
U64.v res >= U64.v pos /\
U64.v pos + Seq.length (get_remaining x h) >= U64.v res
)))
(ensures (fun h _ h' ->
let s = get_remaining x h in
B.modifies (perm_footprint x) h h' /\
live x h' /\
get_remaining x h' == (if LPE.is_success res then Seq.slice s (U64.v res - U64.v pos) (Seq.length s) else get_remaining x h)
));
empty:
(# [EverParse3d.Util.solve_from_ctx ()] extra_t ) ->
(x: t) ->
(len: tlen x) ->
(pos: LPE.pos_t) ->
HST.Stack LPE.pos_t
(requires (fun h ->
live x h /\
U64.v pos == Seq.length (get_read x h)
))
(ensures (fun h res h' ->
B.modifies (perm_footprint x) h h' /\
live x h' /\
U64.v res == Seq.length (get_read x h') /\
get_remaining x h' `Seq.equal` Seq.empty
));
is_prefix_of:
(x: t) ->
(y: t) ->
Tot prop;
get_suffix:
(x: t) ->
(y: t) ->
Ghost (Seq.seq U8.t)
(requires (x `is_prefix_of` y))
(ensures (fun _ -> True));
is_prefix_of_prop:
(x: t) ->
(y: t) ->
(h: HS.mem) ->
Lemma
(requires (
live x h /\
x `is_prefix_of` y
))
(ensures (
live y h /\
get_read y h `Seq.equal` get_read x h /\
get_remaining y h `Seq.equal` (get_remaining x h `Seq.append` get_suffix x y)
));
truncate:
(x: t) ->
(pos: LPE.pos_t) ->
(n: U64.t) ->
HST.Stack t
(requires (fun h ->
live x h /\
U64.v pos == Seq.length (get_read x h) /\
U64.v n <= Seq.length (get_remaining x h)
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res `is_prefix_of` x /\
footprint res == footprint x /\
perm_footprint res == perm_footprint x /\
live res h' /\
Seq.length (get_remaining res h') == U64.v n
));
truncate_len:
(x: t) ->
(pos: LPE.pos_t) ->
(n: U64.t) ->
(res: t) ->
HST.Stack (tlen res)
(requires (fun h ->
live x h /\
U64.v pos == Seq.length (get_read x h) /\
U64.v n <= Seq.length (get_remaining x h) /\
res `is_prefix_of` x /\
footprint res == footprint x /\
perm_footprint res == perm_footprint x /\
live res h /\
Seq.length (get_remaining res h) == U64.v n
))
(ensures (fun h res_len h' ->
B.modifies B.loc_none h h'
));
} | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.Tactics.Typeclasses.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"EverParse3d.Util.fst.checked",
"EverParse3d.ErrorCode.fst.checked"
],
"interface_file": false,
"source_file": "EverParse3d.InputStream.Base.fst"
} | [
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "EverParse3d.ErrorCode",
"short_module": "LPE"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": false,
"full_module": "EverParse3d.InputStream",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverParse3d.InputStream",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [
"smt.qi.eager_threshold=10"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: t -> Prims.GTot Prims.nat | Prims.GTot | [
"sometrivial"
] | [] | [
"EverParse3d.InputStream.Base.input_stream_inst",
"FStar.UInt64.v",
"EverParse3d.InputStream.Base.len_all",
"Prims.nat"
] | [] | false | false | false | false | false | let length_all #t (#_: input_stream_inst t) (x: t) : GTot nat =
| U64.v (len_all x) | false |
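A worked restatement of the ghost length, using only the refinement the class places on get_all:
(* For any instance and any x:t the class gives
     get_all x : Seq.seq U8.t   with   Seq.length (get_all x) == U64.v (len_all x),
   hence
     length_all x = U64.v (len_all x) = Seq.length (get_all x). *)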
LowParse.Repr.fsti | LowParse.Repr.valid_if_live_intro | val valid_if_live_intro (#t: _) (r: repr_ptr t) (h: HS.mem)
: Lemma
(requires
(C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\ valid r h /\
(let i:I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\ i `I.value_is` (Ghost.hide m.repr_bytes))))
(ensures valid_if_live r) | val valid_if_live_intro (#t: _) (r: repr_ptr t) (h: HS.mem)
: Lemma
(requires
(C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\ valid r h /\
(let i:I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\ i `I.value_is` (Ghost.hide m.repr_bytes))))
(ensures valid_if_live r) | let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
() | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 6,
"end_line": 458,
"start_col": 0,
"start_line": 434
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \ | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: LowParse.Repr.repr_ptr t -> h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.ConstBuffer.qbuf_qual (LowStar.ConstBuffer.as_qbuf (Ptr?.b r)) ==
LowStar.ConstBuffer.IMMUTABLE /\ LowParse.Repr.valid r h /\
(let i = LowStar.ConstBuffer.as_mbuf (Ptr?.b r) in
let m = Ptr?.meta r in
LowStar.Monotonic.Buffer.as_seq h i == Mkmeta?.repr_bytes m /\
LowStar.ImmutableBuffer.value_is i (FStar.Ghost.hide (Mkmeta?.repr_bytes m))))
(ensures LowParse.Repr.valid_if_live r) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Repr.repr_ptr",
"FStar.Monotonic.HyperStack.mem",
"Prims.unit",
"Prims.l_and",
"LowStar.ConstBuffer.live",
"LowParse.Bytes.byte",
"LowParse.Repr.__proj__Ptr__item__b",
"FStar.Seq.Base.equal",
"LowStar.Monotonic.Buffer.as_seq",
"LowStar.ImmutableBuffer.immutable_preorder",
"Prims.squash",
"LowParse.Repr.valid",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.prop",
"Prims.Nil",
"LowParse.Low.Base.Spec.valid_ext_intro",
"LowParse.Repr.preorder",
"LowParse.Repr.__proj__Mkmeta__item__parser_kind",
"LowParse.Repr.__proj__Mkmeta__item__parser",
"LowParse.Repr.slice_of_repr_ptr",
"FStar.UInt32.__uint_to_t",
"LowParse.Repr.meta",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowStar.ImmutableBuffer.ibuffer",
"LowStar.ConstBuffer.as_mbuf",
"LowParse.Repr.reveal_valid",
"Prims.eq2",
"LowStar.ConstBuffer.qual",
"LowStar.ConstBuffer.qbuf_qual",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.ConstBuffer.IMMUTABLE",
"FStar.Seq.Base.seq",
"LowParse.Repr.__proj__Mkmeta__item__repr_bytes",
"LowStar.ImmutableBuffer.value_is",
"FStar.Ghost.hide",
"LowParse.Repr.valid_if_live"
] | [] | false | false | true | false | false | let valid_if_live_intro #t (r: repr_ptr t) (h: HS.mem)
: Lemma
(requires
(C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\ valid r h /\
(let i:I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\ i `I.value_is` (Ghost.hide m.repr_bytes))))
(ensures valid_if_live r) =
| reveal_valid ();
let i:I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h': HS.mem)
: Lemma (requires C.live h' r.b /\ (B.as_seq h i) `Seq.equal` (B.as_seq h' i))
(ensures valid r h')
[SMTPat (valid r h')] =
let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
() | false |
LowParse.Repr.fsti | LowParse.Repr.valid_if_live | val valid_if_live : p: LowParse.Repr.repr_ptr t -> Prims.GTot Prims.logical | let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h'))) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 21,
"end_line": 419,
"start_col": 0,
"start_line": 408
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
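(* A hedged usage sketch, not part of this file: the `clientHello*` names
   below are illustrative assumptions. Given a slice `s` and positions
   `from`/`to` with `LP.valid_pos clientHello_parser h s from to`, a typed
   pointer would be obtained as

     let ch = mk clientHello_parser32 s from to in
     // ch : repr_ptr_p clientHello clientHello_parser, valid in the new heap

   with no heap modification (`B.modifies loc_none`). *)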
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
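(* A hedged usage sketch, illustrative only: `parser32`, `serializer32`,
   `size32`, `out`, `pos` and `x` stand for whatever instances a caller has
   in scope; none of them are defined here. Serializing a value into an
   output slice and obtaining a typed pointer to the result:

     match mk_from_serialize parser32 serializer32 size32 out pos x with
     | None   -> ()   // not enough space after `pos` in `out`
     | Some p -> ()   // p : repr_ptr_p t parser, valid, with p.meta.v == x
*)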
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
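(* Hedged sketch (names assumed, not from this file): with a jumper
   `j : LP.jumper p.meta.parser` in scope, `let n = length p j in ...`
   returns `p.meta.len` and leaves the heap unchanged. *)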
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`, | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Repr.repr_ptr t -> Prims.GTot Prims.logical | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Repr.repr_ptr",
"Prims.l_and",
"Prims.eq2",
"LowStar.ConstBuffer.qual",
"LowStar.ConstBuffer.qbuf_qual",
"LowParse.Bytes.byte",
"LowStar.ConstBuffer.as_qbuf",
"LowParse.Repr.__proj__Ptr__item__b",
"LowStar.ConstBuffer.IMMUTABLE",
"LowStar.ImmutableBuffer.value_is",
"FStar.Ghost.hide",
"FStar.Seq.Base.seq",
"LowParse.Repr.__proj__Mkmeta__item__repr_bytes",
"Prims.l_Exists",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.as_seq",
"LowStar.ImmutableBuffer.immutable_preorder",
"LowParse.Repr.valid",
"Prims.l_Forall",
"Prims.l_imp",
"LowStar.ConstBuffer.live",
"FStar.Seq.Base.equal",
"LowParse.Repr.meta",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowStar.ImmutableBuffer.ibuffer",
"LowStar.ConstBuffer.as_mbuf",
"Prims.logical"
] | [] | false | false | false | false | true | let valid_if_live #t (p: repr_ptr t) =
| C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i:I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` (Ghost.hide m.repr_bytes) /\
(exists (h: HS.mem). {:pattern valid p h}
m.repr_bytes == B.as_seq h i /\ valid p h /\
(forall h'. C.live h' p.b /\ (B.as_seq h i) `Seq.equal` (B.as_seq h' i) ==> valid p h'))) | false |
|
LowParse.Repr.fsti | LowParse.Repr.field_accessor_t | val field_accessor_t : f: LowParse.Repr.field_accessor p1 p2 -> Type | let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 35,
"end_line": 643,
"start_col": 0,
"start_line": 628
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `p:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t = p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
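(* A hedged sketch of the intended stash/recall pattern, illustrative only;
   `rgn`, `p` and `len` are assumed to be in scope with the stated types:

     let p' = stash rgn p len in
     // p' : stable_region_repr_ptr rgn t, a stable copy of p
     ...
     recall_stable_region_repr_ptr rgn p';
     // validity of p' is re-established from liveness of its region
*)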
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
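(* A hedged sketch of how an instance is meant to be declared; all names
   below are assumptions for illustration, none are defined in this file:

     unfold noextract
     let clientHello_version_field
       : field_accessor clientHello_parser protocolVersion_parser
       = FieldAccessor accessor_clientHello_version
                       protocolVersion_jumper
                       protocolVersion_parser32

   Marking the instance `unfold` keeps the lens conditions concrete at use
   sites, as the comment above explains. *)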
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3' | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: LowParse.Repr.field_accessor p1 p2 -> Type | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Repr.field_accessor",
"LowParse.Repr.repr_ptr_p",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowParse.Repr.valid",
"LowParse.Low.Base.Spec.__proj__Mkclens__item__clens_cond",
"LowParse.Repr.__proj__FieldAccessor__item__cl",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.eq2",
"LowParse.Repr.value",
"LowParse.Low.Base.Spec.__proj__Mkclens__item__clens_get",
"LowParse.Repr.sub_ptr",
"FStar.Monotonic.HyperHeap.rid",
"LowParse.Repr.region_of"
] | [] | false | false | false | false | true | let field_accessor_t
(#k1: strong_parser_kind)
#t1
(#p1: LP.parser k1 t1)
(#k2: strong_parser_kind)
(#t2: Type)
(#p2: LP.parser k2 t2)
(f: field_accessor p1 p2)
=
| p: repr_ptr_p t1 p1
-> Stack (repr_ptr_p t2 p2)
(requires fun h -> valid p h /\ f.cl.LP.clens_cond p.meta.v)
(ensures
fun h0 (q: repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\ B.modifies B.loc_none h0 h1 /\ valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\ q `sub_ptr` p /\ region_of q == region_of p) | false |
|
LowParse.Repr.fsti | LowParse.Repr.as_ptr_spec | val as_ptr_spec (#t #b: _) (p: repr_pos t b) : GTot (repr_ptr t) | val as_ptr_spec (#t #b: _) (p: repr_pos t b) : GTot (repr_ptr t) | let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 23,
"end_line": 729,
"start_col": 0,
"start_line": 724
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `p:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t = p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
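(* Illustrative sketch, not part of the original interface: once validity has
   been recalled from region liveness alone, any destructor above applies; here
   we re-extract the wire-format bytes. The helper name is hypothetical. *)
let bytes_of_stable #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
                    (len:uint_32{len == p.meta.len})
  : Stack FStar.Bytes.bytes
    (requires fun h ->
      HS.live_region h (ST.rid_of_drgn r))
    (ensures fun h0 b h1 ->
      B.modifies B.loc_none h0 h1 /\
      FStar.Bytes.reveal b == p.meta.repr_bytes)
  = recall_stable_region_repr_ptr r p;
    to_bytes p len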
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
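(* Illustrative sketch, not part of the original interface: the life cycle
   suggested by the "Stable Representations" overview above is to [stash] a
   transient pointer once, while it is still valid, and to rely on
   [recall_stable_region_repr_ptr] in any later frame where the region is
   live. The hypothetical wrapper below packages the first step and restates
   value preservation explicitly. *)
inline_for_extraction noextract
let stash_in_region (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
  : ST (stable_region_repr_ptr rgn t)
    (requires fun h ->
      valid r h /\
      HS.live_region h (ST.rid_of_drgn rgn))
    (ensures fun h0 p h1 ->
      B.modifies B.loc_none h0 h1 /\
      valid p h1 /\
      value p == value r)
  = stash rgn r len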
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
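(* Illustrative sketch, not part of the original interface: nested projections
   are obtained by composing two accessors; keeping the composite [unfold], as
   recommended above, lets [get_field] specialize it fully at extraction time.
   The helper name is hypothetical. *)
unfold noextract
let compose_accessors
  (#k1 #k2 #k3:strong_parser_kind) (#t1 #t2 #t3:Type)
  (#p1:LP.parser k1 t1) (#p2:LP.parser k2 t2) (#p3:LP.parser k3 t3)
  (f12:field_accessor p1 p2) (f23:field_accessor p2 p3)
  : field_accessor p1 p3
  = field_accessor_comp f12 f23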
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
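(* Illustrative sketch, not part of the original interface: a [field_reader]
   instance is an accessor paired with a leaf reader for the field's type; as
   with [field_accessor], concrete instances should be [unfold] so that
   [read_field] specializes to them. The helper name is hypothetical. *)
unfold noextract
let mk_field_reader
  (#k1:strong_parser_kind) (#t1 #t2:Type) (#p1:LP.parser k1 t1)
  (#k2:strong_parser_kind) (#p2:LP.parser k2 t2)
  (#cl:LP.clens t1 t2) (#g:LP.gaccessor p1 p2 cl)
  (acc:LP.accessor g) (reader:LP.leaf_reader p2)
  : field_reader p1 t2
  = FieldReader acc reader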
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: LowParse.Repr.repr_pos t b -> Prims.GTot (LowParse.Repr.repr_ptr t) | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos",
"LowParse.Repr.Ptr",
"LowStar.ConstBuffer.gsub",
"LowParse.Bytes.byte",
"LowParse.Repr.__proj__MkSlice__item__base",
"LowParse.Repr.__proj__Pos__item__start_pos",
"LowParse.Repr.__proj__Mkmeta__item__len",
"LowParse.Repr.__proj__Pos__item__meta",
"LowParse.Repr.__proj__Pos__item__vv_pos",
"LowParse.Repr.__proj__Pos__item__length",
"LowParse.Repr.repr_ptr"
] | [] | false | false | false | false | false | let as_ptr_spec #t #b (p: repr_pos t b) : GTot (repr_ptr t) =
| Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len)) (Pos?.meta p) (Pos?.vv_pos p) (Pos?.length p) | false |
Hacl.Impl.Frodo.KEM.Encaps.fst | Hacl.Impl.Frodo.KEM.Encaps.crypto_kem_enc_ct_ss | val crypto_kem_enc_ct_ss:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> mu:lbytes (bytes_mu a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> Stack unit
(requires fun h ->
live h seed_se_k /\ live h ct /\ live h ss /\ live h pk /\ live h mu /\
disjoint ct ss /\ disjoint ct pk /\ disjoint ss pk /\
disjoint mu ss /\ disjoint mu ct /\ disjoint seed_se_k ct /\ disjoint seed_se_k ss)
(ensures fun h0 _ h1 -> modifies (loc ct |+| loc ss) h0 h1 /\
(let seed_se = LSeq.sub (as_seq h0 seed_se_k) 0 (v (crypto_bytes a)) in
let k = LSeq.sub (as_seq h0 seed_se_k) (v (crypto_bytes a)) (v (crypto_bytes a)) in
as_seq h1 ct == S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) seed_se /\
as_seq h1 ss == S.crypto_kem_enc_ss a k (as_seq h1 ct))) | val crypto_kem_enc_ct_ss:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> mu:lbytes (bytes_mu a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> Stack unit
(requires fun h ->
live h seed_se_k /\ live h ct /\ live h ss /\ live h pk /\ live h mu /\
disjoint ct ss /\ disjoint ct pk /\ disjoint ss pk /\
disjoint mu ss /\ disjoint mu ct /\ disjoint seed_se_k ct /\ disjoint seed_se_k ss)
(ensures fun h0 _ h1 -> modifies (loc ct |+| loc ss) h0 h1 /\
(let seed_se = LSeq.sub (as_seq h0 seed_se_k) 0 (v (crypto_bytes a)) in
let k = LSeq.sub (as_seq h0 seed_se_k) (v (crypto_bytes a)) (v (crypto_bytes a)) in
as_seq h1 ct == S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) seed_se /\
as_seq h1 ss == S.crypto_kem_enc_ss a k (as_seq h1 ct))) | let crypto_kem_enc_ct_ss a gen_a seed_se_k mu ct ss pk =
let seed_se = sub seed_se_k 0ul (crypto_bytes a) in
let k = sub seed_se_k (crypto_bytes a) (crypto_bytes a) in
crypto_kem_enc_ct a gen_a mu pk seed_se ct;
crypto_kem_enc_ss a k ct ss | {
"file_name": "code/frodo/Hacl.Impl.Frodo.KEM.Encaps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 29,
"end_line": 366,
"start_col": 0,
"start_line": 362
} | module Hacl.Impl.Frodo.KEM.Encaps
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open LowStar.Buffer
open Lib.IntTypes
open Lib.Buffer
open Hacl.Impl.Matrix
open Hacl.Impl.Frodo.Params
open Hacl.Impl.Frodo.KEM
open Hacl.Impl.Frodo.Encode
open Hacl.Impl.Frodo.Pack
open Hacl.Impl.Frodo.Sample
open Hacl.Frodo.Random
module ST = FStar.HyperStack.ST
module LSeq = Lib.Sequence
module LB = Lib.ByteSequence
module FP = Spec.Frodo.Params
module S = Spec.Frodo.KEM.Encaps
module M = Spec.Matrix
module KG = Hacl.Impl.Frodo.KEM.KeyGen
#set-options "--z3rlimit 100 --fuel 0 --ifuel 0"
inline_for_extraction noextract
val frodo_mul_add_sa_plus_e:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h bp_matrix /\
disjoint bp_matrix seed_a /\ disjoint bp_matrix ep_matrix /\ disjoint bp_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc bp_matrix) h0 h1 /\
as_matrix h1 bp_matrix ==
S.frodo_mul_add_sa_plus_e a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix =
push_frame ();
let a_matrix = matrix_create (params_n a) (params_n a) in
frodo_gen_matrix gen_a (params_n a) seed_a a_matrix;
matrix_mul sp_matrix a_matrix bp_matrix;
matrix_add bp_matrix ep_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c1:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> c1:lbytes (ct1bytes_len a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h c1 /\
disjoint seed_a c1 /\ disjoint ep_matrix c1 /\ disjoint sp_matrix c1)
(ensures fun h0 _ h1 -> modifies (loc c1) h0 h1 /\
as_seq h1 c1 ==
S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1 =
push_frame ();
let bp_matrix = matrix_create params_nbar (params_n a) in
frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix;
frodo_pack (params_logq a) bp_matrix c1;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e:
a:FP.frodo_alg
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h epp_matrix /\ live h v_matrix /\ live h sp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix epp_matrix /\ disjoint v_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e a (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix =
push_frame ();
let b_matrix = matrix_create (params_n a) params_nbar in
frodo_unpack (params_n a) params_nbar (params_logq a) b b_matrix;
matrix_mul sp_matrix b_matrix v_matrix;
matrix_add v_matrix epp_matrix;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e_plus_mu:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h mu /\ live h v_matrix /\
live h sp_matrix /\ live h epp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix sp_matrix /\
disjoint v_matrix mu /\ disjoint v_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e_plus_mu a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix =
push_frame ();
frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix;
let mu_encode = matrix_create params_nbar params_nbar in
frodo_key_encode (params_logq a) (params_extracted_bits a) params_nbar mu mu_encode;
matrix_add v_matrix mu_encode;
clear_matrix mu_encode;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c2:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> c2:lbytes (ct2bytes_len a)
-> Stack unit
(requires fun h ->
live h mu /\ live h b /\ live h sp_matrix /\
live h epp_matrix /\ live h c2 /\
disjoint mu c2 /\ disjoint b c2 /\
disjoint sp_matrix c2 /\ disjoint epp_matrix c2)
(ensures fun h0 _ h1 -> modifies (loc c2) h0 h1 /\
as_seq h1 c2 ==
S.crypto_kem_enc_ct_pack_c2 a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
#push-options "--z3rlimit 200"
let crypto_kem_enc_ct_pack_c2 a mu b sp_matrix epp_matrix c2 =
push_frame ();
let v_matrix = matrix_create params_nbar params_nbar in
frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix;
frodo_pack (params_logq a) v_matrix c2;
clear_matrix v_matrix;
pop_frame ()
#pop-options
inline_for_extraction noextract
val get_sp_ep_epp_matrices:
a:FP.frodo_alg
-> seed_se:lbytes (crypto_bytes a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h seed_se /\ live h sp_matrix /\
live h ep_matrix /\ live h epp_matrix /\
disjoint seed_se sp_matrix /\ disjoint seed_se ep_matrix /\
disjoint seed_se epp_matrix /\ disjoint sp_matrix ep_matrix /\
disjoint sp_matrix epp_matrix /\ disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1 /\
(as_matrix h1 sp_matrix, as_matrix h1 ep_matrix, as_matrix h1 epp_matrix) ==
S.get_sp_ep_epp_matrices a (as_seq h0 seed_se))
let get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix =
push_frame ();
[@inline_let] let s_bytes_len = secretmatrixbytes_len a in
let r = create (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) (u8 0) in
KG.frodo_shake_r a (u8 0x96) seed_se (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) r;
frodo_sample_matrix a params_nbar (params_n a) (sub r 0ul s_bytes_len) sp_matrix;
frodo_sample_matrix a params_nbar (params_n a) (sub r s_bytes_len s_bytes_len) ep_matrix;
frodo_sample_matrix a params_nbar params_nbar (sub r (2ul *! s_bytes_len) (2ul *! params_nbar *! params_nbar)) epp_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct0:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> b:lbytes (publicmatrixbytes_len a)
-> mu:lbytes (bytes_mu a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> ct:lbytes (crypto_ciphertextbytes a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h b /\ live h mu /\ live h ct /\
live h sp_matrix /\ live h ep_matrix /\ live h epp_matrix /\
disjoint ct seed_a /\ disjoint ct b /\ disjoint ct mu /\
disjoint ct sp_matrix /\ disjoint ct ep_matrix /\ disjoint ct epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc ct) h0 h1 /\
(let c1:LB.lbytes (FP.ct1bytes_len a) = S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_seq h0 sp_matrix) (as_seq h0 ep_matrix) in
let c2:LB.lbytes (FP.ct2bytes_len a) = S.crypto_kem_enc_ct_pack_c2 a (as_seq h0 mu) (as_seq h0 b) (as_seq h0 sp_matrix) (as_seq h0 epp_matrix) in
v (crypto_ciphertextbytes a) == FP.ct1bytes_len a + FP.ct2bytes_len a /\
as_seq h1 ct `Seq.equal` LSeq.concat #_ #(FP.ct1bytes_len a) #(FP.ct2bytes_len a) c1 c2))
let crypto_kem_enc_ct0 a gen_a seed_a b mu sp_matrix ep_matrix epp_matrix ct =
let c1 = sub ct 0ul (ct1bytes_len a) in
let c2 = sub ct (ct1bytes_len a) (ct2bytes_len a) in
let h0 = ST.get () in
crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1;
let h1 = ST.get () in
crypto_kem_enc_ct_pack_c2 a mu b sp_matrix epp_matrix c2;
let h2 = ST.get () in
LSeq.eq_intro
(LSeq.sub (as_seq h2 ct) 0 (v (ct1bytes_len a)))
(LSeq.sub (as_seq h1 ct) 0 (v (ct1bytes_len a)));
LSeq.lemma_concat2
(v (ct1bytes_len a)) (LSeq.sub (as_seq h1 ct) 0 (v (ct1bytes_len a)))
(v (ct2bytes_len a)) (LSeq.sub (as_seq h2 ct) (v (ct1bytes_len a)) (v (ct2bytes_len a))) (as_seq h2 ct)
inline_for_extraction noextract
val clear_matrix3:
a:FP.frodo_alg
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h sp_matrix /\ live h ep_matrix /\ live h epp_matrix /\
disjoint sp_matrix ep_matrix /\ disjoint sp_matrix epp_matrix /\
disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 ->
modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1)
let clear_matrix3 a sp_matrix ep_matrix epp_matrix =
clear_matrix sp_matrix;
clear_matrix ep_matrix;
clear_matrix epp_matrix
inline_for_extraction noextract
val crypto_kem_enc_ct:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu:lbytes (bytes_mu a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se:lbytes (crypto_bytes a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> Stack unit
(requires fun h ->
live h mu /\ live h pk /\ live h seed_se /\ live h ct /\
disjoint ct mu /\ disjoint ct pk /\ disjoint ct seed_se)
(ensures fun h0 _ h1 -> modifies (loc ct) h0 h1 /\
as_seq h1 ct == S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) (as_seq h0 seed_se))
#push-options "--z3rlimit 200"
let crypto_kem_enc_ct a gen_a mu pk seed_se ct =
push_frame ();
let h0 = ST.get () in
FP.expand_crypto_publickeybytes a;
let seed_a = sub pk 0ul bytes_seed_a in
let b = sub pk bytes_seed_a (publicmatrixbytes_len a) in
let sp_matrix = matrix_create params_nbar (params_n a) in
let ep_matrix = matrix_create params_nbar (params_n a) in
let epp_matrix = matrix_create params_nbar params_nbar in
get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix;
crypto_kem_enc_ct0 a gen_a seed_a b mu sp_matrix ep_matrix epp_matrix ct;
clear_matrix3 a sp_matrix ep_matrix epp_matrix;
let h1 = ST.get () in
LSeq.eq_intro
(as_seq h1 ct)
(S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) (as_seq h0 seed_se));
pop_frame ()
#pop-options
inline_for_extraction noextract
val crypto_kem_enc_ss:
a:FP.frodo_alg
-> k:lbytes (crypto_bytes a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> Stack unit
(requires fun h ->
live h k /\ live h ct /\ live h ss /\
disjoint ct ss /\ disjoint k ct /\ disjoint k ss)
(ensures fun h0 _ h1 -> modifies (loc ss) h0 h1 /\
as_seq h1 ss == S.crypto_kem_enc_ss a (as_seq h0 k) (as_seq h0 ct))
let crypto_kem_enc_ss a k ct ss =
push_frame ();
let ss_init_len = crypto_ciphertextbytes a +! crypto_bytes a in
let shake_input_ss = create ss_init_len (u8 0) in
concat2 (crypto_ciphertextbytes a) ct (crypto_bytes a) k shake_input_ss;
frodo_shake a ss_init_len shake_input_ss (crypto_bytes a) ss;
clear_words_u8 shake_input_ss;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_seed_se_k:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> Stack unit
(requires fun h ->
live h mu /\ live h pk /\ live h seed_se_k /\
disjoint seed_se_k mu /\ disjoint seed_se_k pk)
(ensures fun h0 _ h1 -> modifies (loc seed_se_k) h0 h1 /\
as_seq h1 seed_se_k == S.crypto_kem_enc_seed_se_k a (as_seq h0 mu) (as_seq h0 pk))
let crypto_kem_enc_seed_se_k a mu pk seed_se_k =
push_frame ();
let pkh_mu = create (bytes_pkhash a +! bytes_mu a) (u8 0) in
let h0 = ST.get () in
update_sub_f h0 pkh_mu 0ul (bytes_pkhash a)
(fun h -> FP.frodo_shake a (v (crypto_publickeybytes a)) (as_seq h0 pk) (v (bytes_pkhash a)))
(fun _ -> frodo_shake a (crypto_publickeybytes a) pk (bytes_pkhash a) (sub pkh_mu 0ul (bytes_pkhash a)));
let h1 = ST.get () in
update_sub pkh_mu (bytes_pkhash a) (bytes_mu a) mu;
let h2 = ST.get () in
LSeq.eq_intro
(LSeq.sub (as_seq h2 pkh_mu) 0 (v (bytes_pkhash a)))
(LSeq.sub (as_seq h1 pkh_mu) 0 (v (bytes_pkhash a)));
LSeq.lemma_concat2
(v (bytes_pkhash a)) (LSeq.sub (as_seq h1 pkh_mu) 0 (v (bytes_pkhash a)))
(v (bytes_mu a)) (as_seq h0 mu) (as_seq h2 pkh_mu);
//concat2 (bytes_pkhash a) pkh (bytes_mu a) mu pkh_mu;
frodo_shake a (bytes_pkhash a +! bytes_mu a) pkh_mu (2ul *! crypto_bytes a) seed_se_k;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_ss:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> mu:lbytes (bytes_mu a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> Stack unit
(requires fun h ->
live h seed_se_k /\ live h ct /\ live h ss /\ live h pk /\ live h mu /\
disjoint ct ss /\ disjoint ct pk /\ disjoint ss pk /\
disjoint mu ss /\ disjoint mu ct /\ disjoint seed_se_k ct /\ disjoint seed_se_k ss)
(ensures fun h0 _ h1 -> modifies (loc ct |+| loc ss) h0 h1 /\
(let seed_se = LSeq.sub (as_seq h0 seed_se_k) 0 (v (crypto_bytes a)) in
let k = LSeq.sub (as_seq h0 seed_se_k) (v (crypto_bytes a)) (v (crypto_bytes a)) in
as_seq h1 ct == S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) seed_se /\
as_seq h1 ss == S.crypto_kem_enc_ss a k (as_seq h1 ct))) | {
"checked_file": "/",
"dependencies": [
"Spec.Matrix.fst.checked",
"Spec.Frodo.Params.fst.checked",
"Spec.Frodo.KEM.Encaps.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Matrix.fst.checked",
"Hacl.Impl.Frodo.Sample.fst.checked",
"Hacl.Impl.Frodo.Params.fst.checked",
"Hacl.Impl.Frodo.Pack.fst.checked",
"Hacl.Impl.Frodo.KEM.KeyGen.fst.checked",
"Hacl.Impl.Frodo.KEM.fst.checked",
"Hacl.Impl.Frodo.Encode.fst.checked",
"Hacl.Frodo.Random.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Frodo.KEM.Encaps.fst"
} | [
{
"abbrev": true,
"full_module": "Hacl.Impl.Frodo.KEM.KeyGen",
"short_module": "KG"
},
{
"abbrev": true,
"full_module": "Spec.Matrix",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.KEM.Encaps",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.Params",
"short_module": "FP"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "LB"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Frodo.Random",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Sample",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Pack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Encode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Params",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Matrix",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Frodo.Params.frodo_alg ->
gen_a: Spec.Frodo.Params.frodo_gen_a{Hacl.Impl.Frodo.Params.is_supported gen_a} ->
seed_se_k: Hacl.Impl.Matrix.lbytes (2ul *! Hacl.Impl.Frodo.Params.crypto_bytes a) ->
mu: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.bytes_mu a) ->
ct: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_ciphertextbytes a) ->
ss: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_bytes a) ->
pk: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_publickeybytes a)
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Spec.Frodo.Params.frodo_alg",
"Spec.Frodo.Params.frodo_gen_a",
"Prims.b2t",
"Hacl.Impl.Frodo.Params.is_supported",
"Hacl.Impl.Matrix.lbytes",
"Lib.IntTypes.op_Star_Bang",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"FStar.UInt32.__uint_to_t",
"Hacl.Impl.Frodo.Params.crypto_bytes",
"Hacl.Impl.Frodo.Params.bytes_mu",
"Hacl.Impl.Frodo.Params.crypto_ciphertextbytes",
"Hacl.Impl.Frodo.Params.crypto_publickeybytes",
"Hacl.Impl.Frodo.KEM.Encaps.crypto_kem_enc_ss",
"Prims.unit",
"Hacl.Impl.Frodo.KEM.Encaps.crypto_kem_enc_ct",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.MUT",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"Lib.Buffer.sub",
"Lib.IntTypes.uint8"
] | [] | false | true | false | false | false | let crypto_kem_enc_ct_ss a gen_a seed_se_k mu ct ss pk =
| let seed_se = sub seed_se_k 0ul (crypto_bytes a) in
let k = sub seed_se_k (crypto_bytes a) (crypto_bytes a) in
crypto_kem_enc_ct a gen_a mu pk seed_se ct;
crypto_kem_enc_ss a k ct ss | false |
LowParse.Repr.fsti | LowParse.Repr.recall_stable_region_repr_ptr | val recall_stable_region_repr_ptr (#t: _) (r: ST.drgn) (p: stable_region_repr_ptr r t)
: Stack unit
(requires fun h -> HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 -> h0 == h1 /\ valid p h1) | val recall_stable_region_repr_ptr (#t: _) (r: ST.drgn) (p: stable_region_repr_ptr r t)
: Stack unit
(requires fun h -> HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 -> h0 == h1 /\ valid p h1) | let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 28,
"end_line": 543,
"start_col": 0,
"start_line": 535
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
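(* Illustrative sketch, not part of the original interface: a mutable LowStar
   buffer of known length can be wrapped in the same way, since [C.of_buffer]
   only forgets the permission to write. The helper name is hypothetical. *)
inline_for_extraction noextract
let const_slice_of_buffer (b:B.buffer LP.byte) (len:uint_32{U32.v len <= B.length b})
  : Tot const_slice
  = MkSlice (C.of_buffer b) len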
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by erased the `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
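(* Illustrative sketch, not part of the original interface: client proofs
   typically begin by calling [reveal_valid ()], after which [valid] and its
   transparent counterpart [valid'] are interchangeable; the hypothetical
   lemma below records one direction. *)
let valid_exposes_valid' #t (p:repr_ptr t) (h:HS.mem)
  : Lemma (requires valid p h) (ensures valid' p h)
  = reveal_valid ()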
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
  as `i` remains live.
  We achieve this by relying on support for monotonic state provided
  by Low*, as described in the POPL '18 paper "Recalling a Witness".
  TODO: This also relies on an as-yet-unimplemented feature to
        atomically allocate and initialize a buffer to a chosen
        value; that feature will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `p:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
} | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: FStar.HyperStack.ST.drgn -> p: LowParse.Repr.stable_region_repr_ptr r t
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"FStar.HyperStack.ST.drgn",
"LowParse.Repr.stable_region_repr_ptr",
"LowParse.Repr.recall_stable_repr_ptr",
"Prims.unit",
"LowStar.Monotonic.Buffer.recall",
"LowParse.Bytes.byte",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowParse.Repr.__proj__Ptr__item__b",
"LowStar.ConstBuffer.cast",
"FStar.Monotonic.HyperStack.mem",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region",
"FStar.HyperStack.ST.rid_of_drgn",
"Prims.l_and",
"Prims.eq2",
"LowParse.Repr.valid"
] | [] | false | true | false | false | false | let recall_stable_region_repr_ptr #t (r: ST.drgn) (p: stable_region_repr_ptr r t)
: Stack unit
(requires fun h -> HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 -> h0 == h1 /\ valid p h1) =
| B.recall (C.cast p.b);
recall_stable_repr_ptr p | false |
LowParse.Repr.fsti | LowParse.Repr.stash | val stash (rgn: ST.drgn) (#t: _) (r: repr_ptr t) (len: uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h -> valid r h /\ HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 -> B.modifies B.loc_none h0 h1 /\ valid r' h1 /\ r.meta == r'.meta) | val stash (rgn: ST.drgn) (#t: _) (r: repr_ptr t) (len: uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h -> valid r h /\ HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 -> B.modifies B.loc_none h0 h1 /\ valid r' h1 /\ r.meta == r'.meta) | let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 4,
"end_line": 592,
"start_col": 0,
"start_line": 571
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by erased the `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: This also relies on an as-yet-unimplemented feature to atomically
allocate and initialize a buffer to a chosen value. This will soon be
added to the LowStar library.
*)
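(* Illustrative sketch of the monotonic-witness idiom this relies on
   (a hypothetical helper, not part of this interface): an immutable buffer
   that `I.value_is` some erased bytes lets us recover those bytes from
   liveness alone, via LowStar's witness/recall machinery. *)
let recall_idiom_sketch (b:I.ibuffer LP.byte) (s:Ghost.erased (Seq.seq LP.byte))
  : Stack unit
    (requires fun h -> B.live h b /\ b `I.value_is` s)
    (ensures fun h0 _ h1 -> h0 == h1 /\ B.as_seq h1 b == Ghost.reveal s)
  = I.recall_value b s (* liveness + witnessed value ==> contents equal s *)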
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
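(* A minimal usage sketch (hypothetical, illustration only): once the
   underlying buffer of a stable repr_ptr is known to be live, recalling
   re-establishes validity without re-validating the bytes, after which the
   concrete value carried by the repr can be returned. *)
let use_stable_sketch #t (r:stable_repr_ptr t)
  : Stack t
    (requires fun h -> C.live h r.b)
    (ensures fun h0 v h1 -> h0 == h1 /\ valid r h1 /\ v == r.vv)
  = recall_stable_repr_ptr r; (* liveness of r.b ==> valid r *)
    r.vv                      (* the concrete value, equal to the ghost r.meta.v *)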
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
rgn: FStar.HyperStack.ST.drgn ->
r: LowParse.Repr.repr_ptr t ->
len: FStar.Integers.uint_32{len == Mkmeta?.len (Ptr?.meta r)}
-> FStar.HyperStack.ST.ST (LowParse.Repr.stable_region_repr_ptr rgn t) | FStar.HyperStack.ST.ST | [] | [] | [
"FStar.HyperStack.ST.drgn",
"LowParse.Repr.repr_ptr",
"FStar.Integers.uint_32",
"Prims.eq2",
"LowParse.Repr.__proj__Mkmeta__item__len",
"LowParse.Repr.__proj__Ptr__item__meta",
"Prims.unit",
"LowParse.Repr.valid_if_live_intro",
"LowParse.Repr.Ptr",
"LowParse.Repr.__proj__Ptr__item__vv",
"LowParse.Repr.__proj__Ptr__item__length",
"LowParse.Low.Base.Spec.valid_facts",
"LowParse.Repr.preorder",
"LowParse.Repr.__proj__Mkmeta__item__parser_kind",
"LowParse.Repr.__proj__Mkmeta__item__parser",
"FStar.UInt32.__uint_to_t",
"LowParse.Repr.__proj__Ptr__item__b",
"LowParse.Slice.slice",
"LowParse.Repr.slice_of_const_buffer",
"LowParse.Repr.stable_region_repr_ptr",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowParse.Repr.const_slice",
"LowParse.Repr.MkSlice",
"LowStar.ConstBuffer.const_buffer",
"LowParse.Bytes.byte",
"LowParse.Repr.ralloc_and_blit",
"LowParse.Repr.reveal_valid",
"Prims.l_and",
"LowParse.Repr.valid",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region",
"FStar.HyperStack.ST.rid_of_drgn",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"LowParse.Repr.meta"
] | [] | false | true | false | false | false | let stash (rgn: ST.drgn) #t (r: repr_ptr t) (len: uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h -> valid r h /\ HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 -> B.modifies B.loc_none h0 h1 /\ valid r' h1 /\ r.meta == r'.meta) =
| reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h slice' 0ul
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p | false |
EverParse3d.InputStream.Base.fst | EverParse3d.InputStream.Base.preserved' | val preserved'
(#t: Type)
(#[EverParse3d.Util.solve_from_ctx ()] inst: input_stream_inst t)
(x: t)
(l: B.loc)
(h h': HS.mem)
: Lemma (requires (live x h /\ B.modifies l h h' /\ B.loc_disjoint (footprint x) l))
(ensures
(live x h' /\ get_remaining x h' == get_remaining x h /\ get_read x h' == get_read x h))
[
SMTPatOr
[
[SMTPat (live x h); SMTPat (B.modifies l h h')];
[SMTPat (live x h'); SMTPat (B.modifies l h h')];
[SMTPat (get_remaining x h); SMTPat (B.modifies l h h')];
[SMTPat (get_remaining x h'); SMTPat (B.modifies l h h')];
[SMTPat (get_read x h); SMTPat (B.modifies l h h')];
[SMTPat (get_read x h'); SMTPat (B.modifies l h h')]
]
] | val preserved'
(#t: Type)
(#[EverParse3d.Util.solve_from_ctx ()] inst: input_stream_inst t)
(x: t)
(l: B.loc)
(h h': HS.mem)
: Lemma (requires (live x h /\ B.modifies l h h' /\ B.loc_disjoint (footprint x) l))
(ensures
(live x h' /\ get_remaining x h' == get_remaining x h /\ get_read x h' == get_read x h))
[
SMTPatOr
[
[SMTPat (live x h); SMTPat (B.modifies l h h')];
[SMTPat (live x h'); SMTPat (B.modifies l h h')];
[SMTPat (get_remaining x h); SMTPat (B.modifies l h h')];
[SMTPat (get_remaining x h'); SMTPat (B.modifies l h h')];
[SMTPat (get_read x h); SMTPat (B.modifies l h h')];
[SMTPat (get_read x h'); SMTPat (B.modifies l h h')]
]
] | let preserved'
(#t: Type)
(# [EverParse3d.Util.solve_from_ctx ()] inst : input_stream_inst t)
(x: t)
(l: B.loc)
(h: HS.mem)
(h' : HS.mem)
: Lemma
(requires (live x h /\ B.modifies l h h' /\ B.loc_disjoint (footprint x) l))
(ensures (
live x h' /\
get_remaining x h' == get_remaining x h /\
get_read x h' == get_read x h
))
[SMTPatOr [
[SMTPat (live x h); SMTPat (B.modifies l h h')];
[SMTPat (live x h'); SMTPat (B.modifies l h h')];
[SMTPat (get_remaining x h); SMTPat (B.modifies l h h')];
[SMTPat (get_remaining x h'); SMTPat (B.modifies l h h')];
[SMTPat (get_read x h); SMTPat (B.modifies l h h')];
[SMTPat (get_read x h'); SMTPat (B.modifies l h h')];
]]
= preserved x l h h' | {
"file_name": "src/3d/prelude/EverParse3d.InputStream.Base.fst",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 20,
"end_line": 256,
"start_col": 0,
"start_line": 234
} | module EverParse3d.InputStream.Base
module U8 = FStar.UInt8
module U64 = FStar.UInt64
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module B = LowStar.Buffer
module LPE = EverParse3d.ErrorCode
module LP = LowParse.Low.Base
noextract
inline_for_extraction
class input_stream_inst (t: Type) : Type = {
live: t -> HS.mem -> Tot prop;
footprint: (x: t) -> Ghost B.loc
(requires True)
(ensures (fun y -> B.address_liveness_insensitive_locs `B.loc_includes` y));
perm_footprint: (x: t) -> Ghost B.loc
(requires True)
(ensures (fun y -> footprint x `B.loc_includes` y));
live_not_unused_in:
(x: t) ->
(h: HS.mem) ->
Lemma
(requires (live x h))
(ensures (B.loc_not_unused_in h `B.loc_includes` footprint x));
len_all: (x: t) -> GTot LPE.pos_t;
get_all: (x: t) -> Ghost (Seq.seq U8.t)
(requires True)
(ensures (fun y -> Seq.length y == U64.v (len_all x)));
get_remaining: (x: t) -> (h: HS.mem) -> Ghost (Seq.seq U8.t)
(requires (live x h))
(ensures (fun y -> Seq.length y <= U64.v (len_all x)));
get_read: (x: t) -> (h: HS.mem) -> Ghost (Seq.seq U8.t)
(requires (live x h))
(ensures (fun y -> get_all x `Seq.equal` (y `Seq.append` get_remaining x h)));
preserved:
(x: t) ->
(l: B.loc) ->
(h: HS.mem) ->
(h' : HS.mem) ->
Lemma
(requires (live x h /\ B.modifies l h h' /\ B.loc_disjoint (footprint x) l))
(ensures (
live x h' /\
get_remaining x h' == get_remaining x h /\
get_read x h' == get_read x h
));
tlen: t -> Type0;
extra_t: Type0;
has:
(# [EverParse3d.Util.solve_from_ctx () ] extra_t ) ->
(x: t) ->
(len: tlen x) ->
(pos: LPE.pos_t) ->
(n: U64.t) ->
HST.Stack bool
(requires (fun h ->
live x h /\
U64.v pos == Seq.length (get_read x h)
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
(res == true <==> Seq.length (get_remaining x h) >= U64.v n)
));
read:
(# [EverParse3d.Util.solve_from_ctx ()] extra_t ) ->
(t': Type0) ->
(k: LP.parser_kind) ->
(p: LP.parser k t') ->
(r: LP.leaf_reader p) ->
(x: t) ->
(pos: LPE.pos_t) ->
(n: U64.t) ->
HST.Stack t'
(requires (fun h ->
live x h /\
U64.v pos == Seq.length (get_read x h) /\
k.LP.parser_kind_subkind == Some LP.ParserStrong /\
k.LP.parser_kind_high == Some k.LP.parser_kind_low /\
k.LP.parser_kind_low == U64.v n /\
U64.v n > 0 /\
U64.v n < 4294967296 /\
Some? (LP.parse p (get_remaining x h))
))
(ensures (fun h dst' h' ->
let s = get_remaining x h in
B.modifies (perm_footprint x) h h' /\
Seq.length s >= U64.v n /\
LP.parse p (Seq.slice s 0 (U64.v n)) == Some (dst', U64.v n) /\
LP.parse p s == Some (dst', U64.v n) /\
live x h' /\
get_remaining x h' `Seq.equal` Seq.slice s (U64.v n) (Seq.length s)
));
skip:
(# [EverParse3d.Util.solve_from_ctx ()] extra_t ) ->
(x: t) ->
(pos: LPE.pos_t) ->
(n: U64.t) ->
HST.Stack unit
(requires (fun h ->
live x h /\
U64.v pos == Seq.length (get_read x h) /\
Seq.length (get_remaining x h) >= U64.v n
))
(ensures (fun h _ h' ->
let s = get_remaining x h in
B.modifies (perm_footprint x) h h' /\
live x h' /\
get_remaining x h' `Seq.equal` Seq.slice s (U64.v n) (Seq.length s)
));
skip_if_success:
(# [EverParse3d.Util.solve_from_ctx ()] extra_t ) ->
(x: t) ->
(pos: LPE.pos_t) ->
(res: U64.t) ->
HST.Stack unit
(requires (fun h ->
live x h /\
(LPE.is_success res ==> (
U64.v pos == Seq.length (get_read x h)) /\
U64.v res >= U64.v pos /\
U64.v pos + Seq.length (get_remaining x h) >= U64.v res
)))
(ensures (fun h _ h' ->
let s = get_remaining x h in
B.modifies (perm_footprint x) h h' /\
live x h' /\
get_remaining x h' == (if LPE.is_success res then Seq.slice s (U64.v res - U64.v pos) (Seq.length s) else get_remaining x h)
));
empty:
(# [EverParse3d.Util.solve_from_ctx ()] extra_t ) ->
(x: t) ->
(len: tlen x) ->
(pos: LPE.pos_t) ->
HST.Stack LPE.pos_t
(requires (fun h ->
live x h /\
U64.v pos == Seq.length (get_read x h)
))
(ensures (fun h res h' ->
B.modifies (perm_footprint x) h h' /\
live x h' /\
U64.v res == Seq.length (get_read x h') /\
get_remaining x h' `Seq.equal` Seq.empty
));
is_prefix_of:
(x: t) ->
(y: t) ->
Tot prop;
get_suffix:
(x: t) ->
(y: t) ->
Ghost (Seq.seq U8.t)
(requires (x `is_prefix_of` y))
(ensures (fun _ -> True));
is_prefix_of_prop:
(x: t) ->
(y: t) ->
(h: HS.mem) ->
Lemma
(requires (
live x h /\
x `is_prefix_of` y
))
(ensures (
live y h /\
get_read y h `Seq.equal` get_read x h /\
get_remaining y h `Seq.equal` (get_remaining x h `Seq.append` get_suffix x y)
));
truncate:
(x: t) ->
(pos: LPE.pos_t) ->
(n: U64.t) ->
HST.Stack t
(requires (fun h ->
live x h /\
U64.v pos == Seq.length (get_read x h) /\
U64.v n <= Seq.length (get_remaining x h)
))
(ensures (fun h res h' ->
B.modifies B.loc_none h h' /\
res `is_prefix_of` x /\
footprint res == footprint x /\
perm_footprint res == perm_footprint x /\
live res h' /\
Seq.length (get_remaining res h') == U64.v n
));
truncate_len:
(x: t) ->
(pos: LPE.pos_t) ->
(n: U64.t) ->
(res: t) ->
HST.Stack (tlen res)
(requires (fun h ->
live x h /\
U64.v pos == Seq.length (get_read x h) /\
U64.v n <= Seq.length (get_remaining x h) /\
res `is_prefix_of` x /\
footprint res == footprint x /\
perm_footprint res == perm_footprint x /\
live res h /\
Seq.length (get_remaining res h) == U64.v n
))
(ensures (fun h res_len h' ->
B.modifies B.loc_none h h'
));
}
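(* Illustrative sketch only (a hypothetical helper, not part of this
   interface): clients can be written generically against any instance of the
   class; for example, a ghost view of how many bytes remain to be consumed. *)
let remaining_length #t (#_: input_stream_inst t) (x: t) (h: HS.mem { live x h }) : GTot nat =
  Seq.length (get_remaining x h)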
let length_all #t (#_: input_stream_inst t) (x: t) : GTot nat = U64.v (len_all x) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.Tactics.Typeclasses.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"EverParse3d.Util.fst.checked",
"EverParse3d.ErrorCode.fst.checked"
],
"interface_file": false,
"source_file": "EverParse3d.InputStream.Base.fst"
} | [
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "EverParse3d.ErrorCode",
"short_module": "LPE"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": false,
"full_module": "EverParse3d.InputStream",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverParse3d.InputStream",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [
"smt.qi.eager_threshold=10"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
x: t ->
l: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
EverParse3d.InputStream.Base.live x h /\ LowStar.Monotonic.Buffer.modifies l h h' /\
LowStar.Monotonic.Buffer.loc_disjoint (EverParse3d.InputStream.Base.footprint x) l)
(ensures
EverParse3d.InputStream.Base.live x h' /\
EverParse3d.InputStream.Base.get_remaining x h' ==
EverParse3d.InputStream.Base.get_remaining x h /\
EverParse3d.InputStream.Base.get_read x h' == EverParse3d.InputStream.Base.get_read x h)
[
SMTPatOr [
[
SMTPat (EverParse3d.InputStream.Base.live x h);
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
];
[
SMTPat (EverParse3d.InputStream.Base.live x h');
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
];
[
SMTPat (EverParse3d.InputStream.Base.get_remaining x h);
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
];
[
SMTPat (EverParse3d.InputStream.Base.get_remaining x h');
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
];
[
SMTPat (EverParse3d.InputStream.Base.get_read x h);
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
];
[
SMTPat (EverParse3d.InputStream.Base.get_read x h');
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
]
]
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"EverParse3d.InputStream.Base.input_stream_inst",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"EverParse3d.InputStream.Base.preserved",
"Prims.unit",
"Prims.l_and",
"EverParse3d.InputStream.Base.live",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_disjoint",
"EverParse3d.InputStream.Base.footprint",
"Prims.squash",
"Prims.eq2",
"FStar.Seq.Base.seq",
"FStar.UInt8.t",
"EverParse3d.InputStream.Base.get_remaining",
"EverParse3d.InputStream.Base.get_read",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat_or",
"Prims.list",
"FStar.Pervasives.smt_pat",
"Prims.prop",
"Prims.Nil"
] | [] | true | false | true | false | false | let preserved'
(#t: Type)
(#[EverParse3d.Util.solve_from_ctx ()] inst: input_stream_inst t)
(x: t)
(l: B.loc)
(h h': HS.mem)
: Lemma (requires (live x h /\ B.modifies l h h' /\ B.loc_disjoint (footprint x) l))
(ensures
(live x h' /\ get_remaining x h' == get_remaining x h /\ get_read x h' == get_read x h))
[
SMTPatOr
[
[SMTPat (live x h); SMTPat (B.modifies l h h')];
[SMTPat (live x h'); SMTPat (B.modifies l h h')];
[SMTPat (get_remaining x h); SMTPat (B.modifies l h h')];
[SMTPat (get_remaining x h'); SMTPat (B.modifies l h h')];
[SMTPat (get_read x h); SMTPat (B.modifies l h h')];
[SMTPat (get_read x h'); SMTPat (B.modifies l h h')]
]
] =
| preserved x l h h' | false |
LowParse.Repr.fsti | LowParse.Repr.mk | val mk
(#k: strong_parser_kind)
(#t: _)
(#parser: LP.parser k t)
(parser32: LS.parser32 parser)
(#q: _)
(slice: LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to: uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h -> LP.valid_pos parser h slice from to)
(ensures
fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\ valid p h1 /\
C.const_sub_buffer from (to - from) p.b (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from) | val mk
(#k: strong_parser_kind)
(#t: _)
(#parser: LP.parser k t)
(parser32: LS.parser32 parser)
(#q: _)
(slice: LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to: uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h -> LP.valid_pos parser h slice from to)
(ensures
fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\ valid p h1 /\
C.const_sub_buffer from (to - from) p.b (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from) | let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 42,
"end_line": 310,
"start_col": 0,
"start_line": 296
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
parser32: LowParse.SLow.Base.parser32 parser ->
slice:
LowParse.Slice.slice (LowStar.ConstBuffer.q_preorder q LowParse.Bytes.byte)
(LowStar.ConstBuffer.q_preorder q LowParse.Bytes.byte) ->
from: FStar.Integers.uint_32 ->
to: FStar.Integers.uint_32
-> FStar.HyperStack.ST.Stack (LowParse.Repr.repr_ptr_p t parser) | FStar.HyperStack.ST.Stack | [] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.SLow.Base.parser32",
"LowStar.ConstBuffer.qual",
"LowParse.Slice.slice",
"LowStar.ConstBuffer.q_preorder",
"LowParse.Bytes.byte",
"FStar.Integers.uint_32",
"LowParse.Repr.mk_from_const_slice",
"LowParse.Repr.repr_ptr_p",
"LowParse.Repr.const_slice",
"LowParse.Repr.MkSlice",
"LowStar.ConstBuffer.of_qbuf",
"LowParse.Slice.__proj__Mkslice__item__base",
"LowParse.Slice.__proj__Mkslice__item__len",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Low.Base.Spec.valid_pos",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"LowParse.Repr.valid",
"LowStar.ConstBuffer.const_sub_buffer",
"FStar.Integers.op_Subtraction",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"LowParse.Repr.__proj__Ptr__item__b",
"Prims.eq2",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowParse.Low.Base.Spec.contents"
] | [] | false | true | false | false | false | let mk
(#k: strong_parser_kind)
#t
(#parser: LP.parser k t)
(parser32: LS.parser32 parser)
#q
(slice: LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from: uint_32)
(to: uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h -> LP.valid_pos parser h slice from to)
(ensures
fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\ valid p h1 /\
C.const_sub_buffer from (to - from) p.b (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from) =
| let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to | false |
LowParse.Repr.fsti | LowParse.Repr.recall_stable_repr_ptr | val recall_stable_repr_ptr (#t: _) (r: stable_repr_ptr t)
: Stack unit (requires fun h -> C.live h r.b) (ensures fun h0 _ h1 -> h0 == h1 /\ valid r h1) | val recall_stable_repr_ptr (#t: _) (r: stable_repr_ptr t)
: Stack unit (requires fun h -> C.live h r.b) (ensures fun h0 _ h1 -> h0 == h1 /\ valid r h1) | let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 24,
"end_line": 521,
"start_col": 0,
"start_line": 496
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
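/// A minimal usage sketch (hypothetical; the u32 serializer names are
/// assumed from LowParse.SLow.Int): serialize a value into a slice and,
/// if it fits, obtain a `repr_ptr` for the bytes just written.
(*
let write_u32_sketch (b:LP.slice mut_p mut_p) (from:uint_32{from <= b.LP.len}) (x:U32.t)
  : Stack (option (repr_ptr_p U32.t LowParse.Spec.Int.parse_u32))
    (requires fun h -> LP.live_slice h b)
    (ensures fun h0 _ h1 -> B.modifies (LP.loc_slice_from b from) h0 h1)
  = mk_from_serialize
      LowParse.SLow.Int.parse32_u32
      LowParse.SLow.Int.serialize32_u32
      LowParse.SLow.Int.size32_u32
      b from x
*)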
(*** Destructors ***)
/// Computes the length in bytes of the representation,
/// using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
   TODO: This also relies on an as-yet-unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `p:repr_ptr t` stating that,
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t = p:repr_ptr t { valid_if_live p }
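/// A sketch of the intended usage pattern (hypothetical; relies on
/// `recall_stable_repr_ptr`, defined further below): once a repr_ptr is
/// known to be stable, its validity can be recovered from liveness of its
/// buffer alone, without threading a validity invariant through the code.
(*
let use_stable_sketch #t (r:stable_repr_ptr t)
  : Stack t
    (requires fun h -> C.live h r.b)
    (ensures fun h0 _ h1 -> h0 == h1)
  = recall_stable_repr_ptr r; // re-establishes `valid r` in the current state
    r.vv
*)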
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: LowParse.Repr.stable_repr_ptr t -> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"LowParse.Repr.stable_repr_ptr",
"LowStar.ImmutableBuffer.recall_value",
"LowParse.Bytes.byte",
"Prims.unit",
"FStar.Ghost.erased",
"FStar.Seq.Base.seq",
"FStar.Ghost.hide",
"LowParse.Repr.__proj__Mkmeta__item__repr_bytes",
"LowParse.Repr.meta",
"LowParse.Repr.__proj__Ptr__item__meta",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowParse.Repr.valid",
"Prims.eq2",
"LowStar.Monotonic.Buffer.as_seq",
"LowStar.ImmutableBuffer.immutable_preorder",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.prop",
"Prims.Nil",
"LowParse.Low.Base.Spec.valid_ext_intro",
"LowParse.Repr.preorder",
"LowParse.Repr.__proj__Ptr__item__b",
"LowParse.Repr.__proj__Mkmeta__item__parser_kind",
"LowParse.Repr.__proj__Mkmeta__item__parser",
"LowParse.Repr.slice_of_repr_ptr",
"FStar.UInt32.__uint_to_t",
"LowStar.ImmutableBuffer.ibuffer",
"LowStar.ConstBuffer.to_ibuffer",
"FStar.HyperStack.ST.get",
"LowParse.Repr.reveal_valid",
"LowStar.ConstBuffer.live"
] | [] | false | true | false | false | false | let recall_stable_repr_ptr #t (r: stable_repr_ptr t)
: Stack unit (requires fun h -> C.live h r.b) (ensures fun h0 _ h1 -> h0 == h1 /\ valid r h1) =
| reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h: HS.mem)
: Lemma (requires valid r h /\ B.as_seq h i == B.as_seq h1 i)
(ensures valid r h1)
[SMTPat (valid r h)] =
let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es | false |
LowParse.Repr.fsti | LowParse.Repr.mk_from_serialize | val mk_from_serialize
(#k: strong_parser_kind)
(#t: _)
(#parser: LP.parser k t)
(#serializer: LP.serializer parser)
(parser32: LS.parser32 parser)
(serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b: LP.slice mut_p mut_p)
(from: uint_32{from <= b.LP.len})
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h -> LP.live_slice h b)
(ensures
fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None -> Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\ U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\ p.meta.v == x)) | val mk_from_serialize
(#k: strong_parser_kind)
(#t: _)
(#parser: LP.parser k t)
(#serializer: LP.serializer parser)
(parser32: LS.parser32 parser)
(serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b: LP.slice mut_p mut_p)
(from: uint_32{from <= b.LP.len})
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h -> LP.live_slice h b)
(ensures
fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None -> Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\ U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\ p.meta.v == x)) | let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 7,
"end_line": 354,
"start_col": 0,
"start_line": 319
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access its ghost (.meta.v) instead and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
parser32: LowParse.SLow.Base.parser32 parser ->
serializer32: LowParse.SLow.Base.serializer32 serializer ->
size32: LowParse.SLow.Base.size32 serializer ->
b: LowParse.Slice.slice LowParse.Repr.mut_p LowParse.Repr.mut_p ->
from: FStar.Integers.uint_32{from <= Mkslice?.len b} ->
x: t
-> FStar.HyperStack.ST.Stack (FStar.Pervasives.Native.option (LowParse.Repr.repr_ptr_p t parser)) | FStar.HyperStack.ST.Stack | [] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"LowParse.SLow.Base.parser32",
"LowParse.SLow.Base.serializer32",
"LowParse.SLow.Base.size32",
"LowParse.Slice.slice",
"LowParse.Repr.mut_p",
"FStar.Integers.uint_32",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"LowParse.Slice.__proj__Mkslice__item__len",
"FStar.Integers.op_Less",
"FStar.Pervasives.Native.None",
"LowParse.Repr.repr_ptr_p",
"FStar.Pervasives.Native.option",
"Prims.bool",
"FStar.Pervasives.Native.Some",
"LowParse.Repr.mk",
"LowStar.ConstBuffer.MUTABLE",
"Prims.unit",
"LowParse.Low.Base.Spec.serialize_valid_exact",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"FStar.Integers.int_t",
"FStar.Integers.op_Plus",
"FStar.Integers.op_Greater",
"FStar.UInt32.__uint_to_t",
"FStar.Bytes.store_bytes",
"LowStar.Monotonic.Buffer.mbuffer",
"LowParse.Bytes.byte",
"LowStar.Buffer.trivial_preorder",
"LowStar.Buffer.sub",
"LowParse.Slice.__proj__Mkslice__item__base",
"FStar.Ghost.hide",
"FStar.UInt32.t",
"LowParse.SLow.Base.bytes32",
"LowParse.SLow.Base.serializer32_correct",
"FStar.Integers.op_Subtraction",
"LowParse.SLow.Base.size32_postcond",
"LowParse.Slice.live_slice",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowParse.Slice.loc_slice_from",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Seq.Base.length",
"LowParse.Spec.Base.serialize",
"FStar.UInt32.v",
"LowParse.Repr.valid",
"Prims.eq2",
"LowStar.ConstBuffer.const_buffer",
"LowParse.Repr.__proj__Ptr__item__b",
"LowStar.ConstBuffer.gsub",
"LowStar.ConstBuffer.of_buffer",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowParse.Repr.__proj__Ptr__item__meta",
"Prims.logical"
] | [] | false | true | false | false | false | let mk_from_serialize
(#k: strong_parser_kind)
#t
(#parser: LP.parser k t)
(#serializer: LP.serializer parser)
(parser32: LS.parser32 parser)
(serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b: LP.slice mut_p mut_p)
(from: uint_32{from <= b.LP.len})
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h -> LP.live_slice h b)
(ensures
fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None -> Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\ U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\ p.meta.v == x)) =
| let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r | false |
LowParse.Repr.fsti | LowParse.Repr.repr_pos_p | val repr_pos_p : t: Type -> b: LowParse.Repr.const_slice -> parser: LowParse.Spec.Base.parser k t -> Type | let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
} | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 3,
"end_line": 741,
"start_col": 0,
"start_line": 737
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access its ghost (.meta.v) instead and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation,
/// using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
   TODO: This also relies on an as-yet-unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `p:repr_ptr t` stating that,
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t = p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
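/// A usage sketch (hypothetical): copy a transient repr_ptr into a dynamic
/// region, so that later heap modifications cannot invalidate it, and then
/// recover its validity from region liveness alone.
(*
let stash_and_recall_sketch (rgn:ST.drgn) #t (r:repr_ptr t)
  : ST (stable_region_repr_ptr rgn t)
    (requires fun h -> valid r h /\ HS.live_region h (ST.rid_of_drgn rgn))
    (ensures fun h0 r' h1 -> valid r' h1 /\ r.meta == r'.meta)
  = let r' = stash rgn r r.length in
    // ... arbitrary, unrelated heap updates may happen here ...
    recall_stable_region_repr_ptr rgn r';
    r'
*)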
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
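/// A usage sketch (all names below are hypothetical): a message module
/// typically packages the LowParse accessor, jumper and parser32 generated
/// for one of its fields into an `unfold` field_accessor, and then obtains
/// a field projection by a single instantiation of `get_field`.
(*
unfold noextract
let payload_accessor : field_accessor parse_msg parse_payload =
  FieldAccessor accessor_msg_payload jump_payload parse32_payload

let get_payload = get_field payload_accessor
// get_payload : repr_ptr_p msg parse_msg -> Stack (repr_ptr_p payload parse_payload) ...
*)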
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
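/// Similarly (hypothetical names), a base-type field is read directly off
/// the wire format by packaging its accessor with a LowParse leaf reader:
(*
unfold
let length_reader : field_reader parse_msg U32.t =
  FieldReader accessor_msg_length LowParse.Low.Int.read_u32

let read_length = read_field length_reader
// read_length : repr_ptr_p msg parse_msg -> Stack U32.t ...
*)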
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: Type -> b: LowParse.Repr.const_slice -> parser: LowParse.Spec.Base.parser k t -> Type | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.const_slice",
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Repr.repr_pos",
"Prims.l_and",
"Prims.eq2",
"LowParse.Repr.__proj__Mkmeta__item__parser_kind",
"LowParse.Repr.__proj__Pos__item__meta",
"LowParse.Repr.__proj__Mkmeta__item__parser"
] | [] | false | false | false | false | true | let repr_pos_p (t: Type) (b: const_slice) #k (parser: LP.parser k t) =
| r: repr_pos t b {r.meta.parser_kind == k /\ r.meta.parser == parser} | false |
|
LowParse.Repr.fsti | LowParse.Repr.read_field | val read_field
(#k1: strong_parser_kind)
(#t1: _)
(#p1: LP.parser k1 t1)
(#t2: _)
(f: field_reader p1 t2)
: field_reader_t f | val read_field
(#k1: strong_parser_kind)
(#t1: _)
(#p1: LP.parser k1 t1)
(#t2: _)
(f: field_reader p1 t2)
: field_reader_t f | let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 16,
"end_line": 705,
"start_col": 0,
"start_line": 696
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
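(* Illustrative sketch, not part of the original file: using `mk_from_serialize` to
   write a value into a mutable output slice and obtain a repr_ptr over the freshly
   serialized bytes. The names `p32`, `s32`, `sz32`, `out` and `x` are hypothetical
   assumptions standing for a parser32/serializer32/size32 triple, an output slice
   of type `LP.slice mut_p mut_p`, and a value of the message type.

     match mk_from_serialize p32 s32 sz32 out 0ul x with
     | None   -> ()     // output slice too small for `LP.serialize serializer x`
     | Some r -> ()     // here `valid r h1` holds and `r.meta.v == x`
*)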
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `p:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t = p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
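(* Illustrative sketch with a hypothetical region `rgn : ST.drgn` and pointer
   `r : repr_ptr t`: `stash` copies a transient repr into a region-allocated
   immutable buffer, after which validity can be recovered from liveness of the
   region alone. The concrete `r.length` field satisfies the `len == r.meta.len`
   precondition by the refinement on the `Ptr` constructor.

     let r' = stash rgn r r.length in        // r' : stable_region_repr_ptr rgn t
     // ... arbitrary footprint-disjoint heap modifications elsewhere ...
     recall_stable_region_repr_ptr rgn r'    // re-establishes `valid r' h`
*)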
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
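(* Illustrative sketch with hypothetical accessors `f_ab : field_accessor p_a p_b`
   and `f_bc : field_accessor p_b p_c`: `field_accessor_comp` fuses the two steps,
   so a nested field can be reached with a single `get_field` call, without
   materializing the intermediate repr_ptr.

     unfold let f_ac = field_accessor_comp f_ab f_bc     // field_accessor p_a p_c
     let read_inner (outer:repr_ptr_p t_a p_a) = get_field f_ac outer
*)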
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
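(* Illustrative sketch with a hypothetical accessor `acc_len` and leaf reader
   `read_len` for a fixed-size numeric field of a message parser `p_msg`:
   packaging them as a `field_reader` lets `read_field` (defined below) return the
   field's value directly, without allocating an intermediate repr_ptr.

     unfold let msg_len_reader : field_reader p_msg uint_16 = FieldReader acc_len read_len
     let get_msg_len (m:repr_ptr_p t_msg p_msg) = read_field msg_len_reader m
*)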
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: LowParse.Repr.field_reader p1 t2 -> LowParse.Repr.field_reader_t f | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Repr.field_reader",
"LowParse.Repr.repr_ptr_p",
"LowParse.Low.Base.Spec.clens",
"LowParse.Low.Base.Spec.gaccessor",
"LowParse.Low.Base.accessor",
"LowParse.Low.Base.leaf_reader",
"LowParse.Repr.preorder",
"LowParse.Repr.__proj__Ptr__item__b",
"FStar.UInt32.t",
"FStar.UInt32.__uint_to_t",
"LowParse.Slice.slice",
"LowParse.Repr.temp_slice_of_repr_ptr",
"Prims.unit",
"LowParse.Repr.reveal_valid",
"LowParse.Repr.field_reader_t"
] | [] | false | false | false | false | false | let read_field (#k1: strong_parser_kind) (#t1: _) (#p1: LP.parser k1 t1) #t2 (f: field_reader p1 t2)
: field_reader_t f =
| reveal_valid ();
fun p ->
[@@ inline_let ]let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos | false |
LowParse.Repr.fsti | LowParse.Repr.field_reader_t | val field_reader_t : f: LowParse.Repr.field_reader p1 t2 -> Type | let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p)) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 41,
"end_line": 693,
"start_col": 0,
"start_line": 682
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
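(* Illustrative sketch with a hypothetical `p32 : LS.parser32 parser`, a slice `sl`
   and positions `from`/`to` already known to satisfy
   `LP.valid_pos parser h sl from to` (e.g. established by a validator or by
   `LP.serialize_valid_exact`): `mk` packages the validated range as a repr_ptr
   whose underlying buffer is a const sub-buffer of `sl`.

     let r = mk p32 sl from to in    // valid r h1, and value r == LP.contents parser h1 sl from
*)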
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
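(* Illustrative sketch with a hypothetical jumper `jump_msg` for the parser the
   repr was built with: `length` recomputes the wire-format size of a repr without
   modifying the heap and without re-running full validation.

     let n = length r jump_msg in    // n == r.meta.len
*)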
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `p:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t = p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: LowParse.Repr.field_reader p1 t2 -> Type | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Repr.field_reader",
"LowParse.Repr.repr_ptr_p",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowParse.Repr.valid",
"LowParse.Low.Base.Spec.__proj__Mkclens__item__clens_cond",
"LowParse.Repr.__proj__FieldReader__item__cl",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.eq2",
"LowParse.Low.Base.Spec.__proj__Mkclens__item__clens_get",
"LowParse.Repr.value"
] | [] | false | false | false | false | true | let field_reader_t
(#k1: strong_parser_kind)
#t1
(#p1: LP.parser k1 t1)
(#t2: Type)
(f: field_reader p1 t2)
=
| p: repr_ptr_p t1 p1
-> Stack t2
(requires fun h -> valid p h /\ f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 -> B.modifies B.loc_none h0 h1 /\ pv == f.cl.LP.clens_get (value p)) | false |
|
LowParse.Repr.fsti | LowParse.Repr.const_buffer_of_repr_pos | val const_buffer_of_repr_pos (#t #b: _) (r: repr_pos t b) : GTot (C.const_buffer LP.byte) | val const_buffer_of_repr_pos (#t #b: _) (r: repr_pos t b) : GTot (C.const_buffer LP.byte) | let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 40,
"end_line": 734,
"start_col": 0,
"start_line": 732
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
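(* A minimal usage sketch (hypothetical client code; the names `rgn` and `r`
   and the use of the concrete `length` field are illustrative assumptions,
   not part of this interface):

     let r' = stash rgn r r.length in
     ... arbitrary footprint-preserving code ...
     recall_stable_region_repr_ptr rgn r';
     let v = value r' in ...

   `stash` and `recall_stable_region_repr_ptr` are defined later in this
   section; the recall re-establishes `valid r'` from liveness of the region
   alone, so the stored value can be read again without re-parsing. *)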
/// `valid_if_live`: A pure predicate on `p:repr_ptr t` stating that,
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t = p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice.
// Removing that from the context makes the proof instantaneous.
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr`, the main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
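/// A hypothetical instance and call site (sketch only; `parse_outer`,
/// `parse_inner`, `outer_inner_accessor`, `inner_jumper` and `inner_parser32`
/// are assumed names, not accessors defined in this file):
///
///   unfold let example_field : field_accessor parse_outer parse_inner =
///     FieldAccessor outer_inner_accessor inner_jumper inner_parser32
///
///   ... get_field example_field p ...
///
/// Marking the instance `unfold` lets the normalizer inline the record
/// projections, so the lens conditions reduce to concrete predicates and the
/// extracted code calls the accessor and jumper directly.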
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice) = i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: LowParse.Repr.repr_pos t b -> Prims.GTot (LowStar.ConstBuffer.const_buffer LowParse.Bytes.byte) | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos",
"LowStar.ConstBuffer.gsub",
"LowParse.Bytes.byte",
"LowParse.Repr.__proj__MkSlice__item__base",
"LowParse.Repr.__proj__Pos__item__start_pos",
"LowParse.Repr.__proj__Mkmeta__item__len",
"LowParse.Repr.__proj__Pos__item__meta",
"LowStar.ConstBuffer.const_buffer"
] | [] | false | false | false | false | false | let const_buffer_of_repr_pos #t #b (r: repr_pos t b) : GTot (C.const_buffer LP.byte) =
| C.gsub b.base r.start_pos r.meta.len | false |
FStar.Matrix.fst | FStar.Matrix.matrix_mul_is_right_distributive | val matrix_mul_is_right_distributive (#c:_) (#eq:_) (#m #n #p:pos) (add: CE.cm c eq)
(mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul})
(mx my: matrix c m n) (mz: matrix c n p)
: Lemma (matrix_mul add mul (matrix_add add mx my) mz `(matrix_equiv eq m p).eq`
matrix_add add (matrix_mul add mul mx mz) (matrix_mul add mul my mz)) | val matrix_mul_is_right_distributive (#c:_) (#eq:_) (#m #n #p:pos) (add: CE.cm c eq)
(mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul})
(mx my: matrix c m n) (mz: matrix c n p)
: Lemma (matrix_mul add mul (matrix_add add mx my) mz `(matrix_equiv eq m p).eq`
matrix_add add (matrix_mul add mul mx mz) (matrix_mul add mul my mz)) | let matrix_mul_is_right_distributive #c #eq #m #n #p (add: CE.cm c eq)
(mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul})
(mx my: matrix c m n) (mz: matrix c n p)
: Lemma (matrix_mul add mul (matrix_add add mx my) mz `matrix_eq_fun eq`
matrix_add add (matrix_mul add mul mx mz) (matrix_mul add mul my mz)) =
let mxy = matrix_add add mx my in
let mxz = matrix_mul add mul mx mz in
let myz = matrix_mul add mul my mz in
let lhs = matrix_mul add mul mxy mz in
let rhs = matrix_add add mxz myz in
let sum_j (f: under n -> c) = SP.foldm_snoc add (SB.init n f) in
let sum_k (f: under p -> c) = SP.foldm_snoc add (SB.init p f) in
let aux i k : Lemma (ijth lhs i k `eq.eq`
ijth rhs i k) =
let init_lhs j = mul.mult (ijth mxy i j) (ijth mz j k) in
let init_xz j = mul.mult (ijth mx i j) (ijth mz j k) in
let init_yz j = mul.mult (ijth my i j) (ijth mz j k) in
let init_rhs j = mul.mult (ijth mx i j) (ijth mz j k) `add.mult`
mul.mult (ijth my i j) (ijth mz j k) in
Classical.forall_intro eq.reflexivity;
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mxy mz i k init_lhs;
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx mz i k init_xz;
matrix_mul_ijth_eq_sum_of_seq_for_init add mul my mz i k init_yz;
SP.foldm_snoc_split_seq add (SB.init n init_xz)
(SB.init n init_yz)
(SB.init n init_rhs)
(fun j -> ());
eq.symmetry (ijth rhs i k) (sum_j init_rhs);
SP.foldm_snoc_of_equal_inits add init_lhs init_rhs;
eq.transitivity (ijth lhs i k)
(sum_j init_rhs)
(ijth rhs i k)
in matrix_equiv_from_proof eq lhs rhs aux | {
"file_name": "ulib/FStar.Matrix.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 43,
"end_line": 1172,
"start_col": 0,
"start_line": 1140
} | (*
Copyright 2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: A. Rozanov
*)
(*
In this module we provide basic definitions for working with matrices via
seqs, and define the transpose transform together with theorems asserting
that the fold of a matrix equals the fold of its transpose.
*)
module FStar.Matrix
module CE = FStar.Algebra.CommMonoid.Equiv
module CF = FStar.Algebra.CommMonoid.Fold
module SP = FStar.Seq.Permutation
module SB = FStar.Seq.Base
module SProp = FStar.Seq.Properties
module ML = FStar.Math.Lemmas
open FStar.IntegerIntervals
open FStar.Mul
open FStar.Seq.Equiv
(*
A little glossary that might help reading this file
We don't list common terms like associativity and reflexivity.
lhs, rhs left hand side, right hand side
liat subsequence of all elements except the last (tail read backwards)
snoc construction of sequence from a pair (liat, last) (cons read backwards)
un_snoc decomposition of sequence into a pair (liat, last)
foldm sum or product of all elements in a sequence using given CommMonoid
foldm_snoc recursively defined sum/product of a sequence, starting from the last element
congruence respect of equivalence ( = ) by a binary operation ( * ), a=b ==> a*x = b*x
unit identity element (xu=x, ux=x) (not to be confused with invertible elements)
*)
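(* Illustrative example (not a definition from the library): for a sequence
   s = <a; b; c> and a commutative monoid cm with operation `op` and unit u,
   un_snoc s == (<a; b>, c) -- the liat <a; b> and the last element c
   foldm_snoc cm s == c `op` (b `op` (a `op` u))
   i.e. foldm_snoc peels elements off the end of the sequence; see also
   seq_fold_decomposition further below. *)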
type matrix c m n = z:SB.seq c { SB.length z = m*n }
let seq_of_matrix #c #m #n mx = mx
let ijth #c #m #n mx i j = SB.index mx (get_ij m n i j)
let ijth_lemma #c #m #n mx i j
: Lemma (ijth mx i j == SB.index (seq_of_matrix mx) (get_ij m n i j)) = ()
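(* Layout note (illustrative): the flattened sequence is row-major. Assuming
   the interface defines get_ij m n i j = i*n + j, with get_i and get_j as the
   corresponding division and modulo, a 2x3 matrix with rows (a00 a01 a02) and
   (a10 a11 a12) is stored as < a00; a01; a02; a10; a11; a12 >, so
   ijth mx 1 2 reads SB.index mx 5. *)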
let matrix_of_seq #c m n s = s
let foldm #c #eq #m #n cm mx = SP.foldm_snoc cm mx
let matrix_fold_equals_fold_of_seq #c #eq #m #n cm mx
: Lemma (ensures foldm cm mx `eq.eq` SP.foldm_snoc cm (seq_of_matrix mx)) [SMTPat(foldm cm mx)]
= eq.reflexivity (foldm cm mx)
let matrix_fold_internal #c #eq #m #n (cm:CE.cm c eq) (mx: matrix c m n)
: Lemma (ensures foldm cm mx == SP.foldm_snoc cm mx) = ()
(* A flattened matrix (seq) constructed from generator function
Notice how the domains of both indices are strictly controlled. *)
let init #c (#m #n: pos) (generator: matrix_generator c m n)
: matrix_of generator =
let mn = m * n in
let generator_ij ij = generator (get_i m n ij) (get_j m n ij) in
let flat_indices = indices_seq mn in
let result = SProp.map_seq generator_ij flat_indices in
SProp.map_seq_len generator_ij flat_indices;
assert (SB.length result == SB.length flat_indices);
let aux (i: under m) (j: under n)
: Lemma (SB.index (SProp.map_seq generator_ij flat_indices) (get_ij m n i j) == generator i j)
= consistency_of_i_j m n i j;
consistency_of_ij m n (get_ij m n i j);
assert (generator_ij (get_ij m n i j) == generator i j);
SProp.map_seq_index generator_ij flat_indices (get_ij m n i j) in
let aux1 (ij: under mn)
: Lemma (SB.index (SProp.map_seq generator_ij flat_indices) ij == generator_ij ij)
= SProp.map_seq_index generator_ij flat_indices ij in
FStar.Classical.forall_intro aux1;
FStar.Classical.forall_intro_2 aux;
result
private let matrix_seq #c #m #n (gen: matrix_generator c m n) : (t:SB.seq c{ (SB.length t = (m*n)) /\
(forall (i: under m) (j: under n). SB.index t (get_ij m n i j) == gen i j) /\
(forall(ij: under (m*n)). SB.index t ij == gen (get_i m n ij) (get_j m n ij))
}) = init gen
(* This auxiliary lemma establishes the decomposition of the seq-matrix
into the concatenation of its first (m-1) rows and its last row (thus snoc) *)
let matrix_append_snoc_lemma #c (#m #n: pos) (generator: matrix_generator c m n)
: Lemma (matrix_seq generator == (SB.slice (matrix_seq generator) 0 ((m-1)*n))
`SB.append`
(SB.slice (matrix_seq generator) ((m-1)*n) (m*n)))
= SB.lemma_eq_elim (matrix_seq generator)
(SB.append (SB.slice (matrix_seq generator) 0 ((m-1)*n))
(SB.slice (matrix_seq generator) ((m-1)*n) (m*n)))
let matrix_seq_decomposition_lemma #c (#m:greater_than 1) (#n: pos) (generator: matrix_generator c m n)
: Lemma ((matrix_seq generator) ==
SB.append (matrix_seq #c #(m-1) #n generator)
(SB.slice (matrix_seq generator) ((m-1)*n) (m*n)))
= SB.lemma_eq_elim (matrix_seq generator)
((matrix_seq #c #(m-1) #n generator) `SB.append`
(SB.slice (matrix_seq generator) ((m-1)*n) (m*n)))
(* This auxiliary lemma establishes the equality of the fold of the entire matrix
to the op of folds of (the submatrix of the first (m-1) rows) and (the last row). *)
let matrix_fold_snoc_lemma #c #eq
(#m: not_less_than 2)
(#n: pos)
(cm: CE.cm c eq)
(generator: matrix_generator c m n)
: Lemma (assert ((m-1)*n < m*n);
SP.foldm_snoc cm (matrix_seq generator) `eq.eq`
cm.mult (SP.foldm_snoc cm (matrix_seq #c #(m-1) #n generator))
(SP.foldm_snoc cm (SB.slice (matrix_seq #c #m #n generator) ((m-1)*n) (m*n))))
= SB.lemma_eq_elim (matrix_seq generator)
((matrix_seq #c #(m-1) #n generator) `SB.append`
(SB.slice (matrix_seq generator) ((m-1)*n) (m*n)));
SP.foldm_snoc_append cm (matrix_seq #c #(m-1) #n generator)
(SB.slice (matrix_seq generator) ((m-1)*n) (m*n))
(*
There are many auxiliary lemmas like this that are extracted because
lemma_eq_elim invocations often impact verification speed more than
one might expect.
*)
let matrix_submatrix_lemma #c (#m: not_less_than 2) (#n: pos)
(generator: matrix_generator c m n)
: Lemma ((matrix_seq generator) == (matrix_seq (fun (i:under(m-1)) (j:under n) -> generator i j)
`SB.append` SB.init n (generator (m-1))))
= SB.lemma_eq_elim (matrix_seq (fun (i:under (m-1)) (j:under n) -> generator i j))
(matrix_seq #c #(m-1) #n generator);
SB.lemma_eq_elim (SB.slice (matrix_seq generator) ((m-1)*n) (m*n))
(SB.init n (generator (m-1)));
matrix_seq_decomposition_lemma generator
let matrix_seq_of_one_row_matrix #c #m #n (generator : matrix_generator c m n)
: Lemma (requires m==1)
(ensures matrix_seq generator == (SB.init n (generator 0))) =
SB.lemma_eq_elim (matrix_seq generator) (SB.init n (generator 0))
let one_row_matrix_fold_aux #c #eq #m #n (cm:CE.cm c eq) (generator : matrix_generator c m n) : Lemma
(requires m=1)
(ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i)))) /\
SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))) =
let lhs_seq = matrix_seq generator in
let rhs_seq = SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))) in
let lhs = SP.foldm_snoc cm (matrix_seq generator) in
let rhs = SP.foldm_snoc cm rhs_seq in
SP.foldm_snoc_singleton cm (SP.foldm_snoc cm (SB.init n (generator 0)));
SB.lemma_eq_elim (SB.create 1 (SP.foldm_snoc cm (SB.init n (generator 0))))
(SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))));
matrix_seq_of_one_row_matrix generator;
eq.symmetry rhs lhs
let fold_of_subgen_aux #c #eq (#m:pos{m>1}) #n (cm: CE.cm c eq) (gen: matrix_generator c m n) (subgen: matrix_generator c (m-1) n) : Lemma
(requires subgen == (fun (i: under (m-1)) (j: under n) -> gen i j))
(ensures forall (i: under (m-1)). SP.foldm_snoc cm (SB.init n (subgen i)) ==
SP.foldm_snoc cm (SB.init n (gen i))) =
let aux_pat (i: under (m-1)) : Lemma (SP.foldm_snoc cm (SB.init n (subgen i))
== SP.foldm_snoc cm (SB.init n (gen i))) =
SB.lemma_eq_elim (SB.init n (subgen i)) (SB.init n (gen i)) in
Classical.forall_intro aux_pat
let arithm_aux (m: pos{m>1}) (n: pos) : Lemma ((m-1)*n < m*n) = ()
let terminal_case_aux #c #eq (#p:pos{p=1}) #n (cm:CE.cm c eq) (generator: matrix_generator c p n) (m: pos{m<=p}) : Lemma
(ensures SP.foldm_snoc cm (SB.slice (seq_of_matrix (init generator)) 0 (m*n)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun (i:under m) -> SP.foldm_snoc cm (SB.init n (generator i)))))
= one_row_matrix_fold_aux cm generator
#push-options "--ifuel 0 --fuel 1 --z3rlimit 10"
let terminal_case_two_aux #c #eq (#p:pos) #n (cm:CE.cm c eq) (generator: matrix_generator c p n) (m: pos{m=1}) : Lemma
(ensures SP.foldm_snoc cm (SB.slice (seq_of_matrix (init generator)) 0 (m*n)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun (i:under m) -> SP.foldm_snoc cm (SB.init n (generator i)))))
=
SP.foldm_snoc_singleton cm (SP.foldm_snoc cm (SB.init n (generator 0)));
assert (SP.foldm_snoc cm (SB.init m (fun (i:under m) -> SP.foldm_snoc cm (SB.init n (generator i)))) `eq.eq`
SP.foldm_snoc cm (SB.init n (generator 0)));
let line = SB.init n (generator 0) in
let slice = SB.slice (matrix_seq generator) 0 n in
let aux (ij: under n) : Lemma (SB.index slice ij == SB.index line ij) =
Math.Lemmas.small_div ij n;
Math.Lemmas.small_mod ij n
in Classical.forall_intro aux;
SB.lemma_eq_elim line slice;
eq.symmetry (SP.foldm_snoc cm (SB.init m (fun (i:under m) -> SP.foldm_snoc cm (SB.init n (generator i)))))
(SP.foldm_snoc cm line)
#pop-options
let liat_equals_init #c (m:pos) (gen: under m -> c)
: Lemma (fst (SProp.un_snoc (SB.init m gen)) == SB.init (m-1) gen) =
SB.lemma_eq_elim (fst (SProp.un_snoc (SB.init m gen))) (SB.init (m-1) gen)
let math_aux (m n: pos) (j: under n) : Lemma (j+((m-1)*n) < m*n) = ()
let math_aux_2 (m n: pos) (j: under n) : Lemma (get_j m n (j+(m-1)*n) == j)
=
Math.Lemmas.modulo_addition_lemma j n (m-1);
Math.Lemmas.small_mod j n
let math_aux_3 (m n: pos) (j: under n) : Lemma (get_i m n (j+(m-1)*n) == (m-1))
=
Math.Lemmas.division_addition_lemma j n (m-1);
Math.Lemmas.small_div j n
let math_aux_4 (m n: pos) (j: under n) : Lemma ((j+((m-1)*n)) - ((m-1)*n) == j) = ()
let seq_eq_from_member_eq #c (n: pos) (p q: (z:SB.seq c{SB.length z=n}))
(proof: (i: under n) -> Lemma (SB.index p i == SB.index q i))
: Lemma (p == q) =
Classical.forall_intro proof;
SB.lemma_eq_elim p q
let math_wut_lemma (x: pos) : Lemma (requires x>1) (ensures x-1 > 0) = ()
(* This proof used to be very unstable, so I rewrote it with as much precision
and control over lambdas as possible.
I also left intact some trivial auxiliaries and the quake option
in order to catch regressions the moment they happen instead of several
releases later -- Alex *)
#push-options "--ifuel 0 --fuel 0 --z3rlimit 15"
#restart-solver
let rec matrix_fold_equals_double_fold #c #eq (#p:pos) #n (cm:CE.cm c eq)
(generator: matrix_generator c p n) (m: pos{m<=p})
: Lemma (ensures SP.foldm_snoc cm (SB.slice (seq_of_matrix (init generator)) 0 (m*n)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (generator i)))))
(decreases m) =
if p=1 then terminal_case_aux cm generator m
else if m=1 then terminal_case_two_aux cm generator m
else
let lhs_seq = (SB.slice (matrix_seq generator) 0 (m*n)) in
let rhs_seq_gen = fun (i: under m) -> SP.foldm_snoc cm (SB.init n (generator i)) in
let rhs_seq_subgen = fun (i: under (m-1)) -> SP.foldm_snoc cm (SB.init n (generator i)) in
let rhs_seq = SB.init m rhs_seq_gen in
let lhs = SP.foldm_snoc cm lhs_seq in
let rhs = SP.foldm_snoc cm rhs_seq in
let matrix = lhs_seq in
let submatrix = SB.slice (matrix_seq generator) 0 ((m-1)*n) in
let last_row = SB.slice (matrix_seq generator) ((m-1)*n) (m*n) in
SB.lemma_len_slice (matrix_seq generator) ((m-1)*n) (m*n);
assert (SB.length last_row = n);
SB.lemma_eq_elim matrix (SB.append submatrix last_row);
SP.foldm_snoc_append cm submatrix last_row;
matrix_fold_equals_double_fold #c #eq #p #n cm generator (m-1);
SB.lemma_eq_elim (SB.init (m-1) rhs_seq_gen)
(SB.init (m-1) rhs_seq_subgen);
let aux (j: under n) : Lemma (SB.index last_row j == generator (m-1) j) =
SB.lemma_index_app2 submatrix last_row (j+((m-1)*n));
math_aux_2 m n j;
math_aux_3 m n j;
math_aux_4 m n j;
() in Classical.forall_intro aux;
let rhs_liat, rhs_last = SProp.un_snoc rhs_seq in
let rhs_last_seq = SB.init n (generator (m-1)) in
liat_equals_init m rhs_seq_gen;
SP.foldm_snoc_decomposition cm rhs_seq;
let aux_2 (j: under n) : Lemma (SB.index last_row j == SB.index rhs_last_seq j) = () in
seq_eq_from_member_eq n last_row rhs_last_seq aux_2;
SB.lemma_eq_elim rhs_liat (SB.init (m-1) rhs_seq_gen);
cm.commutativity (SP.foldm_snoc cm submatrix) (SP.foldm_snoc cm last_row);
eq.transitivity lhs (SP.foldm_snoc cm submatrix `cm.mult` SP.foldm_snoc cm last_row)
(SP.foldm_snoc cm last_row `cm.mult` SP.foldm_snoc cm submatrix);
eq.reflexivity (SP.foldm_snoc cm last_row);
cm.congruence (SP.foldm_snoc cm last_row) (SP.foldm_snoc cm submatrix)
(SP.foldm_snoc cm last_row) (SP.foldm_snoc cm (SB.init (m-1) rhs_seq_subgen));
eq.transitivity lhs (SP.foldm_snoc cm last_row `cm.mult` SP.foldm_snoc cm submatrix) rhs
#pop-options
let matrix_fold_equals_fold_of_seq_folds #c #eq #m #n cm generator : Lemma
(ensures foldm cm (init generator) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i)))) /\
SP.foldm_snoc cm (seq_of_matrix (init generator)) `eq.eq`
SP.foldm_snoc cm (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))) =
matrix_fold_equals_double_fold cm generator m;
assert ((SB.slice (seq_of_matrix (init generator)) 0 (m*n)) == seq_of_matrix (init generator));
SB.lemma_eq_elim (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i))))
(SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (generator i))));
assert ((SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (generator i)))) ==
(SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (generator i)))));
()
(* This auxiliary lemma shows that the fold of the last line of a matrix
is equal to the corresponding fold of the generator function *)
let matrix_last_line_equals_gen_fold #c #eq
(#m #n: pos)
(cm: CE.cm c eq)
(generator: matrix_generator c m n)
: Lemma (SP.foldm_snoc cm (SB.slice (matrix_seq generator) ((m-1)*n) (m*n))
`eq.eq` CF.fold cm 0 (n-1) (generator (m-1))) =
let slice = SB.slice #c in
let foldm_snoc = SP.foldm_snoc #c #eq in
assert (matrix_seq generator == seq_of_matrix (init generator));
let init = SB.init #c in
let lemma_eq_elim = SB.lemma_eq_elim #c in
lemma_eq_elim (slice (matrix_seq generator) ((m-1)*n) (m*n))
(init n (generator (m-1)));
let g : ifrom_ito 0 (n-1) -> c = generator (m-1) in
CF.fold_equals_seq_foldm cm 0 (n-1) g;
let gen = CF.init_func_from_expr g 0 (n-1) in
eq.reflexivity (foldm_snoc cm (init (closed_interval_size 0 (n-1)) gen));
lemma_eq_elim (slice (matrix_seq generator) ((m-1)*n) (m*n))
(init (closed_interval_size 0 (n-1)) gen);
eq.symmetry (CF.fold cm 0 (n-1) (generator (m-1)))
(foldm_snoc cm (init (closed_interval_size 0 (n-1)) gen));
eq.transitivity (foldm_snoc cm (slice (matrix_seq generator) ((m-1)*n) (m*n)))
(foldm_snoc cm (init (closed_interval_size 0 (n-1)) gen))
(CF.fold cm 0 (n-1) (generator (m-1)))
(* This lemma proves that a matrix fold is the same thing as a double fold of
its generator function over the full index ranges *)
#push-options "--ifuel 0 --fuel 0"
let rec matrix_fold_aux #c #eq // lemma needed for precise generator domain control
(#gen_m #gen_n: pos) // full generator domain
(cm: CE.cm c eq)
(m: ifrom_ito 1 gen_m) (n: ifrom_ito 1 gen_n) //subdomain
(generator: matrix_generator c gen_m gen_n)
: Lemma (ensures SP.foldm_snoc cm (matrix_seq #c #m #n generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i: under m) -> CF.fold cm 0 (n-1) (generator i)))
(decreases m) =
Classical.forall_intro_2 (ijth_lemma (init generator));
let slice = SB.slice #c in
let foldm_snoc = SP.foldm_snoc #c #eq in
let lemma_eq_elim = SB.lemma_eq_elim #c in
if m = 1 then begin
matrix_fold_equals_fold_of_seq cm (init generator);
matrix_last_line_equals_gen_fold #c #eq #m #n cm generator;
CF.fold_singleton_lemma cm 0 (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i));
assert (CF.fold cm 0 (m-1) (fun (i: under m) -> CF.fold cm 0 (n-1) (generator i))
== CF.fold cm 0 (n-1) (generator 0))
end else begin
Classical.forall_intro_3 (Classical.move_requires_3 eq.transitivity);
matrix_fold_aux cm (m-1) n generator;
let outer_func (i: under m) = CF.fold cm 0 (n-1) (generator i) in
let outer_func_on_subdomain (i: under (m-1)) = CF.fold cm 0 (n-1) (generator i) in
CF.fold_equality cm 0 (m-2) outer_func_on_subdomain outer_func;
CF.fold_snoc_decomposition cm 0 (m-1) outer_func;
matrix_fold_snoc_lemma #c #eq #m #n cm generator;
matrix_last_line_equals_gen_fold #c #eq #m #n cm generator;
cm.congruence (foldm_snoc cm (matrix_seq #c #(m-1) #n generator))
(foldm_snoc cm (slice (matrix_seq #c #m #n generator) ((m-1)*n) (m*n)))
(CF.fold cm 0 (m-2) outer_func)
(CF.fold cm 0 (n-1) (generator (m-1)))
end
#pop-options
(* This lemma establishes that the fold of a matrix is equal to
nested Algebra.CommMonoid.Fold.fold over the matrix generator *)
let matrix_fold_equals_func_double_fold #c #eq #m #n cm generator
: Lemma (foldm cm (init generator) `eq.eq`
CF.fold cm 0 (m-1) (fun (i:under m) -> CF.fold cm 0 (n-1) (generator i)))
= matrix_fold_aux cm m n generator
(* This function provides the transposed matrix generator, with indices swapped.
Notice how the forall property of the result function is happily proved
automatically by z3 :) *)
let transposed_matrix_gen #c #m #n (generator: matrix_generator c m n)
: (f: matrix_generator c n m { forall i j. f j i == generator i j })
= fun j i -> generator i j
(* This lemma shows that the transposed matrix is
a permutation of the original one *)
let matrix_transpose_is_permutation #c #m #n generator
: Lemma (SP.is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n)) =
let matrix_transposed_eq_lemma #c (#m #n: pos)
(gen: matrix_generator c m n)
(ij: under (m*n))
: Lemma (SB.index (seq_of_matrix (init gen)) ij ==
SB.index (seq_of_matrix (init (transposed_matrix_gen gen))) (transpose_ji m n ij))
=
ijth_lemma (init gen) (get_i m n ij) (get_j m n ij);
ijth_lemma (init (transposed_matrix_gen gen))
(get_i n m (transpose_ji m n ij))
(get_j n m (transpose_ji m n ij));
() in
let transpose_inequality_lemma (m n: pos) (ij: under (m*n)) (kl: under (n*m))
: Lemma (requires kl <> ij) (ensures transpose_ji m n ij <> transpose_ji m n kl) =
dual_indices m n ij;
dual_indices m n kl in
Classical.forall_intro (matrix_transposed_eq_lemma generator);
Classical.forall_intro_2 (Classical.move_requires_2
(transpose_inequality_lemma m n));
SP.reveal_is_permutation (seq_of_matrix (init generator))
(seq_of_matrix (init (transposed_matrix_gen generator)))
(transpose_ji m n)
(* Fold over matrix equals fold over transposed matrix *)
let matrix_fold_equals_fold_of_transpose #c #eq #m #n
(cm: CE.cm c eq)
(gen: matrix_generator c m n)
: Lemma (foldm cm (init gen) `eq.eq`
foldm cm (init (transposed_matrix_gen gen))) =
let matrix_seq #c #m #n (g: matrix_generator c m n) = (seq_of_matrix (init g)) in
let matrix_mn = matrix_seq gen in
let matrix_nm = matrix_seq (transposed_matrix_gen gen) in
matrix_transpose_is_permutation gen;
SP.foldm_snoc_perm cm (matrix_seq gen)
(matrix_seq (transposed_matrix_gen gen))
(transpose_ji m n);
matrix_fold_equals_fold_of_seq cm (init gen);
matrix_fold_equals_fold_of_seq cm (init (transposed_matrix_gen gen));
eq.symmetry (foldm cm (init (transposed_matrix_gen gen)))
(SP.foldm_snoc cm (matrix_seq (transposed_matrix_gen gen)));
eq.transitivity (foldm cm (init gen)) (SP.foldm_snoc cm (matrix_seq gen))
(SP.foldm_snoc cm (matrix_seq (transposed_matrix_gen gen)));
eq.transitivity (foldm cm (init gen)) (SP.foldm_snoc cm (matrix_seq (transposed_matrix_gen gen)))
(foldm cm (init (transposed_matrix_gen gen)))
let matrix_eq_fun #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n) =
eq_of_seq eq (seq_of_matrix ma) (seq_of_matrix mb)
(*
Matrix equivalence, defined as element-wise equivalence of its underlying
flattened sequence, is constructed trivially from the element equivalence
and the lemmas defined above.
*)
let matrix_equiv #c (eq: CE.equiv c) (m n: pos) : CE.equiv (matrix c m n) =
CE.EQ (matrix_eq_fun eq)
(fun m -> eq_of_seq_reflexivity eq (seq_of_matrix m))
(fun ma mb -> eq_of_seq_symmetry eq (seq_of_matrix ma) (seq_of_matrix mb))
(fun ma mb mc -> eq_of_seq_transitivity eq (seq_of_matrix ma) (seq_of_matrix mb) (seq_of_matrix mc))
(* Equivalence of matrices means equivalence of all corresponding elements *)
let matrix_equiv_ijth #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n) (i: under m) (j: under n)
: Lemma (requires (matrix_equiv eq m n).eq ma mb) (ensures ijth ma i j `eq.eq` ijth mb i j) =
eq_of_seq_element_equality eq (seq_of_matrix ma) (seq_of_matrix mb)
(* Equivalence of all corresponding elements means equivalence of matrices *)
let matrix_equiv_from_element_eq #c (#m #n: pos) (eq: CE.equiv c) (ma mb: matrix c m n)
: Lemma (requires (forall (i: under m) (j: under n). ijth ma i j `eq.eq` ijth mb i j))
(ensures matrix_eq_fun eq ma mb) =
assert (SB.length (seq_of_matrix ma) = SB.length (seq_of_matrix mb));
let s1 = seq_of_matrix ma in
let s2 = seq_of_matrix mb in
assert (forall (ij: under (m*n)). SB.index s1 ij == ijth ma (get_i m n ij) (get_j m n ij));
assert (forall (ij: under (m*n)). SB.index s2 ij == ijth mb (get_i m n ij) (get_j m n ij));
assert (forall (ij: under (m*n)). SB.index s1 ij `eq.eq` SB.index s2 ij);
eq_of_seq_from_element_equality eq (seq_of_matrix ma) (seq_of_matrix mb)
(* We construct addition CommMonoid from the following definitions *)
let matrix_add_is_associative #c #eq #m #n (add: CE.cm c eq) (ma mb mc: matrix c m n)
: Lemma (matrix_add add (matrix_add add ma mb) mc `(matrix_equiv eq m n).eq`
matrix_add add ma (matrix_add add mb mc)) =
matrix_equiv_from_proof eq
(matrix_add add (matrix_add add ma mb) mc)
(matrix_add add ma (matrix_add add mb mc))
(fun i j -> add.associativity (ijth ma i j) (ijth mb i j) (ijth mc i j))
let matrix_add_is_commutative #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb: matrix c m n)
: Lemma (matrix_add add ma mb `(matrix_equiv eq m n).eq` matrix_add add mb ma) =
matrix_equiv_from_proof eq (matrix_add add ma mb) (matrix_add add mb ma)
(fun i j -> add.commutativity (ijth ma i j) (ijth mb i j))
let matrix_add_congruence #c #eq (#m #n: pos) (add: CE.cm c eq) (ma mb mc md: matrix c m n)
: Lemma (requires matrix_eq_fun eq ma mc /\ matrix_eq_fun eq mb md)
(ensures matrix_add add ma mb `matrix_eq_fun eq` matrix_add add mc md) =
matrix_equiv_from_proof eq (matrix_add add ma mb) (matrix_add add mc md)
(fun i j -> matrix_equiv_ijth eq ma mc i j;
matrix_equiv_ijth eq mb md i j;
add.congruence (ijth ma i j) (ijth mb i j)
(ijth mc i j) (ijth md i j))
let matrix_add_zero #c #eq (add: CE.cm c eq) (m n: pos)
: (z: matrix c m n { forall (i: under m) (j: under n). ijth z i j == add.unit })
= matrix_of_seq m n (SB.create (m*n) add.unit)
let matrix_add_identity #c #eq (add: CE.cm c eq) (#m #n: pos) (mx: matrix c m n)
: Lemma (matrix_add add (matrix_add_zero add m n) mx `matrix_eq_fun eq` mx) =
matrix_equiv_from_proof eq (matrix_add add (matrix_add_zero add m n) mx) mx
(fun i j -> add.identity (ijth mx i j))
let matrix_add_comm_monoid #c #eq (add: CE.cm c eq) (m n: pos)
: CE.cm (matrix c m n) (matrix_equiv eq m n)
= CE.CM (matrix_add_zero add m n)
(matrix_add add)
(matrix_add_identity add)
(matrix_add_is_associative add)
(matrix_add_is_commutative add)
(matrix_add_congruence add)
(* equivalence of addressing styles *)
let matrix_row_col_lemma #c #m #n (mx: matrix c m n) (i: under m) (j: under n)
: Lemma (ijth mx i j == SB.index (row mx i) j /\ ijth mx i j == SB.index (col mx j) i) = ()
(*
See how lemma_eq_elim is defined, note the SMTPat there.
Invoking this is often more efficient in big proofs than invoking
lemma_eq_elim directly.
*)
let seq_of_products_lemma #c #eq (mul: CE.cm c eq) (s: SB.seq c) (t: SB.seq c {SB.length t == SB.length s})
(r: SB.seq c{SB.equal r (SB.init (SB.length s) (fun (i: under (SB.length s)) -> SB.index s i `mul.mult` SB.index t i))})
: Lemma (seq_of_products mul s t == r) = ()
let dot_lemma #c #eq add mul s t
: Lemma (dot add mul s t == SP.foldm_snoc add (seq_of_products mul s t)) = ()
let matrix_mul_gen #c #eq #m #n #p (add mul: CE.cm c eq)
(mx: matrix c m n) (my: matrix c n p)
(i: under m) (k: under p)
= dot add mul (row mx i) (col my k)
let matrix_mul #c #eq #m #n #p (add mul: CE.cm c eq) (mx: matrix c m n) (my: matrix c n p)
= init (matrix_mul_gen add mul mx my)
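(* Illustrative note: unfolding dot, this is the familiar product formula
   ijth (matrix_mul add mul mx my) i k ==
     SP.foldm_snoc add (SB.init n (fun j -> ijth mx i j `mul.mult` ijth my j k)),
   stated precisely by matrix_mul_ijth_as_sum below. *)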
(* The following lemmas improve verification performance. *)
(* Sometimes this fact gets lost and needs an explicit proof *)
let seq_last_index #c (s: SB.seq c{SB.length s > 0})
: Lemma (SProp.last s == SB.index s (SB.length s - 1)) = ()
(* It often takes assert_norm to obtain the fact that
(fold s == last s `op` fold (slice s 0 (length s - 1))).
Invoking this lemma instead offers a more stable option. *)
let seq_fold_decomposition #c #eq (cm: CE.cm c eq) (s: SB.seq c{SB.length s > 0})
: Lemma (SP.foldm_snoc cm s == cm.mult (SProp.last s) (SP.foldm_snoc cm (fst (SProp.un_snoc s)))) = ()
(* Using common notation for algebraic operations instead of `mul` / `add` infix
simplifies the code and makes it more compact. *)
let rec foldm_snoc_distributivity_left #c #eq (mul add: CE.cm c eq) (a: c) (s: SB.seq c)
: Lemma (requires is_fully_distributive mul add /\ is_absorber add.unit mul)
(ensures mul.mult a (SP.foldm_snoc add s) `eq.eq`
SP.foldm_snoc add (const_op_seq mul a s))
(decreases SB.length s) =
if SB.length s > 0 then
let ((+), ( * ), (=)) = add.mult, mul.mult, eq.eq in
let sum s = SP.foldm_snoc add s in
let liat, last = SProp.un_snoc s in
let rhs_liat, rhs_last = SProp.un_snoc (const_op_seq mul a s) in
foldm_snoc_distributivity_left mul add a liat;
SB.lemma_eq_elim rhs_liat (const_op_seq mul a liat);
eq.reflexivity rhs_last;
add.congruence rhs_last (a*sum liat) rhs_last (sum rhs_liat);
eq.transitivity (a*sum s) (rhs_last + a*sum liat) (rhs_last + sum rhs_liat)
let rec foldm_snoc_distributivity_right #c #eq (mul add: CE.cm c eq) (s: SB.seq c) (a: c)
: Lemma (requires is_fully_distributive mul add /\ is_absorber add.unit mul)
(ensures mul.mult (SP.foldm_snoc add s) a `eq.eq`
SP.foldm_snoc add (seq_op_const mul s a))
(decreases SB.length s) =
if SB.length s > 0 then
let ((+), ( * ), (=)) = add.mult, mul.mult, eq.eq in
let sum s = SP.foldm_snoc add s in
let liat, last = SProp.un_snoc s in
let rhs_liat, rhs_last = SProp.un_snoc (seq_op_const mul s a) in
foldm_snoc_distributivity_right mul add liat a;
SB.lemma_eq_elim rhs_liat (seq_op_const mul liat a);
eq.reflexivity rhs_last;
add.congruence rhs_last (sum liat*a) rhs_last (sum rhs_liat);
eq.transitivity (sum s*a) (rhs_last + sum liat*a) (rhs_last + sum rhs_liat)
let foldm_snoc_distributivity_right_eq #c #eq (mul add: CE.cm c eq) (s: SB.seq c) (a: c) (r: SB.seq c)
: Lemma (requires is_fully_distributive mul add /\ is_absorber add.unit mul /\
SB.equal r (seq_op_const mul s a))
(ensures mul.mult (SP.foldm_snoc add s) a `eq.eq`
SP.foldm_snoc add r)
= foldm_snoc_distributivity_right mul add s a
let foldm_snoc_distributivity_left_eq #c #eq (mul add: CE.cm c eq) (a: c)
(s: SB.seq c)
(r: SB.seq c{SB.equal r (const_op_seq mul a s)})
: Lemma (requires is_fully_distributive mul add /\ is_absorber add.unit mul)
(ensures (mul.mult a(SP.foldm_snoc add s)) `eq.eq`
SP.foldm_snoc add r)
= foldm_snoc_distributivity_left mul add a s
let matrix_mul_ijth #c #eq #m #n #k (add mul: CE.cm c eq)
(mx: matrix c m n) (my: matrix c n k) i h
: Lemma (ijth (matrix_mul add mul mx my) i h == dot add mul (row mx i) (col my h)) = ()
let matrix_mul_ijth_as_sum #c #eq #m #n #p (add mul: CE.cm c eq)
(mx: matrix c m n) (my: matrix c n p) i k
: Lemma (ijth (matrix_mul add mul mx my) i k ==
SP.foldm_snoc add (SB.init n (fun (j: under n) -> mul.mult (ijth mx i j) (ijth my j k)))) =
let r = SB.init n (fun (j: under n) -> mul.mult (ijth mx i j) (ijth my j k)) in
assert (ijth (matrix_mul add mul mx my) i k ==
SP.foldm_snoc add (seq_of_products mul (row mx i) (col my k)));
seq_of_products_lemma mul (row mx i) (col my k) r
let matrix_mul_ijth_eq_sum_of_seq #c #eq #m #n #p (add: CE.cm c eq)
(mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul})
(mx: matrix c m n) (my: matrix c n p) (i: under m) (k: under p)
(r: SB.seq c{r `SB.equal` seq_of_products mul (row mx i) (col my k)})
: Lemma (ijth (matrix_mul add mul mx my) i k == SP.foldm_snoc add r) = ()
let double_foldm_snoc_transpose_lemma #c #eq (#m #n: pos) (cm: CE.cm c eq) (f: under m -> under n -> c)
: Lemma (SP.foldm_snoc cm (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j)))) `eq.eq`
SP.foldm_snoc cm (SB.init n (fun (j: under n) -> SP.foldm_snoc cm (SB.init m (fun (i: under m) -> f i j))))) =
Classical.forall_intro_2 (Classical.move_requires_2 eq.symmetry);
let gen : matrix_generator c m n = f in
let mx = init gen in
let mx_seq = matrix_seq gen in
matrix_fold_equals_fold_of_seq_folds cm gen;
let aux (i: under m) : Lemma (SB.init n (gen i) == SB.init n (fun (j: under n) -> f i j))
= SB.lemma_eq_elim (SB.init n (gen i))(SB.init n (fun (j: under n) -> f i j))
in Classical.forall_intro aux;
SB.lemma_eq_elim (SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (gen i))))
(SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j))));
SB.lemma_eq_elim (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j))))
(SB.init m (fun i -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j))));
matrix_transpose_is_permutation gen;
matrix_fold_equals_fold_of_transpose cm gen;
let trans_gen = transposed_matrix_gen gen in
let mx_trans = init trans_gen in
let mx_trans_seq = matrix_seq trans_gen in
matrix_fold_equals_fold_of_seq_folds cm trans_gen;
assert (foldm cm mx_trans `eq.eq`
SP.foldm_snoc cm (SB.init n (fun j -> SP.foldm_snoc cm (SB.init m (trans_gen j)))));
let aux_tr_lemma (j: under n)
: Lemma ((SB.init m (trans_gen j)) == (SB.init m (fun (i: under m) -> f i j)))
= SB.lemma_eq_elim (SB.init m (trans_gen j)) (SB.init m (fun (i: under m) -> f i j))
in Classical.forall_intro aux_tr_lemma;
SB.lemma_eq_elim (SB.init n (fun j -> SP.foldm_snoc cm (SB.init m (trans_gen j))))
(SB.init n (fun (j:under n) -> SP.foldm_snoc cm (SB.init m (fun (i: under m) -> f i j))));
assert (foldm cm mx_trans `eq.eq`
SP.foldm_snoc cm (SB.init n (fun (j:under n) -> SP.foldm_snoc cm (SB.init m (fun (i: under m) -> f i j)))));
eq.transitivity (SP.foldm_snoc cm (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j)))))
(foldm cm mx)
(foldm cm mx_trans);
eq.transitivity (SP.foldm_snoc cm (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j)))))
(foldm cm mx_trans)
(SP.foldm_snoc cm (SB.init n (fun (j:under n) -> SP.foldm_snoc cm (SB.init m (fun (i: under m) -> f i j)))))
let matrix_mul_ijth_eq_sum_of_seq_for_init #c #eq #m #n #p (add mul: CE.cm c eq)
(mx: matrix c m n) (my: matrix c n p) i k
(f: under n -> c { SB.init n f `SB.equal` seq_of_products mul (row mx i) (col my k)})
: Lemma (ijth (matrix_mul add mul mx my) i k == SP.foldm_snoc add (SB.init n f)) = ()
let double_foldm_snoc_of_equal_generators #c #eq (#m #n: pos)
(cm: CE.cm c eq)
(f g: under m -> under n -> c)
: Lemma (requires (forall (i: under m) (j: under n). f i j `eq.eq` g i j))
(ensures SP.foldm_snoc cm (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j))))
`eq.eq` SP.foldm_snoc cm (SB.init m (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> g i j))))) =
let aux i : Lemma (SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j)) `eq.eq`
SP.foldm_snoc cm (SB.init n (fun (j: under n) -> g i j)))
= SP.foldm_snoc_of_equal_inits cm (fun j -> f i j) (fun j -> g i j) in
Classical.forall_intro aux;
SP.foldm_snoc_of_equal_inits cm (fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> f i j)))
(fun (i: under m) -> SP.foldm_snoc cm (SB.init n (fun (j: under n) -> g i j)))
#push-options "--z3rlimit 15 --ifuel 0 --fuel 0"
let matrix_mul_is_associative #c #eq #m #n #p #q (add: CE.cm c eq)
(mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul})
(mx: matrix c m n) (my: matrix c n p) (mz: matrix c p q)
: Lemma (matrix_eq_fun eq ((matrix_mul add mul mx my) `matrix_mul add mul` mz)
(matrix_mul add mul mx (matrix_mul add mul my mz))) =
let rhs = mx `matrix_mul add mul` (my `matrix_mul add mul` mz) in
let lhs = (mx `matrix_mul add mul` my) `matrix_mul add mul` mz in
let mxy = matrix_mul add mul mx my in
let myz = matrix_mul add mul my mz in
let ((+), ( * ), (=)) = add.mult, mul.mult, eq.eq in
let aux i l : squash (ijth lhs i l = ijth rhs i l) =
let sum_j (f: under n -> c) = SP.foldm_snoc add (SB.init n f) in
let sum_k (f: under p -> c) = SP.foldm_snoc add (SB.init p f) in
let xy_products_init k j = ijth mx i j * ijth my j k in
let xy_cell_as_sum k = sum_j (xy_products_init k) in
let xy_cell_lemma k : Lemma (ijth mxy i k == xy_cell_as_sum k) =
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx my i k (xy_products_init k)
in Classical.forall_intro xy_cell_lemma;
let xy_z_products_init k = xy_cell_as_sum k * ijth mz k l in
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mxy mz i l xy_z_products_init;
let full_init_kj k j = (ijth mx i j * ijth my j k) * ijth mz k l in
let full_init_jk j k = (ijth mx i j * ijth my j k) * ijth mz k l in
let full_init_rh j k = ijth mx i j * (ijth my j k * ijth mz k l) in
let sum_jk (f: (under n -> under p -> c)) = sum_j (fun j -> sum_k (fun k -> f j k)) in
let sum_kj (f: (under p -> under n -> c)) = sum_k (fun k -> sum_j (fun j -> f k j)) in
let xy_z_distr k : Lemma (((xy_cell_as_sum k) * (ijth mz k l)) = sum_j (full_init_kj k))
= foldm_snoc_distributivity_right_eq mul add (SB.init n (xy_products_init k)) (ijth mz k l)
(SB.init n (full_init_kj k))
in Classical.forall_intro xy_z_distr;
SP.foldm_snoc_of_equal_inits add xy_z_products_init
(fun k -> sum_j (full_init_kj k));
double_foldm_snoc_transpose_lemma add full_init_kj;
eq.transitivity (ijth lhs i l) (sum_kj full_init_kj)
(sum_jk full_init_jk);
let aux_rh j k : Lemma (full_init_jk j k = full_init_rh j k)
= mul.associativity (ijth mx i j) (ijth my j k) (ijth mz k l)
in Classical.forall_intro_2 aux_rh;
double_foldm_snoc_of_equal_generators add full_init_jk full_init_rh;
eq.transitivity (ijth lhs i l) (sum_jk full_init_jk) (sum_jk full_init_rh);
// now expand the right hand side, fully dual to the first part of the lemma.
let yz_products_init j k = ijth my j k * ijth mz k l in
let yz_cell_as_sum j = sum_k (yz_products_init j) in
let x_yz_products_init j = ijth mx i j * yz_cell_as_sum j in
let yz_cell_lemma j : Lemma (ijth myz j l == sum_k (yz_products_init j)) =
matrix_mul_ijth_eq_sum_of_seq_for_init add mul my mz j l (yz_products_init j);
() in Classical.forall_intro yz_cell_lemma;
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx myz i l x_yz_products_init;
let x_yz_distr j : Lemma (ijth mx i j * yz_cell_as_sum j = sum_k (full_init_rh j))
= foldm_snoc_distributivity_left_eq mul add (ijth mx i j) (SB.init p (yz_products_init j))
(SB.init p (full_init_rh j))
in Classical.forall_intro x_yz_distr;
SP.foldm_snoc_of_equal_inits add x_yz_products_init (fun j -> sum_k (full_init_rh j));
eq.symmetry (ijth rhs i l) (sum_jk full_init_rh);
eq.transitivity (ijth lhs i l) (sum_jk full_init_rh) (ijth rhs i l);
() in matrix_equiv_from_proof eq lhs rhs aux
#pop-options
let matrix_mul_unit_row_lemma #c #eq m (add mul: CE.cm c eq) (i: under m)
: Lemma ((row (matrix_mul_unit add mul m) i
== (SB.create i add.unit) `SB.append`
((SB.create 1 mul.unit) `SB.append` (SB.create (m-i-1) add.unit))) /\
(row (matrix_mul_unit add mul m) i
== ((SB.create i add.unit) `SB.append` (SB.create 1 mul.unit)) `SB.append`
(SB.create (m-i-1) add.unit))) =
SB.lemma_eq_elim ((SB.create i add.unit `SB.append` SB.create 1 mul.unit)
`SB.append` (SB.create (m-i-1) add.unit))
(row (matrix_mul_unit add mul m) i);
SB.lemma_eq_elim ((SB.create i add.unit) `SB.append`
(SB.create 1 mul.unit `SB.append` SB.create (m-i-1) add.unit))
(row (matrix_mul_unit add mul m) i)
let matrix_mul_unit_col_lemma #c #eq m (add mul: CE.cm c eq) (i: under m)
: Lemma ((col (matrix_mul_unit add mul m) i
== (SB.create i add.unit) `SB.append`
((SB.create 1 mul.unit) `SB.append` (SB.create (m-i-1) add.unit))) /\
(col (matrix_mul_unit add mul m) i ==
((SB.create i add.unit) `SB.append` (SB.create 1 mul.unit)) `SB.append`
(SB.create (m-i-1) add.unit))) =
SB.lemma_eq_elim ((SB.create i add.unit `SB.append` SB.create 1 mul.unit)
`SB.append` (SB.create (m-i-1) add.unit))
(col (matrix_mul_unit add mul m) i);
SB.lemma_eq_elim ((SB.create i add.unit) `SB.append`
(SB.create 1 mul.unit `SB.append` SB.create (m-i-1) add.unit))
(col (matrix_mul_unit add mul m) i)
let seq_of_products_zeroes_lemma #c #eq #m (mul: CE.cm c eq)
(z: c{is_absorber z mul})
(s: SB.seq c{SB.length s == m})
: Lemma (ensures (eq_of_seq eq (seq_of_products mul (SB.create m z) s) (SB.create m z)))
= eq_of_seq_from_element_equality eq (seq_of_products mul (SB.create m z) s) (SB.create m z)
let rec foldm_snoc_zero_lemma #c #eq (add: CE.cm c eq) (zeroes: SB.seq c)
: Lemma (requires (forall (i: under (SB.length zeroes)). SB.index zeroes i `eq.eq` add.unit))
(ensures eq.eq (SP.foldm_snoc add zeroes) add.unit)
(decreases SB.length zeroes) =
if (SB.length zeroes < 1) then begin
assert_norm (SP.foldm_snoc add zeroes == add.unit);
eq.reflexivity add.unit
end else
let liat, last = SProp.un_snoc zeroes in
foldm_snoc_zero_lemma add liat;
add.congruence last (SP.foldm_snoc add liat) add.unit add.unit;
add.identity add.unit;
SP.foldm_snoc_decomposition add zeroes;
eq.transitivity (SP.foldm_snoc add zeroes)
(add.mult add.unit add.unit)
add.unit
let matrix_mul_unit_ijth #c #eq (add mul: CE.cm c eq) m (i j: under m)
: Lemma (ijth (matrix_mul_unit add mul m) i j == (if i=j then mul.unit else add.unit))=()
let last_equals_index #c (s: SB.seq c{SB.length s > 0})
: Lemma ((snd (SProp.un_snoc s)) == SB.index s (SB.length s - 1)) = ()
let matrix_right_mul_identity_aux_0 #c #eq #m
(add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
(i j: under m) (k:nat{k=0})
: Lemma (ensures SP.foldm_snoc add (SB.init k (fun (k: under m)
-> ijth mx i k `mul.mult`
ijth (matrix_mul_unit add mul m) k j))
`eq.eq` add.unit)
= eq.reflexivity add.unit
let rec matrix_right_mul_identity_aux_1 #c #eq #m
(add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
(i j: under m) (k:nat{k<=j})
: Lemma (ensures SP.foldm_snoc add (SB.init k (fun (k: under m)
-> ijth mx i k `mul.mult`
ijth (matrix_mul_unit add mul m) k j))
`eq.eq` add.unit)
(decreases k)
= if k = 0 then matrix_right_mul_identity_aux_0 add mul mx i j k
else
let unit = matrix_mul_unit add mul m in
let mxu = matrix_mul add mul mx unit in
let ( * ) = mul.mult in
let ( $=$ ) = eq.eq in
let gen = fun (k: under m) -> ijth mx i k * ijth unit k j in
let full = SB.init k gen in
let liat,last = SProp.un_snoc full in
matrix_right_mul_identity_aux_1 add mul mx i j (k-1);
liat_equals_init k gen;
eq.reflexivity (SP.foldm_snoc add liat);
mul.congruence last (SP.foldm_snoc add liat) add.unit (SP.foldm_snoc add liat);
eq.transitivity (last * SP.foldm_snoc add liat)
(add.unit * SP.foldm_snoc add liat)
(add.unit);
eq.reflexivity (SP.foldm_snoc add (SB.init (k-1) gen));
matrix_mul_unit_ijth add mul m (k-1) j; // This one reduces the rlimits needs to default
add.congruence last (SP.foldm_snoc add liat) add.unit add.unit;
add.identity add.unit;
SP.foldm_snoc_decomposition add full;
eq.transitivity (SP.foldm_snoc add full)
(add.mult add.unit add.unit)
add.unit
let matrix_right_mul_identity_aux_2 #c #eq #m
(add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
(i j: under m) (k:nat{k=j+1})
: Lemma (ensures SP.foldm_snoc add (SB.init k (fun (k: under m)
-> ijth mx i k `mul.mult`
ijth (matrix_mul_unit add mul m) k j))
`eq.eq` ijth mx i j) =
let unit = matrix_mul_unit add mul m in
let mxu = matrix_mul add mul mx unit in
let ( * ) = mul.mult in
let ( $=$ ) = eq.eq in
let gen = fun (k: under m) -> ijth mx i k * ijth unit k j in
let full = SB.init k gen in
let liat,last = SProp.un_snoc full in
matrix_right_mul_identity_aux_1 add mul mx i j j;
liat_equals_init k gen;
mul.identity (ijth mx i j);
eq.reflexivity last;
add.congruence last (SP.foldm_snoc add liat) last add.unit;
  matrix_mul_unit_ijth add mul m (k-1) j; // This one reduces the rlimit needed to the default
add.identity last;
add.commutativity last add.unit;
mul.commutativity (ijth mx i j) mul.unit;
eq.transitivity (add.mult last add.unit) (add.mult add.unit last) last;
SP.foldm_snoc_decomposition add full;
eq.transitivity (SP.foldm_snoc add full) (add.mult last add.unit) last;
eq.transitivity last (mul.unit * ijth mx i j) (ijth mx i j);
eq.transitivity (SP.foldm_snoc add full) last (ijth mx i j)
let rec matrix_right_mul_identity_aux_3 #c #eq #m
(add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
(i j: under m) (k:under (m+1){k>j+1})
: Lemma (ensures SP.foldm_snoc add (SB.init k
(fun (k: under m) -> ijth mx i k `mul.mult` ijth (matrix_mul_unit add mul m) k j))
`eq.eq` ijth mx i j)
(decreases k) =
if (k-1) > j+1 then matrix_right_mul_identity_aux_3 add mul mx i j (k-1)
else matrix_right_mul_identity_aux_2 add mul mx i j (k-1);
let unit = matrix_mul_unit add mul m in
let mxu = matrix_mul add mul mx unit in
let ( * ) = mul.mult in
let ( $=$ ) = eq.eq in
let gen = fun (k: under m) -> ijth mx i k * ijth unit k j in
let subgen (i: under (k)) = gen i in
let full = SB.init k gen in
SP.foldm_snoc_decomposition add full;
liat_equals_init k gen;
let liat,last = SProp.un_snoc full in
SB.lemma_eq_elim liat (SB.init (k-1) gen);
add.identity add.unit;
mul.commutativity (ijth mx i (k-1)) add.unit;
eq.reflexivity (SP.foldm_snoc add (SB.init (k-1) gen));
  matrix_mul_unit_ijth add mul m (k-1) j; // This one reduces the rlimit needed to the default
add.congruence last (SP.foldm_snoc add (SB.init (k-1) gen))
add.unit (SP.foldm_snoc add (SB.init (k-1) gen));
add.identity (SP.foldm_snoc add (SB.init (k-1) gen));
eq.transitivity (SP.foldm_snoc add full)
(add.mult add.unit (SP.foldm_snoc add (SB.init (k-1) gen)))
(SP.foldm_snoc add (SB.init (k-1) gen));
eq.transitivity (SP.foldm_snoc add full)
(SP.foldm_snoc add (SB.init (k-1) gen))
(ijth mx i j)
let matrix_right_identity_aux #c #eq #m
(add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
(i j: under m) (k:under (m+1))
: Lemma (ensures SP.foldm_snoc add (SB.init k
(fun (k: under m) -> ijth mx i k `mul.mult` ijth (matrix_mul_unit add mul m) k j))
`eq.eq`
(if k>j then ijth mx i j else add.unit))
(decreases k) =
if k=0 then matrix_right_mul_identity_aux_0 add mul mx i j k
else if k <= j then matrix_right_mul_identity_aux_1 add mul mx i j k
else if k = j+1 then matrix_right_mul_identity_aux_2 add mul mx i j k
else matrix_right_mul_identity_aux_3 add mul mx i j k
let matrix_left_mul_identity_aux_0 #c #eq #m
(add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
(i j: under m) (k:nat{k=0})
: Lemma (ensures SP.foldm_snoc add (SB.init k
(fun (k: under m) -> ijth (matrix_mul_unit add mul m) i k `mul.mult` ijth mx k j))
`eq.eq` add.unit) = eq.reflexivity add.unit
let rec matrix_left_mul_identity_aux_1 #c #eq #m
(add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
(i j: under m) (k:nat{k<=i /\ k>0})
: Lemma (ensures SP.foldm_snoc add (SB.init k
(fun (k: under m) -> ijth (matrix_mul_unit add mul m) i k `mul.mult` ijth mx k j))
`eq.eq` add.unit) =
let unit = matrix_mul_unit add mul m in
let mxu = matrix_mul add mul mx unit in
let ( * ) = mul.mult in
let ( $=$ ) = eq.eq in
let gen (k: under m) = ijth unit i k * ijth mx k j in
let full = SB.init k gen in
let liat,last = SProp.un_snoc full in
if k=1 then matrix_left_mul_identity_aux_0 add mul mx i j (k-1)
else matrix_left_mul_identity_aux_1 add mul mx i j (k-1);
liat_equals_init k gen;
eq.reflexivity (SP.foldm_snoc add liat);
SP.foldm_snoc_decomposition add full;
mul.congruence last (SP.foldm_snoc add liat) add.unit (SP.foldm_snoc add liat);
eq.transitivity (last * SP.foldm_snoc add liat)
(add.unit * SP.foldm_snoc add liat)
(add.unit);
add.congruence last (SP.foldm_snoc add liat) add.unit add.unit;
add.identity add.unit;
eq.transitivity (SP.foldm_snoc add full)
(add.mult add.unit add.unit)
add.unit
#push-options "--z3rlimit 20"
let matrix_left_mul_identity_aux_2 #c #eq #m
(add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
(i j: under m) (k:nat{k=i+1})
: Lemma (ensures SP.foldm_snoc add (SB.init k
(fun (k: under m) -> ijth (matrix_mul_unit add mul m) i k `mul.mult` ijth mx k j))
`eq.eq` ijth mx i j) =
let unit = matrix_mul_unit add mul m in
let mxu = matrix_mul add mul mx unit in
let ( * ) = mul.mult in
let ( $=$ ) = eq.eq in
let gen (k: under m) = ijth unit i k * ijth mx k j in
let full = SB.init k gen in
let liat,last = SProp.un_snoc full in
assert (k-1 <= i /\ k-1 >= 0);
if (k-1)=0 then matrix_left_mul_identity_aux_0 add mul mx i j (k-1)
else matrix_left_mul_identity_aux_1 add mul mx i j (k-1);
  matrix_mul_unit_ijth add mul m i (k-1); // This one reduces the rlimit needed to the default
SP.foldm_snoc_decomposition add full;
liat_equals_init k gen;
mul.identity (ijth mx i j);
eq.reflexivity last;
add.congruence last (SP.foldm_snoc add liat) last add.unit;
add.identity last;
add.commutativity last add.unit;
mul.commutativity (ijth mx i j) mul.unit;
eq.transitivity (add.mult last add.unit) (add.mult add.unit last) last;
eq.transitivity (SP.foldm_snoc add full) (add.mult last add.unit) last;
eq.transitivity last (mul.unit * ijth mx i j) (ijth mx i j);
eq.transitivity (SP.foldm_snoc add full) last (ijth mx i j)
let rec matrix_left_mul_identity_aux_3 #c #eq #m
(add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
(i j: under m) (k:under(m+1){k>i+1})
: Lemma (ensures SP.foldm_snoc add (SB.init k
(fun (k: under m) -> ijth (matrix_mul_unit add mul m) i k `mul.mult` ijth mx k j))
`eq.eq` ijth mx i j) =
let unit = matrix_mul_unit add mul m in
let mxu = matrix_mul add mul mx unit in
let ( * ) = mul.mult in
let ( $=$ ) = eq.eq in
let gen (k: under m) = ijth unit i k * ijth mx k j in
let full = SB.init k gen in
if (k-1 = i+1) then matrix_left_mul_identity_aux_2 add mul mx i j (k-1)
else matrix_left_mul_identity_aux_3 add mul mx i j (k-1);
  matrix_mul_unit_ijth add mul m i (k-1); // This one reduces the rlimit needed to the default
SP.foldm_snoc_decomposition add full;
liat_equals_init k gen;
let liat,last = SProp.un_snoc full in
SB.lemma_eq_elim liat (SB.init (k-1) gen);
add.identity add.unit;
mul.commutativity (ijth mx i (k-1)) add.unit;
eq.reflexivity (SP.foldm_snoc add (SB.init (k-1) gen));
add.congruence last (SP.foldm_snoc add (SB.init (k-1) gen))
add.unit (SP.foldm_snoc add (SB.init (k-1) gen));
add.identity (SP.foldm_snoc add (SB.init (k-1) gen));
eq.transitivity (SP.foldm_snoc add full)
(add.mult add.unit (SP.foldm_snoc add (SB.init (k-1) gen)))
(SP.foldm_snoc add (SB.init (k-1) gen));
eq.transitivity (SP.foldm_snoc add full)
(SP.foldm_snoc add (SB.init (k-1) gen))
(ijth mx i j)
let matrix_left_identity_aux #c #eq #m
(add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
(i j: under m) (k:under (m+1))
: Lemma (ensures SP.foldm_snoc add (SB.init k
(fun (k: under m) -> ijth (matrix_mul_unit add mul m) i k `mul.mult` ijth mx k j))
`eq.eq` (if k>i then ijth mx i j else add.unit))
(decreases k) =
if k=0 then matrix_left_mul_identity_aux_0 add mul mx i j k
else if k <= i then matrix_left_mul_identity_aux_1 add mul mx i j k
else if k = i+1 then matrix_left_mul_identity_aux_2 add mul mx i j k
else matrix_left_mul_identity_aux_3 add mul mx i j k
let matrix_mul_right_identity #c #eq #m (add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
: Lemma (matrix_mul add mul mx (matrix_mul_unit add mul m) `matrix_eq_fun eq` mx) =
let unit = matrix_mul_unit add mul m in
let mxu = matrix_mul add mul mx unit in
let ( * ) = mul.mult in
let ( $=$ ) = eq.eq in
let aux (i j: under m) : Lemma (ijth mxu i j $=$ ijth mx i j) =
let gen = fun (k: under m) -> ijth mx i k * ijth unit k j in
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx unit i j gen;
let seq = SB.init m gen in
matrix_right_identity_aux add mul mx i j m
in Classical.forall_intro_2 aux;
matrix_equiv_from_element_eq eq mxu mx
let matrix_mul_left_identity #c #eq #m (add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
: Lemma (matrix_mul add mul (matrix_mul_unit add mul m) mx `matrix_eq_fun eq` mx) =
let unit = matrix_mul_unit add mul m in
let mxu = matrix_mul add mul unit mx in
let ( * ) = mul.mult in
let ( $=$ ) = eq.eq in
let aux (i j: under m) : squash (ijth mxu i j $=$ ijth mx i j) =
let gen (k: under m) = ijth unit i k * ijth mx k j in
matrix_mul_ijth_eq_sum_of_seq_for_init add mul unit mx i j gen;
let seq = SB.init m gen in
matrix_left_identity_aux add mul mx i j m
in
matrix_equiv_from_proof eq mxu mx aux
let matrix_mul_identity #c #eq #m (add: CE.cm c eq)
(mul: CE.cm c eq{is_absorber add.unit mul})
(mx: matrix c m m)
: Lemma (matrix_mul add mul mx (matrix_mul_unit add mul m) `matrix_eq_fun eq` mx /\
matrix_mul add mul (matrix_mul_unit add mul m) mx `matrix_eq_fun eq` mx) =
matrix_mul_left_identity add mul mx;
matrix_mul_right_identity add mul mx
let dot_of_equal_sequences #c #eq (add mul: CE.cm c eq) m
(p q r s: (z:SB.seq c{SB.length z == m}))
: Lemma (requires eq_of_seq eq p r /\ eq_of_seq eq q s)
(ensures eq.eq (dot add mul p q) (dot add mul r s)) =
eq_of_seq_element_equality eq p r;
eq_of_seq_element_equality eq q s;
let aux (i: under (SB.length p)) : Lemma (SB.index (seq_of_products mul p q) i `eq.eq`
SB.index (seq_of_products mul r s) i)
= mul.congruence (SB.index p i) (SB.index q i) (SB.index r i) (SB.index s i)
in Classical.forall_intro aux;
eq_of_seq_from_element_equality eq (seq_of_products mul p q) (seq_of_products mul r s);
SP.foldm_snoc_equality add (seq_of_products mul p q) (seq_of_products mul r s)
let matrix_mul_congruence #c #eq #m #n #p (add mul: CE.cm c eq)
(mx: matrix c m n) (my: matrix c n p)
(mz: matrix c m n) (mw: matrix c n p)
: Lemma (requires matrix_eq_fun eq mx mz /\ matrix_eq_fun eq my mw)
(ensures matrix_eq_fun eq (matrix_mul add mul mx my) (matrix_mul add mul mz mw)) =
let aux (i: under m) (k: under p) : Lemma (ijth (matrix_mul add mul mx my) i k
`eq.eq` ijth (matrix_mul add mul mz mw) i k) =
let init_xy (j: under n) = mul.mult (ijth mx i j) (ijth my j k) in
let init_zw (j: under n) = mul.mult (ijth mz i j) (ijth mw j k) in
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx my i k init_xy;
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mz mw i k init_zw;
let sp_xy = SB.init n init_xy in
let sp_zw = SB.init n init_zw in
let all_eq (j: under n) : Lemma (init_xy j `eq.eq` init_zw j) =
matrix_equiv_ijth eq mx mz i j;
matrix_equiv_ijth eq my mw j k;
mul.congruence (ijth mx i j) (ijth my j k) (ijth mz i j) (ijth mw j k)
in Classical.forall_intro all_eq;
eq_of_seq_from_element_equality eq sp_xy sp_zw;
SP.foldm_snoc_equality add sp_xy sp_zw
in matrix_equiv_from_proof eq (matrix_mul add mul mx my) (matrix_mul add mul mz mw) aux
#push-options "--z3rlimit 30 --ifuel 0 --fuel 0"
let matrix_mul_is_left_distributive #c #eq #m #n #p (add: CE.cm c eq)
(mul: CE.cm c eq{is_fully_distributive mul add /\ is_absorber add.unit mul})
(mx: matrix c m n) (my mz: matrix c n p)
: Lemma (matrix_mul add mul mx (matrix_add add my mz) `matrix_eq_fun eq`
matrix_add add (matrix_mul add mul mx my) (matrix_mul add mul mx mz)) =
let myz = matrix_add add my mz in
let mxy = matrix_mul add mul mx my in
let mxz = matrix_mul add mul mx mz in
let lhs = matrix_mul add mul mx myz in
let rhs = matrix_add add mxy mxz in
let sum_j (f: under n -> c) = SP.foldm_snoc add (SB.init n f) in
let sum_k (f: under p -> c) = SP.foldm_snoc add (SB.init p f) in
let aux i k : Lemma (ijth lhs i k `eq.eq` ijth rhs i k) =
let init_lhs j = mul.mult (ijth mx i j) (ijth myz j k) in
let init_xy j = mul.mult (ijth mx i j) (ijth my j k) in
let init_xz j = mul.mult (ijth mx i j) (ijth mz j k) in
let init_rhs j = mul.mult (ijth mx i j) (ijth my j k) `add.mult`
mul.mult (ijth mx i j) (ijth mz j k) in
Classical.forall_intro eq.reflexivity;
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx myz i k init_lhs;
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx my i k init_xy;
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx mz i k init_xz;
SP.foldm_snoc_split_seq add (SB.init n init_xy)
(SB.init n init_xz)
(SB.init n init_rhs)
(fun j -> ());
eq.symmetry (ijth rhs i k) (sum_j init_rhs);
SP.foldm_snoc_of_equal_inits add init_lhs init_rhs;
eq.transitivity (ijth lhs i k)
(sum_j init_rhs)
(ijth rhs i k)
in matrix_equiv_from_proof eq lhs rhs aux
#pop-options | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.Permutation.fsti.checked",
"FStar.Seq.Equiv.fsti.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Classical.fsti.checked",
"FStar.Algebra.CommMonoid.Fold.fsti.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": true,
"source_file": "FStar.Matrix.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Seq.Equiv",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Seq.Properties",
"short_module": "SProp"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Math.Lemmas",
"short_module": "ML"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "SB"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Permutation",
"short_module": "SP"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Fold",
"short_module": "CF"
},
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
add: FStar.Algebra.CommMonoid.Equiv.cm c eq ->
mul:
FStar.Algebra.CommMonoid.Equiv.cm c eq
{FStar.Matrix.is_fully_distributive mul add /\ FStar.Matrix.is_absorber (CM?.unit add) mul} ->
mx: FStar.Matrix.matrix c m n ->
my: FStar.Matrix.matrix c m n ->
mz: FStar.Matrix.matrix c n p
-> FStar.Pervasives.Lemma
(ensures
EQ?.eq (FStar.Matrix.matrix_equiv eq m p)
(FStar.Matrix.matrix_mul add mul (FStar.Matrix.matrix_add add mx my) mz)
(FStar.Matrix.matrix_add add
(FStar.Matrix.matrix_mul add mul mx mz)
(FStar.Matrix.matrix_mul add mul my mz))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"Prims.pos",
"FStar.Algebra.CommMonoid.Equiv.cm",
"Prims.l_and",
"FStar.Matrix.is_fully_distributive",
"FStar.Matrix.is_absorber",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit",
"FStar.Matrix.matrix",
"FStar.Matrix.matrix_equiv_from_proof",
"FStar.IntegerIntervals.under",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__eq",
"FStar.Matrix.ijth",
"Prims.Nil",
"FStar.Pervasives.pattern",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__transitivity",
"FStar.Seq.Permutation.foldm_snoc_of_equal_inits",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__symmetry",
"FStar.Seq.Permutation.foldm_snoc_split_seq",
"FStar.Seq.Base.init",
"FStar.Seq.Base.length",
"FStar.Matrix.matrix_mul_ijth_eq_sum_of_seq_for_init",
"FStar.Classical.forall_intro",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__reflexivity",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"FStar.Seq.Permutation.foldm_snoc",
"FStar.Matrix.matrix_of",
"FStar.Matrix.matrix_add_generator",
"FStar.Matrix.matrix_add",
"FStar.Matrix.matrix_mul",
"FStar.Matrix.matrix_eq_fun"
] | [] | false | false | true | false | false | let matrix_mul_is_right_distributive
#c
#eq
#m
#n
#p
(add: CE.cm c eq)
(mul: CE.cm c eq {is_fully_distributive mul add /\ is_absorber add.unit mul})
(mx: matrix c m n)
(my: matrix c m n)
(mz: matrix c n p)
: Lemma
(matrix_eq_fun eq
(matrix_mul add mul (matrix_add add mx my) mz)
(matrix_add add (matrix_mul add mul mx mz) (matrix_mul add mul my mz))) =
| let mxy = matrix_add add mx my in
let mxz = matrix_mul add mul mx mz in
let myz = matrix_mul add mul my mz in
let lhs = matrix_mul add mul mxy mz in
let rhs = matrix_add add mxz myz in
let sum_j (f: (under n -> c)) = SP.foldm_snoc add (SB.init n f) in
let sum_k (f: (under p -> c)) = SP.foldm_snoc add (SB.init p f) in
let aux i k : Lemma ((ijth lhs i k) `eq.eq` (ijth rhs i k)) =
let init_lhs j = mul.mult (ijth mxy i j) (ijth mz j k) in
let init_xz j = mul.mult (ijth mx i j) (ijth mz j k) in
let init_yz j = mul.mult (ijth my i j) (ijth mz j k) in
let init_rhs j =
(mul.mult (ijth mx i j) (ijth mz j k)) `add.mult` (mul.mult (ijth my i j) (ijth mz j k))
in
Classical.forall_intro eq.reflexivity;
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mxy mz i k init_lhs;
matrix_mul_ijth_eq_sum_of_seq_for_init add mul mx mz i k init_xz;
matrix_mul_ijth_eq_sum_of_seq_for_init add mul my mz i k init_yz;
SP.foldm_snoc_split_seq add
(SB.init n init_xz)
(SB.init n init_yz)
(SB.init n init_rhs)
(fun j -> ());
eq.symmetry (ijth rhs i k) (sum_j init_rhs);
SP.foldm_snoc_of_equal_inits add init_lhs init_rhs;
eq.transitivity (ijth lhs i k) (sum_j init_rhs) (ijth rhs i k)
in
matrix_equiv_from_proof eq lhs rhs aux | false |
Hacl.Impl.Frodo.KEM.Encaps.fst | Hacl.Impl.Frodo.KEM.Encaps.crypto_kem_enc_ | val crypto_kem_enc_:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu:lbytes (bytes_mu a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> Stack unit
(requires fun h ->
live h ct /\ live h ss /\ live h pk /\ live h mu /\
disjoint ct ss /\ disjoint ct pk /\ disjoint ss pk /\
disjoint mu ss /\ disjoint mu ct /\ disjoint mu pk)
(ensures fun h0 _ h1 -> modifies (loc ct |+| loc ss) h0 h1 /\
(as_seq h1 ct, as_seq h1 ss) == S.crypto_kem_enc_ a gen_a (as_seq h0 mu) (as_seq h0 pk)) | val crypto_kem_enc_:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu:lbytes (bytes_mu a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> Stack unit
(requires fun h ->
live h ct /\ live h ss /\ live h pk /\ live h mu /\
disjoint ct ss /\ disjoint ct pk /\ disjoint ss pk /\
disjoint mu ss /\ disjoint mu ct /\ disjoint mu pk)
(ensures fun h0 _ h1 -> modifies (loc ct |+| loc ss) h0 h1 /\
(as_seq h1 ct, as_seq h1 ss) == S.crypto_kem_enc_ a gen_a (as_seq h0 mu) (as_seq h0 pk)) | let crypto_kem_enc_ a gen_a mu ct ss pk =
push_frame ();
let seed_se_k = create (2ul *! crypto_bytes a) (u8 0) in
crypto_kem_enc0 a gen_a mu ct ss pk seed_se_k;
clear_words_u8 seed_se_k;
pop_frame () | {
"file_name": "code/frodo/Hacl.Impl.Frodo.KEM.Encaps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 412,
"start_col": 0,
"start_line": 407
} | module Hacl.Impl.Frodo.KEM.Encaps
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open LowStar.Buffer
open Lib.IntTypes
open Lib.Buffer
open Hacl.Impl.Matrix
open Hacl.Impl.Frodo.Params
open Hacl.Impl.Frodo.KEM
open Hacl.Impl.Frodo.Encode
open Hacl.Impl.Frodo.Pack
open Hacl.Impl.Frodo.Sample
open Hacl.Frodo.Random
module ST = FStar.HyperStack.ST
module LSeq = Lib.Sequence
module LB = Lib.ByteSequence
module FP = Spec.Frodo.Params
module S = Spec.Frodo.KEM.Encaps
module M = Spec.Matrix
module KG = Hacl.Impl.Frodo.KEM.KeyGen
#set-options "--z3rlimit 100 --fuel 0 --ifuel 0"
inline_for_extraction noextract
val frodo_mul_add_sa_plus_e:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h bp_matrix /\
disjoint bp_matrix seed_a /\ disjoint bp_matrix ep_matrix /\ disjoint bp_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc bp_matrix) h0 h1 /\
as_matrix h1 bp_matrix ==
S.frodo_mul_add_sa_plus_e a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix =
push_frame ();
let a_matrix = matrix_create (params_n a) (params_n a) in
frodo_gen_matrix gen_a (params_n a) seed_a a_matrix;
matrix_mul sp_matrix a_matrix bp_matrix;
matrix_add bp_matrix ep_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c1:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> c1:lbytes (ct1bytes_len a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h c1 /\
disjoint seed_a c1 /\ disjoint ep_matrix c1 /\ disjoint sp_matrix c1)
(ensures fun h0 _ h1 -> modifies (loc c1) h0 h1 /\
as_seq h1 c1 ==
S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1 =
push_frame ();
let bp_matrix = matrix_create params_nbar (params_n a) in
frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix;
frodo_pack (params_logq a) bp_matrix c1;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e:
a:FP.frodo_alg
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h epp_matrix /\ live h v_matrix /\ live h sp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix epp_matrix /\ disjoint v_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e a (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix =
push_frame ();
let b_matrix = matrix_create (params_n a) params_nbar in
frodo_unpack (params_n a) params_nbar (params_logq a) b b_matrix;
matrix_mul sp_matrix b_matrix v_matrix;
matrix_add v_matrix epp_matrix;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e_plus_mu:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h mu /\ live h v_matrix /\
live h sp_matrix /\ live h epp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix sp_matrix /\
disjoint v_matrix mu /\ disjoint v_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e_plus_mu a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix =
push_frame ();
frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix;
let mu_encode = matrix_create params_nbar params_nbar in
frodo_key_encode (params_logq a) (params_extracted_bits a) params_nbar mu mu_encode;
matrix_add v_matrix mu_encode;
clear_matrix mu_encode;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c2:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> c2:lbytes (ct2bytes_len a)
-> Stack unit
(requires fun h ->
live h mu /\ live h b /\ live h sp_matrix /\
live h epp_matrix /\ live h c2 /\
disjoint mu c2 /\ disjoint b c2 /\
disjoint sp_matrix c2 /\ disjoint epp_matrix c2)
(ensures fun h0 _ h1 -> modifies (loc c2) h0 h1 /\
as_seq h1 c2 ==
S.crypto_kem_enc_ct_pack_c2 a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
#push-options "--z3rlimit 200"
let crypto_kem_enc_ct_pack_c2 a mu b sp_matrix epp_matrix c2 =
push_frame ();
let v_matrix = matrix_create params_nbar params_nbar in
frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix;
frodo_pack (params_logq a) v_matrix c2;
clear_matrix v_matrix;
pop_frame ()
#pop-options
inline_for_extraction noextract
val get_sp_ep_epp_matrices:
a:FP.frodo_alg
-> seed_se:lbytes (crypto_bytes a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h seed_se /\ live h sp_matrix /\
live h ep_matrix /\ live h epp_matrix /\
disjoint seed_se sp_matrix /\ disjoint seed_se ep_matrix /\
disjoint seed_se epp_matrix /\ disjoint sp_matrix ep_matrix /\
disjoint sp_matrix epp_matrix /\ disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1 /\
(as_matrix h1 sp_matrix, as_matrix h1 ep_matrix, as_matrix h1 epp_matrix) ==
S.get_sp_ep_epp_matrices a (as_seq h0 seed_se))
let get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix =
push_frame ();
[@inline_let] let s_bytes_len = secretmatrixbytes_len a in
let r = create (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) (u8 0) in
KG.frodo_shake_r a (u8 0x96) seed_se (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) r;
frodo_sample_matrix a params_nbar (params_n a) (sub r 0ul s_bytes_len) sp_matrix;
frodo_sample_matrix a params_nbar (params_n a) (sub r s_bytes_len s_bytes_len) ep_matrix;
frodo_sample_matrix a params_nbar params_nbar (sub r (2ul *! s_bytes_len) (2ul *! params_nbar *! params_nbar)) epp_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct0:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> b:lbytes (publicmatrixbytes_len a)
-> mu:lbytes (bytes_mu a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> ct:lbytes (crypto_ciphertextbytes a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h b /\ live h mu /\ live h ct /\
live h sp_matrix /\ live h ep_matrix /\ live h epp_matrix /\
disjoint ct seed_a /\ disjoint ct b /\ disjoint ct mu /\
disjoint ct sp_matrix /\ disjoint ct ep_matrix /\ disjoint ct epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc ct) h0 h1 /\
(let c1:LB.lbytes (FP.ct1bytes_len a) = S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_seq h0 sp_matrix) (as_seq h0 ep_matrix) in
let c2:LB.lbytes (FP.ct2bytes_len a) = S.crypto_kem_enc_ct_pack_c2 a (as_seq h0 mu) (as_seq h0 b) (as_seq h0 sp_matrix) (as_seq h0 epp_matrix) in
v (crypto_ciphertextbytes a) == FP.ct1bytes_len a + FP.ct2bytes_len a /\
as_seq h1 ct `Seq.equal` LSeq.concat #_ #(FP.ct1bytes_len a) #(FP.ct2bytes_len a) c1 c2))
let crypto_kem_enc_ct0 a gen_a seed_a b mu sp_matrix ep_matrix epp_matrix ct =
let c1 = sub ct 0ul (ct1bytes_len a) in
let c2 = sub ct (ct1bytes_len a) (ct2bytes_len a) in
let h0 = ST.get () in
crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1;
let h1 = ST.get () in
crypto_kem_enc_ct_pack_c2 a mu b sp_matrix epp_matrix c2;
let h2 = ST.get () in
LSeq.eq_intro
(LSeq.sub (as_seq h2 ct) 0 (v (ct1bytes_len a)))
(LSeq.sub (as_seq h1 ct) 0 (v (ct1bytes_len a)));
LSeq.lemma_concat2
(v (ct1bytes_len a)) (LSeq.sub (as_seq h1 ct) 0 (v (ct1bytes_len a)))
(v (ct2bytes_len a)) (LSeq.sub (as_seq h2 ct) (v (ct1bytes_len a)) (v (ct2bytes_len a))) (as_seq h2 ct)
inline_for_extraction noextract
val clear_matrix3:
a:FP.frodo_alg
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h sp_matrix /\ live h ep_matrix /\ live h epp_matrix /\
disjoint sp_matrix ep_matrix /\ disjoint sp_matrix epp_matrix /\
disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 ->
modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1)
let clear_matrix3 a sp_matrix ep_matrix epp_matrix =
clear_matrix sp_matrix;
clear_matrix ep_matrix;
clear_matrix epp_matrix
inline_for_extraction noextract
val crypto_kem_enc_ct:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu:lbytes (bytes_mu a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se:lbytes (crypto_bytes a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> Stack unit
(requires fun h ->
live h mu /\ live h pk /\ live h seed_se /\ live h ct /\
disjoint ct mu /\ disjoint ct pk /\ disjoint ct seed_se)
(ensures fun h0 _ h1 -> modifies (loc ct) h0 h1 /\
as_seq h1 ct == S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) (as_seq h0 seed_se))
#push-options "--z3rlimit 200"
let crypto_kem_enc_ct a gen_a mu pk seed_se ct =
push_frame ();
let h0 = ST.get () in
FP.expand_crypto_publickeybytes a;
let seed_a = sub pk 0ul bytes_seed_a in
let b = sub pk bytes_seed_a (publicmatrixbytes_len a) in
let sp_matrix = matrix_create params_nbar (params_n a) in
let ep_matrix = matrix_create params_nbar (params_n a) in
let epp_matrix = matrix_create params_nbar params_nbar in
get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix;
crypto_kem_enc_ct0 a gen_a seed_a b mu sp_matrix ep_matrix epp_matrix ct;
clear_matrix3 a sp_matrix ep_matrix epp_matrix;
let h1 = ST.get () in
LSeq.eq_intro
(as_seq h1 ct)
(S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) (as_seq h0 seed_se));
pop_frame ()
#pop-options
inline_for_extraction noextract
val crypto_kem_enc_ss:
a:FP.frodo_alg
-> k:lbytes (crypto_bytes a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> Stack unit
(requires fun h ->
live h k /\ live h ct /\ live h ss /\
disjoint ct ss /\ disjoint k ct /\ disjoint k ss)
(ensures fun h0 _ h1 -> modifies (loc ss) h0 h1 /\
as_seq h1 ss == S.crypto_kem_enc_ss a (as_seq h0 k) (as_seq h0 ct))
let crypto_kem_enc_ss a k ct ss =
push_frame ();
let ss_init_len = crypto_ciphertextbytes a +! crypto_bytes a in
let shake_input_ss = create ss_init_len (u8 0) in
concat2 (crypto_ciphertextbytes a) ct (crypto_bytes a) k shake_input_ss;
frodo_shake a ss_init_len shake_input_ss (crypto_bytes a) ss;
clear_words_u8 shake_input_ss;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_seed_se_k:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> Stack unit
(requires fun h ->
live h mu /\ live h pk /\ live h seed_se_k /\
disjoint seed_se_k mu /\ disjoint seed_se_k pk)
(ensures fun h0 _ h1 -> modifies (loc seed_se_k) h0 h1 /\
as_seq h1 seed_se_k == S.crypto_kem_enc_seed_se_k a (as_seq h0 mu) (as_seq h0 pk))
let crypto_kem_enc_seed_se_k a mu pk seed_se_k =
push_frame ();
let pkh_mu = create (bytes_pkhash a +! bytes_mu a) (u8 0) in
let h0 = ST.get () in
update_sub_f h0 pkh_mu 0ul (bytes_pkhash a)
(fun h -> FP.frodo_shake a (v (crypto_publickeybytes a)) (as_seq h0 pk) (v (bytes_pkhash a)))
(fun _ -> frodo_shake a (crypto_publickeybytes a) pk (bytes_pkhash a) (sub pkh_mu 0ul (bytes_pkhash a)));
let h1 = ST.get () in
update_sub pkh_mu (bytes_pkhash a) (bytes_mu a) mu;
let h2 = ST.get () in
LSeq.eq_intro
(LSeq.sub (as_seq h2 pkh_mu) 0 (v (bytes_pkhash a)))
(LSeq.sub (as_seq h1 pkh_mu) 0 (v (bytes_pkhash a)));
LSeq.lemma_concat2
(v (bytes_pkhash a)) (LSeq.sub (as_seq h1 pkh_mu) 0 (v (bytes_pkhash a)))
(v (bytes_mu a)) (as_seq h0 mu) (as_seq h2 pkh_mu);
//concat2 (bytes_pkhash a) pkh (bytes_mu a) mu pkh_mu;
frodo_shake a (bytes_pkhash a +! bytes_mu a) pkh_mu (2ul *! crypto_bytes a) seed_se_k;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_ss:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> mu:lbytes (bytes_mu a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> Stack unit
(requires fun h ->
live h seed_se_k /\ live h ct /\ live h ss /\ live h pk /\ live h mu /\
disjoint ct ss /\ disjoint ct pk /\ disjoint ss pk /\
disjoint mu ss /\ disjoint mu ct /\ disjoint seed_se_k ct /\ disjoint seed_se_k ss)
(ensures fun h0 _ h1 -> modifies (loc ct |+| loc ss) h0 h1 /\
(let seed_se = LSeq.sub (as_seq h0 seed_se_k) 0 (v (crypto_bytes a)) in
let k = LSeq.sub (as_seq h0 seed_se_k) (v (crypto_bytes a)) (v (crypto_bytes a)) in
as_seq h1 ct == S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) seed_se /\
as_seq h1 ss == S.crypto_kem_enc_ss a k (as_seq h1 ct)))
let crypto_kem_enc_ct_ss a gen_a seed_se_k mu ct ss pk =
let seed_se = sub seed_se_k 0ul (crypto_bytes a) in
let k = sub seed_se_k (crypto_bytes a) (crypto_bytes a) in
crypto_kem_enc_ct a gen_a mu pk seed_se ct;
crypto_kem_enc_ss a k ct ss
inline_for_extraction noextract
val crypto_kem_enc0:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu:lbytes (bytes_mu a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> Stack unit
(requires fun h ->
live h ct /\ live h ss /\ live h pk /\ live h mu /\ live h seed_se_k /\
loc_pairwise_disjoint [loc mu; loc ct; loc ss; loc pk; loc seed_se_k])
(ensures fun h0 _ h1 -> modifies (loc ct |+| loc ss |+| loc seed_se_k) h0 h1 /\
(as_seq h1 ct, as_seq h1 ss) == S.crypto_kem_enc_ a gen_a (as_seq h0 mu) (as_seq h0 pk))
#push-options "--z3rlimit 200"
let crypto_kem_enc0 a gen_a mu ct ss pk seed_se_k =
crypto_kem_enc_seed_se_k a mu pk seed_se_k;
crypto_kem_enc_ct_ss a gen_a seed_se_k mu ct ss pk
#pop-options
inline_for_extraction noextract
val crypto_kem_enc_:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu:lbytes (bytes_mu a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> Stack unit
(requires fun h ->
live h ct /\ live h ss /\ live h pk /\ live h mu /\
disjoint ct ss /\ disjoint ct pk /\ disjoint ss pk /\
disjoint mu ss /\ disjoint mu ct /\ disjoint mu pk)
(ensures fun h0 _ h1 -> modifies (loc ct |+| loc ss) h0 h1 /\
(as_seq h1 ct, as_seq h1 ss) == S.crypto_kem_enc_ a gen_a (as_seq h0 mu) (as_seq h0 pk)) | {
"checked_file": "/",
"dependencies": [
"Spec.Matrix.fst.checked",
"Spec.Frodo.Params.fst.checked",
"Spec.Frodo.KEM.Encaps.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Matrix.fst.checked",
"Hacl.Impl.Frodo.Sample.fst.checked",
"Hacl.Impl.Frodo.Params.fst.checked",
"Hacl.Impl.Frodo.Pack.fst.checked",
"Hacl.Impl.Frodo.KEM.KeyGen.fst.checked",
"Hacl.Impl.Frodo.KEM.fst.checked",
"Hacl.Impl.Frodo.Encode.fst.checked",
"Hacl.Frodo.Random.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Frodo.KEM.Encaps.fst"
} | [
{
"abbrev": true,
"full_module": "Hacl.Impl.Frodo.KEM.KeyGen",
"short_module": "KG"
},
{
"abbrev": true,
"full_module": "Spec.Matrix",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.KEM.Encaps",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.Params",
"short_module": "FP"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "LB"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Frodo.Random",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Sample",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Pack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Encode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Params",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Matrix",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Frodo.Params.frodo_alg ->
gen_a: Spec.Frodo.Params.frodo_gen_a{Hacl.Impl.Frodo.Params.is_supported gen_a} ->
mu: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.bytes_mu a) ->
ct: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_ciphertextbytes a) ->
ss: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_bytes a) ->
pk: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_publickeybytes a)
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Spec.Frodo.Params.frodo_alg",
"Spec.Frodo.Params.frodo_gen_a",
"Prims.b2t",
"Hacl.Impl.Frodo.Params.is_supported",
"Hacl.Impl.Matrix.lbytes",
"Hacl.Impl.Frodo.Params.bytes_mu",
"Hacl.Impl.Frodo.Params.crypto_ciphertextbytes",
"Hacl.Impl.Frodo.Params.crypto_bytes",
"Hacl.Impl.Frodo.Params.crypto_publickeybytes",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Frodo.KEM.clear_words_u8",
"Lib.IntTypes.op_Star_Bang",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"FStar.UInt32.__uint_to_t",
"Hacl.Impl.Frodo.KEM.Encaps.crypto_kem_enc0",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.MUT",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"Lib.IntTypes.mul",
"FStar.UInt32.uint_to_t",
"FStar.UInt32.t",
"Lib.Buffer.create",
"Lib.IntTypes.uint8",
"Lib.IntTypes.u8",
"Lib.Buffer.lbuffer",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let crypto_kem_enc_ a gen_a mu ct ss pk =
| push_frame ();
let seed_se_k = create (2ul *! crypto_bytes a) (u8 0) in
crypto_kem_enc0 a gen_a mu ct ss pk seed_se_k;
clear_words_u8 seed_se_k;
pop_frame () | false |
LowParse.Repr.fsti | LowParse.Repr.as_repr_pos | val as_repr_pos (#t: _) (b: const_slice) (from to: index b) (p: repr_ptr t)
: Pure (repr_pos t b)
(requires from <= to /\ Ptr?.b p == C.gsub b.base from (to - from))
(ensures fun r -> p == as_ptr_spec r) | val as_repr_pos (#t: _) (b: const_slice) (from to: index b) (p: repr_ptr t)
: Pure (repr_pos t b)
(requires from <= to /\ Ptr?.b p == C.gsub b.base from (to - from))
(ensures fun r -> p == as_ptr_spec r) | let as_repr_pos #t (b:const_slice) (from to:index b) (p:repr_ptr t)
: Pure (repr_pos t b)
(requires
from <= to /\
Ptr?.b p == C.gsub b.base from (to - from))
(ensures fun r ->
p == as_ptr_spec r)
= Pos from (Ptr?.meta p) (Ptr?.vv p) (to - from) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 50,
"end_line": 818,
"start_col": 0,
"start_line": 811
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
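/// (Editorial sketch, not part of the original interface; the two helper
/// names below are hypothetical.) A minimal usage example of the conversions
/// above: any mutable LowParse slice can be wrapped as a read-only
/// `const_slice`, and a `const_slice` can be handed back to the LowParse API
/// via `to_slice`.
let example_const_slice_of (s:LP.slice mut_p mut_p)
  : Tot const_slice
  = of_slice s
let example_lowparse_view (c:const_slice)
  : Tot (LP.slice (preorder c.base) (preorder c.base))
  = to_slice c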
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
///  * The const pointer `b` refers to a representation of `t`
///
///  * The representation is described by the erased `meta` field
///
///  Temporary fields:
///
///  * At this stage, we also keep a real high-level value (vv). We
///    plan to gradually switch to accessing its ghost counterpart
///    (.meta.v) instead, and eventually get rid of it to reduce
///    implicit heap allocations.
///
///  * We also retain a concrete length field to facilitate using the
///    LowParse APIs for accessors and jumpers, which are oriented
///    towards using slices rather than pointers. As those APIs
///    change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
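/// (Editorial sketch, not part of the original interface; `example_spec_view`
/// is a hypothetical helper.) The ghost projections above let specifications
/// mention the parsed value and the allocation region of a representation
/// without reading memory:
let example_spec_view #t (p:repr_ptr t)
  : GTot (t & HS.rid)
  = (value p, region_of p)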
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
///   We define validity in two stages:
///
///   First, we provide `valid'`, a transparent definition, and then
///   make it abstract via the `valid` predicate just below.
///
///   Validity encapsulates three related LowParse notions:
///
///    1. The underlying pointer contains a valid wire-format
///       (`valid_pos`)
///
///    2. The ghost value associated with the `repr` is the
///       parsed value of the wire format.
///
///    3. The bytes of the slice are indeed the representation of the
///       ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
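/// (Editorial sketch, not part of the original interface; the lemma name is
/// hypothetical.) The intended elimination pattern for the abstract
/// predicate: after calling `reveal_valid`, `valid` coincides with its
/// transparent definition `valid'`, e.g.
let example_valid_elim #t (p:repr_ptr t) (h:HS.mem)
  : Lemma
    (requires valid p h)
    (ensures valid' p h)
  = reveal_valid ()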
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
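(* A hypothetical usage sketch (illustrative only, not part of this interface):
   a caller that has framed away the validity of a stable `r` but still knows
   its buffer is live can restore `valid r` before reading from it:

     let read_stable (#t:Type) (r:stable_repr_ptr t)
       : Stack t
         (requires fun h -> C.live h r.b)
         (ensures fun h0 _ h1 -> h0 == h1)
       = recall_stable_repr_ptr r; (* re-establishes [valid r] from liveness *)
         r.vv

   `read_stable` is an assumed name; only `recall_stable_repr_ptr` above is
   provided by this module. *)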
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
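(* A hypothetical usage sketch (illustrative only): to keep a message
   representation beyond the lifetime of its input buffer, copy it into a
   caller-owned region and later recall it from region liveness alone:

     let keep (#t:Type) (rgn:ST.drgn) (r:repr_ptr t)
       : ST (stable_region_repr_ptr rgn t)
         (requires fun h -> valid r h /\ HS.live_region h (ST.rid_of_drgn rgn))
         (ensures fun _ _ _ -> True)
       = stash rgn r r.length

   `keep` is an assumed name; `r.length` satisfies the precondition of `stash`
   because the `repr_ptr` refinement equates it with `r.meta.len`. *)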
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
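(* A hypothetical instantiation sketch (the format-specific names are assumed,
   not defined here): given an EverParse-generated accessor `accessor_msg_hdr`,
   jumper `jump_hdr` and 32-bit parser `parse32_hdr` for a field of `parse_msg`,
   one would write

     unfold let msg_hdr_field : field_accessor parse_msg parse_hdr =
       FieldAccessor accessor_msg_hdr jump_hdr parse32_hdr

   and project a sub-pointer with `get_field msg_hdr_field p`, which is then
   itself a `repr_ptr` into the same underlying buffer. *)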
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the
/// sub-slice b.[from, to)
let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r)
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
let frame_valid_repr_pos #t #b (r:repr_pos t b) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid_repr_pos r h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp_pos r) l)
(ensures
valid_repr_pos r h1)
[SMTPat (valid_repr_pos r h1);
SMTPat (B.modifies l h0 h1)]
= ()
(*** Operations on repr_pos ***)
/// End position
/// On positional reprs, this is concretely computable
let end_pos #t #b (r:repr_pos t b)
: index b
= r.start_pos + r.length
let valid_repr_pos_elim
(#t: Type)
(#b: const_slice)
(r: repr_pos t b)
(h: HS.mem)
: Lemma
(requires (
valid_repr_pos r h
))
(ensures (
LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r)
))
= reveal_valid ();
let p : repr_ptr t = as_ptr_spec r in
let slice = slice_of_const_buffer (Ptr?.b p) (Ptr?.meta p).len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h (to_slice b) r.start_pos;
LP.parse_strong_prefix r.meta.parser (LP.bytes_of_slice_from h slice 0ul) (LP.bytes_of_slice_from h (to_slice b) r.start_pos)
/// Mostly just by inheriting operations on pointers
let as_ptr #t #b (r:repr_pos t b)
: Stack (repr_ptr t)
(requires fun h ->
valid_repr_pos r h)
(ensures fun h0 ptr h1 ->
ptr == as_ptr_spec r /\
h0 == h1)
= let b = C.sub b.base r.start_pos (Ghost.hide r.meta.len) in
let m = r.meta in
let v = r.vv_pos in
let l = r.length in
Ptr b m v l | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
b: LowParse.Repr.const_slice ->
from: LowParse.Repr.index b ->
to: LowParse.Repr.index b ->
p: LowParse.Repr.repr_ptr t
-> Prims.Pure (LowParse.Repr.repr_pos t b) | Prims.Pure | [] | [] | [
"LowParse.Repr.const_slice",
"LowParse.Repr.index",
"LowParse.Repr.repr_ptr",
"LowParse.Repr.Pos",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowParse.Repr.__proj__Ptr__item__vv",
"FStar.Integers.op_Subtraction",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"LowParse.Repr.repr_pos",
"Prims.l_and",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"Prims.eq2",
"LowStar.ConstBuffer.const_buffer",
"LowParse.Bytes.byte",
"LowParse.Repr.__proj__Ptr__item__b",
"LowStar.ConstBuffer.gsub",
"LowParse.Repr.__proj__MkSlice__item__base",
"LowParse.Repr.as_ptr_spec"
] | [] | false | false | false | false | false | let as_repr_pos #t (b: const_slice) (from: index b) (to: index b) (p: repr_ptr t)
: Pure (repr_pos t b)
(requires from <= to /\ Ptr?.b p == C.gsub b.base from (to - from))
(ensures fun r -> p == as_ptr_spec r) =
| Pos from (Ptr?.meta p) (Ptr?.vv p) (to - from) | false |
LowParse.Repr.fsti | LowParse.Repr.field_accessor_pos_post | val field_accessor_pos_post : p: LowParse.Repr.repr_pos t1 b ->
f: LowParse.Repr.field_accessor (Mkmeta?.parser (Pos?.meta p)) p2 ->
h0: FStar.Monotonic.HyperStack.mem ->
q: LowParse.Repr.repr_pos_p t2 b p2 ->
h1: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Prims.logical | let field_accessor_pos_post (#b:const_slice) (#t1:Type) (p:repr_pos t1 b)
(#k2: strong_parser_kind)
(#t2:Type)
(#p2: LP.parser k2 t2)
(f:field_accessor p.meta.parser p2) =
fun h0 (q:repr_pos_p t2 b p2) h1 ->
let cl = FieldAccessor?.cl f in
cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid_repr_pos q h1 /\
value_pos q == cl.LP.clens_get (value_pos p) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 50,
"end_line": 908,
"start_col": 0,
"start_line": 898
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
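(* A hypothetical usage sketch (format-specific names assumed): writing a value
   into a mutable output slice and obtaining a pointer to its wire form in one
   step:

     match mk_from_serialize parse32_msg serialize32_msg size32_msg out 0ul msg with
     | None -> (* output slice too small for the serialized bytes of [msg] *) ...
     | Some p -> (* valid p /\ p.meta.v == msg *) ...
*)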
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `p:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t = p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the
/// sub-slice b.[from, to)
let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r)
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
let frame_valid_repr_pos #t #b (r:repr_pos t b) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid_repr_pos r h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp_pos r) l)
(ensures
valid_repr_pos r h1)
[SMTPat (valid_repr_pos r h1);
SMTPat (B.modifies l h0 h1)]
= ()
(*** Operations on repr_pos ***)
/// End position
/// On positional reprs, this is concretely computable
let end_pos #t #b (r:repr_pos t b)
: index b
= r.start_pos + r.length
let valid_repr_pos_elim
(#t: Type)
(#b: const_slice)
(r: repr_pos t b)
(h: HS.mem)
: Lemma
(requires (
valid_repr_pos r h
))
(ensures (
LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r)
))
= reveal_valid ();
let p : repr_ptr t = as_ptr_spec r in
let slice = slice_of_const_buffer (Ptr?.b p) (Ptr?.meta p).len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h (to_slice b) r.start_pos;
LP.parse_strong_prefix r.meta.parser (LP.bytes_of_slice_from h slice 0ul) (LP.bytes_of_slice_from h (to_slice b) r.start_pos)
/// Mostly just by inheriting operations on pointers
let as_ptr #t #b (r:repr_pos t b)
: Stack (repr_ptr t)
(requires fun h ->
valid_repr_pos r h)
(ensures fun h0 ptr h1 ->
ptr == as_ptr_spec r /\
h0 == h1)
= let b = C.sub b.base r.start_pos (Ghost.hide r.meta.len) in
let m = r.meta in
let v = r.vv_pos in
let l = r.length in
Ptr b m v l
let as_repr_pos #t (b:const_slice) (from to:index b) (p:repr_ptr t)
: Pure (repr_pos t b)
(requires
from <= to /\
Ptr?.b p == C.gsub b.base from (to - from))
(ensures fun r ->
p == as_ptr_spec r)
= Pos from (Ptr?.meta p) (Ptr?.vv p) (to - from)
/// `mk_repr_pos b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:LP.slice mut_p mut_p)
(from to:index (of_slice b))
: Stack (repr_pos_p t (of_slice b) parser)
(requires fun h ->
LP.valid_pos parser h b from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 b from)
= as_repr_pos (of_slice b) from to (mk parser32 b from to)
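(* A hypothetical usage sketch (the validator and parser names are assumed):
   once an EverParse validator has established [LP.valid_pos parser h b from to],
   the validated range can be packaged as a positional repr with

     let r = mk_repr_pos parse32_msg b from to in
     ... (* valid_repr_pos r, and r.vv_pos is the parsed contents at [from] *)
*)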
/// `mk b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:index b)
: Stack (repr_pos_p t b parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 (to_slice b) from)
= as_repr_pos b from to (mk_from_const_slice parser32 b from to)
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_repr_pos_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:index (of_slice b))
(x: t)
: Stack (option (repr_pos_p t (of_slice b) parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 r h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
begin match r with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some r ->
valid_repr_pos r h1 /\
r.start_pos == from /\
r.vv_pos == x /\
v (end_pos r) = v from + v (size32 x)
end
)
= let size = size32 x in
match (mk_from_serialize parser32 serializer32 size32 b from x) with
| None -> None
| Some p -> Some (as_repr_pos (of_slice b) from (from + size) p)
/// Accessors on positional reprs | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p: LowParse.Repr.repr_pos t1 b ->
f: LowParse.Repr.field_accessor (Mkmeta?.parser (Pos?.meta p)) p2 ->
h0: FStar.Monotonic.HyperStack.mem ->
q: LowParse.Repr.repr_pos_p t2 b p2 ->
h1: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Prims.logical | Prims.GTot | [
"sometrivial"
] | [] | [
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos",
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Repr.field_accessor",
"LowParse.Repr.__proj__Mkmeta__item__parser_kind",
"LowParse.Repr.__proj__Pos__item__meta",
"LowParse.Repr.__proj__Mkmeta__item__parser",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Repr.repr_pos_p",
"Prims.l_and",
"LowParse.Low.Base.Spec.__proj__Mkclens__item__clens_cond",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"LowParse.Repr.valid_repr_pos",
"Prims.eq2",
"LowParse.Repr.value_pos",
"LowParse.Low.Base.Spec.__proj__Mkclens__item__clens_get",
"LowParse.Low.Base.Spec.clens",
"LowParse.Repr.__proj__FieldAccessor__item__cl",
"Prims.logical"
] | [] | false | false | false | false | true | let field_accessor_pos_post
(#b: const_slice)
(#t1: Type)
(p: repr_pos t1 b)
(#k2: strong_parser_kind)
(#t2: Type)
(#p2: LP.parser k2 t2)
(f: field_accessor p.meta.parser p2)
=
| fun h0 (q: repr_pos_p t2 b p2) h1 ->
let cl = FieldAccessor?.cl f in
cl.LP.clens_cond p.meta.v /\ B.modifies B.loc_none h0 h1 /\ valid_repr_pos q h1 /\
value_pos q == cl.LP.clens_get (value_pos p) | false |
|
LowParse.Repr.fsti | LowParse.Repr.as_ptr | val as_ptr (#t #b: _) (r: repr_pos t b)
: Stack (repr_ptr t)
(requires fun h -> valid_repr_pos r h)
(ensures fun h0 ptr h1 -> ptr == as_ptr_spec r /\ h0 == h1) | val as_ptr (#t #b: _) (r: repr_pos t b)
: Stack (repr_ptr t)
(requires fun h -> valid_repr_pos r h)
(ensures fun h0 ptr h1 -> ptr == as_ptr_spec r /\ h0 == h1) | let as_ptr #t #b (r:repr_pos t b)
: Stack (repr_ptr t)
(requires fun h ->
valid_repr_pos r h)
(ensures fun h0 ptr h1 ->
ptr == as_ptr_spec r /\
h0 == h1)
= let b = C.sub b.base r.start_pos (Ghost.hide r.meta.len) in
let m = r.meta in
let v = r.vv_pos in
let l = r.length in
Ptr b m v l | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 15,
"end_line": 809,
"start_col": 0,
"start_line": 798
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `p:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
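/// Example (editorial sketch): a caller holding a `stable_repr_ptr` whose
/// validity is no longer in context only needs liveness of its buffer to
/// recover validity before reading from it.
(*
  recall_stable_repr_ptr r;      // requires C.live h r.b; gives valid r h
  let n = length r t_jumper in   // t_jumper is hypothetical, as above
  ...
*)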
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
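/// Example (editorial sketch): copying a transient `repr_ptr` into a
/// caller-provided region so that it survives beyond the current frame.
(*
  let p' = stash rgn p p.length in
  // p'.meta == p.meta; later, once only liveness of rgn is known,
  // recall_stable_region_repr_ptr rgn p' re-establishes valid p'
*)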
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
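/// Example (editorial sketch): instantiating a `field_accessor` and using
/// `get_field` to zoom into a sub-representation. `msg_parser`,
/// `header_parser`, `hdr_acc`, `hdr_jump` and `hdr_parser32` are
/// hypothetical LowParse-generated names.
(*
  unfold noextract
  let msg_header : field_accessor msg_parser header_parser
    = FieldAccessor hdr_acc hdr_jump hdr_parser32

  let get_header = get_field msg_header
  // get_header p returns q with value q == clens_get (value p),
  // q `sub_ptr` p, and no heap modification
*)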
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
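/// Example (editorial sketch): instantiating a `field_reader` to read a
/// base value directly, without materializing an intermediate repr.
/// `version_acc` and `version_reader` are hypothetical accessor and
/// leaf-reader instances for a `U32.t` field of `msg`.
(*
  unfold
  let msg_version : field_reader msg_parser U32.t
    = FieldReader version_acc version_reader

  let read_version = read_field msg_version
  // read_version p == clens_get (value p), with no heap modification
*)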
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the
/// sub-slice b.[from, to)
let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r)
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
let frame_valid_repr_pos #t #b (r:repr_pos t b) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid_repr_pos r h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp_pos r) l)
(ensures
valid_repr_pos r h1)
[SMTPat (valid_repr_pos r h1);
SMTPat (B.modifies l h0 h1)]
= ()
(*** Operations on repr_pos ***)
/// End position
/// On positional reprs, this is concretely computable
let end_pos #t #b (r:repr_pos t b)
: index b
= r.start_pos + r.length
let valid_repr_pos_elim
(#t: Type)
(#b: const_slice)
(r: repr_pos t b)
(h: HS.mem)
: Lemma
(requires (
valid_repr_pos r h
))
(ensures (
LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r)
))
= reveal_valid ();
let p : repr_ptr t = as_ptr_spec r in
let slice = slice_of_const_buffer (Ptr?.b p) (Ptr?.meta p).len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h (to_slice b) r.start_pos;
LP.parse_strong_prefix r.meta.parser (LP.bytes_of_slice_from h slice 0ul) (LP.bytes_of_slice_from h (to_slice b) r.start_pos) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: LowParse.Repr.repr_pos t b -> FStar.HyperStack.ST.Stack (LowParse.Repr.repr_ptr t) | FStar.HyperStack.ST.Stack | [] | [] | [
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos",
"LowParse.Repr.Ptr",
"FStar.UInt32.t",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.UInt32.v",
"LowParse.Repr.__proj__Pos__item__start_pos",
"LowParse.Repr.__proj__Mkmeta__item__len",
"LowParse.Repr.__proj__Pos__item__meta",
"FStar.Integers.int_t",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"LowParse.Repr.__proj__MkSlice__item__slice_len",
"Prims.eq2",
"LowParse.Repr.__proj__Pos__item__vv_pos",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowParse.Repr.__proj__Pos__item__length",
"LowParse.Repr.meta",
"LowParse.Repr.repr_ptr",
"LowStar.ConstBuffer.const_buffer",
"LowParse.Bytes.byte",
"LowStar.ConstBuffer.sub",
"LowParse.Repr.__proj__MkSlice__item__base",
"FStar.Ghost.hide",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Repr.valid_repr_pos",
"LowParse.Repr.as_ptr_spec"
] | [] | false | true | false | false | false | let as_ptr #t #b (r: repr_pos t b)
: Stack (repr_ptr t)
(requires fun h -> valid_repr_pos r h)
(ensures fun h0 ptr h1 -> ptr == as_ptr_spec r /\ h0 == h1) =
| let b = C.sub b.base r.start_pos (Ghost.hide r.meta.len) in
let m = r.meta in
let v = r.vv_pos in
let l = r.length in
Ptr b m v l | false |
LowParse.Repr.fsti | LowParse.Repr.get_field_pos_t | val get_field_pos_t : f: LowParse.Repr.field_accessor p1 p2 -> Type | let get_field_pos_t (#k1: strong_parser_kind) (#t1: Type) (#p1: LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2: Type) (#p2: LP.parser k2 t2)
(f:field_accessor p1 p2)
= (#b:const_slice) ->
(pp:repr_pos_p t1 b p1) ->
Stack (repr_pos_p t2 b p2)
(requires fun h ->
let cl = FieldAccessor?.cl f in
valid_repr_pos pp h /\
cl.LP.clens_cond pp.meta.v)
(ensures
field_accessor_pos_post pp f) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 36,
"end_line": 922,
"start_col": 0,
"start_line": 911
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by erased the `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
  TODO: This also relies on an as-yet-unimplemented feature to
        atomically allocate and initialize a buffer to a chosen
        value; it will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the
/// sub-slice b.[from, to)
let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r)
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
let frame_valid_repr_pos #t #b (r:repr_pos t b) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid_repr_pos r h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp_pos r) l)
(ensures
valid_repr_pos r h1)
[SMTPat (valid_repr_pos r h1);
SMTPat (B.modifies l h0 h1)]
= ()
(*** Operations on repr_pos ***)
/// End position
/// On positional reprs, this is concretely computable
let end_pos #t #b (r:repr_pos t b)
: index b
= r.start_pos + r.length
let valid_repr_pos_elim
(#t: Type)
(#b: const_slice)
(r: repr_pos t b)
(h: HS.mem)
: Lemma
(requires (
valid_repr_pos r h
))
(ensures (
LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r)
))
= reveal_valid ();
let p : repr_ptr t = as_ptr_spec r in
let slice = slice_of_const_buffer (Ptr?.b p) (Ptr?.meta p).len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h (to_slice b) r.start_pos;
LP.parse_strong_prefix r.meta.parser (LP.bytes_of_slice_from h slice 0ul) (LP.bytes_of_slice_from h (to_slice b) r.start_pos)
/// Mostly just by inheriting operations on pointers
let as_ptr #t #b (r:repr_pos t b)
: Stack (repr_ptr t)
(requires fun h ->
valid_repr_pos r h)
(ensures fun h0 ptr h1 ->
ptr == as_ptr_spec r /\
h0 == h1)
= let b = C.sub b.base r.start_pos (Ghost.hide r.meta.len) in
let m = r.meta in
let v = r.vv_pos in
let l = r.length in
Ptr b m v l
let as_repr_pos #t (b:const_slice) (from to:index b) (p:repr_ptr t)
: Pure (repr_pos t b)
(requires
from <= to /\
Ptr?.b p == C.gsub b.base from (to - from))
(ensures fun r ->
p == as_ptr_spec r)
= Pos from (Ptr?.meta p) (Ptr?.vv p) (to - from)
/// `mk_repr_pos b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:LP.slice mut_p mut_p)
(from to:index (of_slice b))
: Stack (repr_pos_p t (of_slice b) parser)
(requires fun h ->
LP.valid_pos parser h b from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 b from)
= as_repr_pos (of_slice b) from to (mk parser32 b from to)
/// `mk b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:index b)
: Stack (repr_pos_p t b parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 (to_slice b) from)
= as_repr_pos b from to (mk_from_const_slice parser32 b from to)
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_repr_pos_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:index (of_slice b))
(x: t)
: Stack (option (repr_pos_p t (of_slice b) parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 r h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
begin match r with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some r ->
valid_repr_pos r h1 /\
r.start_pos == from /\
r.vv_pos == x /\
v (end_pos r) = v from + v (size32 x)
end
)
= let size = size32 x in
match (mk_from_serialize parser32 serializer32 size32 b from x) with
| None -> None
| Some p -> Some (as_repr_pos (of_slice b) from (from + size) p)
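/// Example (editorial sketch): writing a value and obtaining a positional
/// representation. `msg`, `msg_parser32`, `msg_serializer32` and
/// `msg_size32` are hypothetical names for a LowParse-generated
/// serializer instance.
(*
  match mk_repr_pos_from_serialize msg_parser32 msg_serializer32 msg_size32 b from m with
  | None   -> ...  (* not enough space in the output slice *)
  | Some r -> ...  (* valid_repr_pos r h /\ r.vv_pos == m /\
                      v (end_pos r) = v from + v (msg_size32 m) *)
*)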
/// Accessors on positional reprs
unfold
let field_accessor_pos_post (#b:const_slice) (#t1:Type) (p:repr_pos t1 b)
(#k2: strong_parser_kind)
(#t2:Type)
(#p2: LP.parser k2 t2)
(f:field_accessor p.meta.parser p2) =
fun h0 (q:repr_pos_p t2 b p2) h1 ->
let cl = FieldAccessor?.cl f in
cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid_repr_pos q h1 /\
value_pos q == cl.LP.clens_get (value_pos p) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: LowParse.Repr.field_accessor p1 p2 -> Type | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Repr.field_accessor",
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos_p",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowParse.Repr.valid_repr_pos",
"LowParse.Low.Base.Spec.__proj__Mkclens__item__clens_cond",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowParse.Repr.__proj__Pos__item__meta",
"LowParse.Low.Base.Spec.clens",
"LowParse.Repr.__proj__FieldAccessor__item__cl",
"LowParse.Repr.field_accessor_pos_post"
] | [] | false | false | false | false | true | let get_field_pos_t
(#k1: strong_parser_kind)
(#t1: Type)
(#p1: LP.parser k1 t1)
(#k2: strong_parser_kind)
(#t2: Type)
(#p2: LP.parser k2 t2)
(f: field_accessor p1 p2)
=
| #b: const_slice -> pp: repr_pos_p t1 b p1
-> Stack (repr_pos_p t2 b p2)
(requires
fun h ->
let cl = FieldAccessor?.cl f in
valid_repr_pos pp h /\ cl.LP.clens_cond pp.meta.v)
(ensures field_accessor_pos_post pp f) | false |
|
LowParse.Repr.fsti | LowParse.Repr.end_pos | val end_pos (#t #b: _) (r: repr_pos t b) : index b | val end_pos (#t #b: _) (r: repr_pos t b) : index b | let end_pos #t #b (r:repr_pos t b)
: index b
= r.start_pos + r.length | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 26,
"end_line": 776,
"start_col": 0,
"start_line": 774
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by erased the `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
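/// Illustrative usage sketch (editorial addition, not part of the original
/// sources): serializing a value `x` into an output slice and keeping a handle
/// to its wire format. `my_parser32`, `my_serializer32`, `my_size32` and
/// `out_slice` are hypothetical instances; the sketch is unverified.
(*
   match mk_from_serialize my_parser32 my_serializer32 my_size32 out_slice 0ul x with
   | None -> ...   // the output slice is too small for `serialize x`
   | Some p -> ... // valid p h1 holds and p.meta.v == x
*)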
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
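/// Illustrative usage sketch (editorial addition): computing the length of a
/// representation with a jumper and extracting its bytes. `my_jumper` is a
/// hypothetical LP.jumper for p.meta.parser; the sketch is unverified.
(*
   let n  = length p my_jumper in   // n == p.meta.len
   let bs = to_bytes p n in         // FStar.Bytes.reveal bs == p.meta.repr_bytes
   ...
*)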
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid as long
as `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness".
TODO: This also relies on an as-yet-unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t = p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
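/// Illustrative usage sketch (editorial addition): copying a transient repr_ptr
/// into a long-lived region and recalling its validity later, after unrelated
/// state updates. `rgn`, `p` and `p_len` are hypothetical; the sketch is unverified.
(*
   let stable = stash rgn p p_len in           // stable_region_repr_ptr rgn t
   ... // arbitrary code that may modify other locations
   recall_stable_region_repr_ptr rgn stable;   // re-establishes valid stable h
*)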
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
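/// Illustrative usage sketch (editorial addition): a message module would build
/// an `unfold` field_accessor instance from its LowParse accessor, jumper and
/// parser32, then read a sub-message with `get_field`. All `my_*` names are
/// hypothetical; the sketch is unverified.
(*
   unfold noextract
   let my_field : field_accessor my_msg_parser my_sub_parser =
     FieldAccessor my_accessor my_sub_jumper my_sub_parser32

   let get_my_field (p:repr_ptr_p my_msg my_msg_parser) = get_field my_field p
*)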
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice) = i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the
/// sub-slice b.[from, to)
let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r)
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
let frame_valid_repr_pos #t #b (r:repr_pos t b) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid_repr_pos r h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp_pos r) l)
(ensures
valid_repr_pos r h1)
[SMTPat (valid_repr_pos r h1);
SMTPat (B.modifies l h0 h1)]
= ()
(*** Operations on repr_pos ***)
/// End position | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: LowParse.Repr.repr_pos t b -> LowParse.Repr.index b | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos",
"FStar.Integers.op_Plus",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"LowParse.Repr.__proj__Pos__item__start_pos",
"LowParse.Repr.__proj__Pos__item__length",
"LowParse.Repr.index"
] | [] | false | false | false | false | false | let end_pos #t #b (r: repr_pos t b) : index b =
| r.start_pos + r.length | false |
LowParse.Repr.fsti | LowParse.Repr.read_field_pos_t | val read_field_pos_t : f: LowParse.Repr.field_reader p1 t2 -> Type | let read_field_pos_t (#k1: strong_parser_kind) (#t1: Type) (#p1: LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
= (#b:const_slice) ->
(p:repr_pos_p t1 b p1) ->
Stack t2
(requires fun h ->
valid_repr_pos p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value_pos p)) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 44,
"end_line": 953,
"start_col": 0,
"start_line": 943
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by erased the `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition, and then
/// make it `abstract` via the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid as long
as `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness".
TODO: This also relies on an as-yet-unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t = p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice) = i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the
/// sub-slice b.[from, to)
let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r)
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
let frame_valid_repr_pos #t #b (r:repr_pos t b) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid_repr_pos r h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp_pos r) l)
(ensures
valid_repr_pos r h1)
[SMTPat (valid_repr_pos r h1);
SMTPat (B.modifies l h0 h1)]
= ()
(*** Operations on repr_pos ***)
/// End position
/// On positional reprs, this is concretely computable
let end_pos #t #b (r:repr_pos t b)
: index b
= r.start_pos + r.length
let valid_repr_pos_elim
(#t: Type)
(#b: const_slice)
(r: repr_pos t b)
(h: HS.mem)
: Lemma
(requires (
valid_repr_pos r h
))
(ensures (
LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r)
))
= reveal_valid ();
let p : repr_ptr t = as_ptr_spec r in
let slice = slice_of_const_buffer (Ptr?.b p) (Ptr?.meta p).len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h (to_slice b) r.start_pos;
LP.parse_strong_prefix r.meta.parser (LP.bytes_of_slice_from h slice 0ul) (LP.bytes_of_slice_from h (to_slice b) r.start_pos)
/// Mostly just by inheriting operations on pointers
let as_ptr #t #b (r:repr_pos t b)
: Stack (repr_ptr t)
(requires fun h ->
valid_repr_pos r h)
(ensures fun h0 ptr h1 ->
ptr == as_ptr_spec r /\
h0 == h1)
= let b = C.sub b.base r.start_pos (Ghost.hide r.meta.len) in
let m = r.meta in
let v = r.vv_pos in
let l = r.length in
Ptr b m v l
let as_repr_pos #t (b:const_slice) (from to:index b) (p:repr_ptr t)
: Pure (repr_pos t b)
(requires
from <= to /\
Ptr?.b p == C.gsub b.base from (to - from))
(ensures fun r ->
p == as_ptr_spec r)
= Pos from (Ptr?.meta p) (Ptr?.vv p) (to - from)
/// `mk_repr_pos b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:LP.slice mut_p mut_p)
(from to:index (of_slice b))
: Stack (repr_pos_p t (of_slice b) parser)
(requires fun h ->
LP.valid_pos parser h b from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 b from)
= as_repr_pos (of_slice b) from to (mk parser32 b from to)
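/// Illustrative usage sketch (editorial addition): positional reprs are built
/// like pointer reprs, but relative to an enclosing slice; `as_ptr` recovers
/// the corresponding repr_ptr when needed. Hypothetical names; unverified.
(*
   let r = mk_repr_pos my_parser32 b from to in
   // r.start_pos = from, end_pos r = to, and valid_repr_pos r h holds
   let p = as_ptr r in
   ...
*)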
/// `mk b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:index b)
: Stack (repr_pos_p t b parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 (to_slice b) from)
= as_repr_pos b from to (mk_from_const_slice parser32 b from to)
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_repr_pos_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:index (of_slice b))
(x: t)
: Stack (option (repr_pos_p t (of_slice b) parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 r h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
begin match r with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some r ->
valid_repr_pos r h1 /\
r.start_pos == from /\
r.vv_pos == x /\
v (end_pos r) = v from + v (size32 x)
end
)
= let size = size32 x in
match (mk_from_serialize parser32 serializer32 size32 b from x) with
| None -> None
| Some p -> Some (as_repr_pos (of_slice b) from (from + size) p)
/// Accessors on positional reprs
unfold
let field_accessor_pos_post (#b:const_slice) (#t1:Type) (p:repr_pos t1 b)
(#k2: strong_parser_kind)
(#t2:Type)
(#p2: LP.parser k2 t2)
(f:field_accessor p.meta.parser p2) =
fun h0 (q:repr_pos_p t2 b p2) h1 ->
let cl = FieldAccessor?.cl f in
cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid_repr_pos q h1 /\
value_pos q == cl.LP.clens_get (value_pos p)
unfold
let get_field_pos_t (#k1: strong_parser_kind) (#t1: Type) (#p1: LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2: Type) (#p2: LP.parser k2 t2)
(f:field_accessor p1 p2)
= (#b:const_slice) ->
(pp:repr_pos_p t1 b p1) ->
Stack (repr_pos_p t2 b p2)
(requires fun h ->
let cl = FieldAccessor?.cl f in
valid_repr_pos pp h /\
cl.LP.clens_cond pp.meta.v)
(ensures
field_accessor_pos_post pp f)
inline_for_extraction
let get_field_pos (#k1: strong_parser_kind) (#t1: Type) (#p1: LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2: Type) (#p2: LP.parser k2 t2)
(f:field_accessor p1 p2)
: get_field_pos_t f
= reveal_valid ();
fun #b pp ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let p = as_ptr pp in
let bb = temp_slice_of_repr_ptr p in
let pos = acc bb 0ul in
let pos_to = jump bb pos in
let q = mk p2' bb pos pos_to in
let len = pos_to - pos in
assert (Ptr?.b q `C.const_sub_buffer pos len` Ptr?.b p);
as_repr_pos b (pp.start_pos + pos) (pp.start_pos + pos + len) q | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: LowParse.Repr.field_reader p1 t2 -> Type | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Repr.field_reader",
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos_p",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowParse.Repr.valid_repr_pos",
"LowParse.Low.Base.Spec.__proj__Mkclens__item__clens_cond",
"LowParse.Repr.__proj__FieldReader__item__cl",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowParse.Repr.__proj__Pos__item__meta",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.eq2",
"LowParse.Low.Base.Spec.__proj__Mkclens__item__clens_get",
"LowParse.Repr.value_pos"
] | [] | false | false | false | false | true | let read_field_pos_t
(#k1: strong_parser_kind)
(#t1: Type)
(#p1: LP.parser k1 t1)
#t2
(f: field_reader p1 t2)
=
| #b: const_slice -> p: repr_pos_p t1 b p1
-> Stack t2
(requires fun h -> valid_repr_pos p h /\ f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 -> B.modifies B.loc_none h0 h1 /\ pv == f.cl.LP.clens_get (value_pos p)) | false |
|
Vale.X64.Lemmas.fst | Vale.X64.Lemmas.lemma_whileMerge_total | val lemma_whileMerge_total (c:code) (s0:vale_state) (f0:fuel) (sM:vale_state) (fM:fuel) (sN:vale_state) : Ghost fuel
(requires While? c /\ (
let cond = While?.whileCond c in
sN.vs_ok /\
valid_ocmp cond sM /\
eval_ocmp sM cond /\
eval_while_inv c s0 f0 sM /\
eval_code (While?.whileBody c) ({sM with vs_flags = havoc_flags}) fM sN
))
(ensures (fun fN ->
eval_while_inv c s0 fN sN
)) | val lemma_whileMerge_total (c:code) (s0:vale_state) (f0:fuel) (sM:vale_state) (fM:fuel) (sN:vale_state) : Ghost fuel
(requires While? c /\ (
let cond = While?.whileCond c in
sN.vs_ok /\
valid_ocmp cond sM /\
eval_ocmp sM cond /\
eval_while_inv c s0 f0 sM /\
eval_code (While?.whileBody c) ({sM with vs_flags = havoc_flags}) fM sN
))
(ensures (fun fN ->
eval_while_inv c s0 fN sN
)) | let lemma_whileMerge_total (c:code) (s0:vale_state) (f0:fuel) (sM:vale_state) (fM:fuel) (sN:vale_state) =
reveal_opaque (`%BS.valid_ocmp_opaque) BS.valid_ocmp_opaque;
reveal_opaque (`%BS.eval_ocmp_opaque) BS.eval_ocmp_opaque;
let fN:nat = f0 + fM + 1 in
let g = code_modifies_ghost c in
let fForall (f:nat) : Lemma
(requires Some? (BS.machine_eval_code c f (state_to_S sN)))
(ensures state_eq_opt g (BS.machine_eval_code c (f + fN) (state_to_S s0)) (BS.machine_eval_code c f (state_to_S sN)))
[SMTPat (BS.machine_eval_code c f (state_to_S sN))]
=
let Some sZ = BS.machine_eval_code c f (state_to_S sN) in
let fZ = if f > fM then f else fM in
let sM' = {sM with vs_flags = havoc_flags} in
increase_fuel (code_modifies_ghost c) (While?.whileBody c) (state_to_S sM') fM (state_to_S sN) fZ;
increase_fuel (code_modifies_ghost c) c (state_to_S sN) f sZ fZ;
assert (state_eq_opt g (BS.machine_eval_code c (fZ + 1) (state_to_S sM)) (Some sZ)); // via eval_code for While
assert (state_eq_opt g (BS.machine_eval_code c (fZ + 1) (state_to_S sM)) (BS.machine_eval_code c (fZ + 1 + f0) (state_to_S s0))); // via eval_while_inv, choosing f = fZ + 1
// Two assertions above prove (BS.machine_eval_code c (fZ + 1 + f0) (state_to_S s0)) equals (Some sZ)
// increase_fuel (code_modifies_ghost c) c s0 (fZ + 1 + f0) (state_of_S s0 sZ) (f + fN);
increase_fuel (code_modifies_ghost c) c (state_to_S s0) (fZ + 1 + f0) sZ (f + fN);
assert (state_eq_opt g (BS.machine_eval_code c (f + fN) (state_to_S s0)) (Some sZ));
()
in
fN | {
"file_name": "vale/code/arch/x64/Vale.X64.Lemmas.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 4,
"end_line": 423,
"start_col": 0,
"start_line": 398
} | module Vale.X64.Lemmas
open FStar.Mul
open Vale.X64.Machine_s
open Vale.X64.State
open Vale.X64.StateLemmas
open Vale.X64.Instruction_s
open Vale.X64.Bytes_Code_s
module BS = Vale.X64.Machine_Semantics_s
module ME = Vale.X64.Memory
#reset-options "--initial_fuel 1 --max_fuel 1 --z3rlimit 100"
#restart-solver
let rec lemma_eq_instr_apply_eval_args
(outs:list instr_out) (args:list instr_operand)
(f:instr_args_t outs args) (oprs:instr_operands_t_args args) (s1 s2:machine_state)
: Lemma
(requires state_eq_S true s1 s2)
(ensures
BS.instr_apply_eval_args outs args f oprs s1 ==
BS.instr_apply_eval_args outs args f oprs s2)
=
let open BS in
lemma_heap_ignore_ghost_machine s1.BS.ms_heap s2.BS.ms_heap;
match args with
| [] -> ()
| i::args ->
(
let (v, oprs) : option (instr_val_t i) & instr_operands_t_args args =
match i with
| IOpEx i -> let oprs = coerce oprs in (instr_eval_operand_explicit i (fst oprs) s1, snd oprs)
| IOpIm i -> (instr_eval_operand_implicit i s1, coerce oprs)
in
let f:arrow (instr_val_t i) (instr_args_t outs args) = coerce f in
match v with
| None -> ()
| Some v -> lemma_eq_instr_apply_eval_args outs args (f v) oprs s1 s2
)
#restart-solver
let rec lemma_eq_instr_apply_eval_inouts
(outs inouts:list instr_out) (args:list instr_operand)
(f:instr_inouts_t outs inouts args) (oprs:instr_operands_t inouts args) (s1 s2:machine_state)
: Lemma
(requires state_eq_S true s1 s2)
(ensures
BS.instr_apply_eval_inouts outs inouts args f oprs s1 ==
BS.instr_apply_eval_inouts outs inouts args f oprs s2)
=
let open BS in
lemma_heap_ignore_ghost_machine s1.BS.ms_heap s2.BS.ms_heap;
match inouts with
| [] -> lemma_eq_instr_apply_eval_args outs args f oprs s1 s2
| (Out, i)::inouts ->
let oprs =
match i with
| IOpEx i -> snd #(instr_operand_t i) (coerce oprs)
| IOpIm i -> coerce oprs
in
lemma_eq_instr_apply_eval_inouts outs inouts args (coerce f) oprs s1 s2
| (InOut, i)::inouts ->
(
let (v, oprs) : option (instr_val_t i) & instr_operands_t inouts args =
match i with
| IOpEx i -> let oprs = coerce oprs in (instr_eval_operand_explicit i (fst oprs) s1, snd oprs)
| IOpIm i -> (instr_eval_operand_implicit i s1, coerce oprs)
in
let f:arrow (instr_val_t i) (instr_inouts_t outs inouts args) = coerce f in
match v with
| None -> ()
| Some v -> lemma_eq_instr_apply_eval_inouts outs inouts args (f v) oprs s1 s2
)
#restart-solver
#push-options "--z3rlimit_factor 2"
let rec lemma_eq_instr_write_outputs
(outs:list instr_out) (args:list instr_operand)
(vs:instr_ret_t outs) (oprs:instr_operands_t outs args) (s1_orig s1 s2_orig s2:machine_state)
: Lemma
(requires state_eq_S true s1_orig s2_orig /\ state_eq_S true s1 s2)
(ensures
state_eq_S true
(BS.instr_write_outputs outs args vs oprs s1_orig s1)
(BS.instr_write_outputs outs args vs oprs s2_orig s2))
=
let open BS in
use_machine_state_equal ();
lemma_heap_ignore_ghost_machine s1.BS.ms_heap s2.BS.ms_heap;
lemma_heap_ignore_ghost_machine s1_orig.BS.ms_heap s2_orig.BS.ms_heap;
allow_inversion tmaddr;
match outs with
| [] -> ()
| (_, i)::outs ->
(
let ((v:instr_val_t i), (vs:instr_ret_t outs)) =
match outs with
| [] -> (vs, ())
| _::_ -> let vs = coerce vs in (fst vs, snd vs)
in
match i with
| IOpEx i ->
let oprs = coerce oprs in
let s1 = instr_write_output_explicit i v (fst oprs) s1_orig s1 in
let s2 = instr_write_output_explicit i v (fst oprs) s2_orig s2 in
lemma_eq_instr_write_outputs outs args vs (snd oprs) s1_orig s1 s2_orig s2
| IOpIm i ->
let s1 = instr_write_output_implicit i v s1_orig s1 in
let s2 = instr_write_output_implicit i v s2_orig s2 in
allow_inversion operand64;
allow_inversion operand128;
lemma_eq_instr_write_outputs outs args vs (coerce oprs) s1_orig s1 s2_orig s2
)
#pop-options
#restart-solver
let eval_ins_eq_instr (inst:BS.ins) (s1 s2:machine_state) : Lemma
(requires Instr? inst /\ state_eq_S true s1 s2)
(ensures state_eq_S true (BS.machine_eval_ins inst s1) (BS.machine_eval_ins inst s2))
=
let open BS in
let Instr it oprs ann = inst in
let InstrTypeRecord #outs #args #havoc_flags' i = it in
lemma_eq_instr_apply_eval_inouts outs outs args (instr_eval i) oprs s1 s2;
let vs = instr_apply_eval outs args (instr_eval i) oprs s1 in
let hav s =
match havoc_flags' with
| HavocFlags -> {s with ms_flags = havoc_flags}
| PreserveFlags -> s
in
let s1' = hav s1 in
let s2' = hav s2 in
match vs with
| None -> ()
| Some vs -> lemma_eq_instr_write_outputs outs args vs oprs s1 s1' s2 s2'
let eval_code_eq_instr (inst:BS.ins) (f:fuel) (s1 s2:machine_state) : Lemma
(requires Instr? inst /\ state_eq_S true s1 s2)
(ensures state_eq_opt true (BS.machine_eval_code (Ins inst) f s1) (BS.machine_eval_code (Ins inst) f s2))
=
reveal_opaque (`%BS.machine_eval_code_ins) BS.machine_eval_code_ins;
eval_ins_eq_instr inst ({s1 with BS.ms_trace = []}) ({s2 with BS.ms_trace = []})
let eval_code_eq_dealloc (inst:BS.ins) (f:fuel) (s1 s2:machine_state) : Lemma
(requires Dealloc? inst /\ state_eq_S true s1 s2)
(ensures state_eq_opt true (BS.machine_eval_code (Ins inst) f s1) (BS.machine_eval_code (Ins inst) f s2))
=
reveal_opaque (`%BS.machine_eval_code_ins) BS.machine_eval_code_ins;
use_machine_state_equal ();
lemma_heap_ignore_ghost_machine s1.BS.ms_heap s2.BS.ms_heap;
allow_inversion tmaddr
let eval_code_eq_alloc (inst:BS.ins) (f:fuel) (s1 s2:machine_state) : Lemma
(requires Alloc? inst /\ state_eq_S true s1 s2)
(ensures state_eq_opt true (BS.machine_eval_code (Ins inst) f s1) (BS.machine_eval_code (Ins inst) f s2))
=
reveal_opaque (`%BS.machine_eval_code_ins) BS.machine_eval_code_ins;
use_machine_state_equal ();
lemma_heap_ignore_ghost_machine s1.BS.ms_heap s2.BS.ms_heap;
allow_inversion tmaddr
let eval_code_eq_push (inst:BS.ins) (f:fuel) (s1 s2:machine_state) : Lemma
(requires Push? inst /\ state_eq_S true s1 s2)
(ensures state_eq_opt true (BS.machine_eval_code (Ins inst) f s1) (BS.machine_eval_code (Ins inst) f s2))
=
reveal_opaque (`%BS.machine_eval_code_ins) BS.machine_eval_code_ins;
use_machine_state_equal ();
lemma_heap_ignore_ghost_machine s1.BS.ms_heap s2.BS.ms_heap;
allow_inversion tmaddr
let eval_code_eq_pop (inst:BS.ins) (f:fuel) (s1 s2:machine_state) : Lemma
(requires Pop? inst /\ state_eq_S true s1 s2)
(ensures state_eq_opt true (BS.machine_eval_code (Ins inst) f s1) (BS.machine_eval_code (Ins inst) f s2))
=
reveal_opaque (`%BS.machine_eval_code_ins) BS.machine_eval_code_ins;
use_machine_state_equal ();
lemma_heap_ignore_ghost_machine s1.BS.ms_heap s2.BS.ms_heap;
allow_inversion tmaddr
let eval_code_eq_ins (i:BS.ins) (f:fuel) (s1 s2:machine_state) : Lemma
(requires state_eq_S true s1 s2)
(ensures state_eq_opt true (BS.machine_eval_code (Ins i) f s1) (BS.machine_eval_code (Ins i) f s2))
=
match i with
| Instr _ _ _ -> eval_code_eq_instr i f s1 s2
| Dealloc _ -> eval_code_eq_dealloc i f s1 s2
| Alloc _ -> eval_code_eq_alloc i f s1 s2
| Push _ _ -> eval_code_eq_push i f s1 s2
| Pop _ _ -> eval_code_eq_pop i f s1 s2
#reset-options "--fuel 2 --z3rlimit 30"
let eval_ocmp_eq_core (g:bool) (cond:ocmp) (s:machine_state) : Lemma
(ensures (
let (s1, b1) = BS.machine_eval_ocmp s cond in
let (s2, b2) = BS.machine_eval_ocmp (core_state g s) cond in
state_eq_S g s1 s2 /\ b1 == b2
))
=
reveal_opaque (`%BS.valid_ocmp_opaque) BS.valid_ocmp_opaque;
reveal_opaque (`%BS.eval_ocmp_opaque) BS.eval_ocmp_opaque;
()
#restart-solver
let rec eval_code_eq_core (g:bool) (c:code) (f:fuel) (s:machine_state) : Lemma
(ensures state_eq_opt g (BS.machine_eval_code c f s) (BS.machine_eval_code c f (core_state g s)))
(decreases %[f; c])
=
match c with
| Ins i ->
reveal_opaque (`%BS.machine_eval_code_ins) BS.machine_eval_code_ins;
if g then eval_code_eq_ins i f s (core_state g s)
| Block cs -> eval_codes_eq_core g cs f s
| IfElse cond ct cf ->
eval_ocmp_eq_core g cond s;
let (s', _) = BS.machine_eval_ocmp s cond in
let (t', _) = BS.machine_eval_ocmp (core_state g s) cond in
eval_code_eq_core g ct f s';
eval_code_eq_core g ct f t';
eval_code_eq_core g cf f s';
eval_code_eq_core g cf f t';
()
| While cond body -> eval_while_eq_core g cond body f s
and eval_codes_eq_core (g:bool) (cs:codes) (f:fuel) (s:machine_state) : Lemma
(ensures state_eq_opt g (BS.machine_eval_codes cs f s) (BS.machine_eval_codes cs f (core_state g s)))
(decreases %[f; cs])
=
match cs with
| [] -> ()
| c'::cs' -> (
eval_code_eq_core g c' f s;
match (machine_eval_code c' f s, machine_eval_code c' f (core_state g s)) with
| (None, None) -> ()
| (Some s', Some t') -> eval_codes_eq_core g cs' f s'; eval_codes_eq_core g cs' f t'
)
and eval_while_eq_core (g:bool) (cond:ocmp) (body:code) (f:fuel) (s:machine_state) : Lemma
(ensures state_eq_opt g (BS.machine_eval_while cond body f s) (BS.machine_eval_while cond body f (core_state g s)))
(decreases %[f; body])
=
if f > 0 then (
eval_ocmp_eq_core g cond s;
let (s1, _) = BS.machine_eval_ocmp s cond in
let (t1, _) = BS.machine_eval_ocmp (core_state g s) cond in
eval_code_eq_core g body (f - 1) s1;
eval_code_eq_core g body (f - 1) t1;
match (BS.machine_eval_code body (f - 1) s1, BS.machine_eval_code body (f - 1) t1) with
| (None, None) -> ()
| (Some s2, Some t2) ->
eval_while_eq_core g cond body (f - 1) s2;
eval_while_eq_core g cond body (f - 1) t2;
()
)
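// The six wrapper lemmas that follow specialize the mutually recursive *_eq_core lemmas above to
// g=false and g=true, and carry SMT patterns so the equivalence of evaluations fires automatically
// in later proofs.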
let eval_code_eq_f (c:code) (f:fuel) (s1 s2:machine_state) : Lemma
(requires state_eq_S false s1 s2)
(ensures state_eq_opt false (BS.machine_eval_code c f s1) (BS.machine_eval_code c f s2))
[SMTPat (BS.machine_eval_code c f s1); SMTPat (BS.machine_eval_code c f s2)]
=
eval_code_eq_core false c f s1; eval_code_eq_core false c f s2
let eval_codes_eq_f (cs:codes) (f:fuel) (s1 s2:machine_state) : Lemma
(requires state_eq_S false s1 s2)
(ensures state_eq_opt false (BS.machine_eval_codes cs f s1) (BS.machine_eval_codes cs f s2))
[SMTPat (BS.machine_eval_codes cs f s1); SMTPat (BS.machine_eval_codes cs f s2)]
=
eval_codes_eq_core false cs f s1; eval_codes_eq_core false cs f s2
let eval_while_eq_f (cond:ocmp) (body:code) (f:fuel) (s1 s2:machine_state) : Lemma
(requires state_eq_S false s1 s2)
(ensures state_eq_opt false (BS.machine_eval_while cond body f s1) (BS.machine_eval_while cond body f s2))
[SMTPat (BS.machine_eval_while cond body f s1); SMTPat (BS.machine_eval_while cond body f s2)]
=
eval_while_eq_core false cond body f s1; eval_while_eq_core false cond body f s2
let eval_code_eq_t (c:code) (f:fuel) (s1 s2:machine_state) : Lemma
(requires state_eq_S true s1 s2)
(ensures state_eq_opt true (BS.machine_eval_code c f s1) (BS.machine_eval_code c f s2))
[SMTPat (BS.machine_eval_code c f s1); SMTPat (BS.machine_eval_code c f s2)]
=
eval_code_eq_core true c f s1; eval_code_eq_core true c f s2
let eval_codes_eq_t (cs:codes) (f:fuel) (s1 s2:machine_state) : Lemma
(requires state_eq_S true s1 s2)
(ensures state_eq_opt true (BS.machine_eval_codes cs f s1) (BS.machine_eval_codes cs f s2))
[SMTPat (BS.machine_eval_codes cs f s1); SMTPat (BS.machine_eval_codes cs f s2)]
=
eval_codes_eq_core true cs f s1; eval_codes_eq_core true cs f s2
let eval_while_eq_t (cond:ocmp) (body:code) (f:fuel) (s1 s2:machine_state) : Lemma
(requires state_eq_S true s1 s2)
(ensures state_eq_opt true (BS.machine_eval_while cond body f s1) (BS.machine_eval_while cond body f s2))
[SMTPat (BS.machine_eval_while cond body f s1); SMTPat (BS.machine_eval_while cond body f s2)]
=
eval_while_eq_core true cond body f s1; eval_while_eq_core true cond body f s2
let eval_code_ts (g:bool) (c:code) (s0:machine_state) (f0:fuel) (s1:machine_state) : Type0 =
state_eq_opt g (BS.machine_eval_code c f0 s0) (Some s1)
let rec increase_fuel (g:bool) (c:code) (s0:machine_state) (f0:fuel) (sN:machine_state) (fN:fuel) : Lemma
(requires eval_code_ts g c s0 f0 sN /\ f0 <= fN)
(ensures eval_code_ts g c s0 fN sN)
(decreases %[f0; c])
=
match c with
| Ins ins -> ()
| Block l -> increase_fuels g l s0 f0 sN fN
| IfElse cond t f ->
let (s0, b0) = BS.machine_eval_ocmp s0 cond in
if b0 then increase_fuel g t s0 f0 sN fN else increase_fuel g f s0 f0 sN fN
| While cond c ->
let (s1, b0) = BS.machine_eval_ocmp s0 cond in
if b0 then (
match BS.machine_eval_code c (f0 - 1) s1 with
| None -> ()
| Some s2 ->
increase_fuel g c s1 (f0 - 1) s2 (fN - 1);
if s2.BS.ms_ok then increase_fuel g (While cond c) s2 (f0 - 1) sN (fN - 1)
else ()
)
and increase_fuels (g:bool) (c:codes) (s0:machine_state) (f0:fuel) (sN:machine_state) (fN:fuel) : Lemma
(requires eval_code_ts g (Block c) s0 f0 sN /\ f0 <= fN)
(ensures eval_code_ts g (Block c) s0 fN sN)
(decreases %[f0; c])
=
match c with
| [] -> ()
| h::t ->
(
let Some s1 = BS.machine_eval_code h f0 s0 in
increase_fuel g h s0 f0 s1 fN;
increase_fuels g t s1 f0 sN fN
)
let lemma_cmp_eq s o1 o2 = reveal_opaque (`%BS.eval_ocmp_opaque) BS.eval_ocmp_opaque
let lemma_cmp_ne s o1 o2 = reveal_opaque (`%BS.eval_ocmp_opaque) BS.eval_ocmp_opaque
let lemma_cmp_le s o1 o2 = reveal_opaque (`%BS.eval_ocmp_opaque) BS.eval_ocmp_opaque
let lemma_cmp_ge s o1 o2 = reveal_opaque (`%BS.eval_ocmp_opaque) BS.eval_ocmp_opaque
let lemma_cmp_lt s o1 o2 = reveal_opaque (`%BS.eval_ocmp_opaque) BS.eval_ocmp_opaque
let lemma_cmp_gt s o1 o2 = reveal_opaque (`%BS.eval_ocmp_opaque) BS.eval_ocmp_opaque
let lemma_valid_cmp_eq s o1 o2 = ()
let lemma_valid_cmp_ne s o1 o2 = ()
let lemma_valid_cmp_le s o1 o2 = ()
let lemma_valid_cmp_ge s o1 o2 = ()
let lemma_valid_cmp_lt s o1 o2 = ()
let lemma_valid_cmp_gt s o1 o2 = ()
let compute_merge_total (f0:fuel) (fM:fuel) =
if f0 > fM then f0 else fM
let lemma_merge_total (b0:codes) (s0:vale_state) (f0:fuel) (sM:vale_state) (fM:fuel) (sN:vale_state) =
let f = if f0 > fM then f0 else fM in
increase_fuel (codes_modifies_ghost b0) (Cons?.hd b0) (state_to_S s0) f0 (state_to_S sM) f;
increase_fuel (codes_modifies_ghost b0) (Block (Cons?.tl b0)) (state_to_S sM) fM (state_to_S sN) f
let lemma_empty_total (s0:vale_state) (bN:codes) =
(s0, 0)
let lemma_ifElse_total (ifb:ocmp) (ct:code) (cf:code) (s0:vale_state) =
(eval_ocmp s0 ifb, {s0 with vs_flags = havoc_flags}, s0, 0)
let lemma_havoc_flags : squash (Flags.to_fun havoc_flags == BS.havoc_flags) =
assert (FStar.FunctionalExtensionality.feq (Flags.to_fun havoc_flags) BS.havoc_flags)
let lemma_ifElseTrue_total (ifb:ocmp) (ct:code) (cf:code) (s0:vale_state) (f0:fuel) (sM:vale_state) =
reveal_opaque (`%BS.valid_ocmp_opaque) BS.valid_ocmp_opaque;
reveal_opaque (`%BS.eval_ocmp_opaque) BS.eval_ocmp_opaque
let lemma_ifElseFalse_total (ifb:ocmp) (ct:code) (cf:code) (s0:vale_state) (f0:fuel) (sM:vale_state) =
reveal_opaque (`%BS.valid_ocmp_opaque) BS.valid_ocmp_opaque;
reveal_opaque (`%BS.eval_ocmp_opaque) BS.eval_ocmp_opaque
let eval_while_inv_temp (c:code) (s0:vale_state) (fW:fuel) (sW:vale_state) : Type0 =
forall (f:nat).{:pattern BS.machine_eval_code c f (state_to_S sW)}
Some? (BS.machine_eval_code c f (state_to_S sW)) ==>
state_eq_opt (code_modifies_ghost c)
(BS.machine_eval_code c (f + fW) (state_to_S s0))
(BS.machine_eval_code c f (state_to_S sW))
let eval_while_inv (c:code) (s0:vale_state) (fW:fuel) (sW:vale_state) : Type0 =
eval_while_inv_temp c s0 fW sW
let lemma_while_total (b:ocmp) (c:code) (s0:vale_state) =
(s0, 0)
let lemma_whileTrue_total (b:ocmp) (c:code) (s0:vale_state) (sW:vale_state) (fW:fuel) =
({sW with vs_flags = havoc_flags}, fW)
let lemma_whileFalse_total (b:ocmp) (c:code) (s0:vale_state) (sW:vale_state) (fW:fuel) =
reveal_opaque (`%BS.valid_ocmp_opaque) BS.valid_ocmp_opaque;
reveal_opaque (`%BS.eval_ocmp_opaque) BS.eval_ocmp_opaque;
let f1 = fW + 1 in
let s1 = {sW with vs_flags = havoc_flags} in
assert (state_eq_opt (code_modifies_ghost c) (BS.machine_eval_code (While b c) f1 (state_to_S s0)) (BS.machine_eval_code (While b c) 1 (state_to_S sW)));
assert (eval_code (While b c) s0 f1 s1);
(s1, f1) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.StateLemmas.fsti.checked",
"Vale.X64.State.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Instruction_s.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Bytes_Code_s.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Lemmas.fst"
} | [
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Instruction_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 2,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
c: Vale.X64.StateLemmas.code ->
s0: Vale.X64.State.vale_state ->
f0: Vale.X64.Lemmas.fuel ->
sM: Vale.X64.State.vale_state ->
fM: Vale.X64.Lemmas.fuel ->
sN: Vale.X64.State.vale_state
-> Prims.Ghost Vale.X64.Lemmas.fuel | Prims.Ghost | [] | [] | [
"Vale.X64.StateLemmas.code",
"Vale.X64.State.vale_state",
"Vale.X64.Lemmas.fuel",
"Prims.nat",
"Prims.unit",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"Vale.X64.Machine_Semantics_s.machine_state",
"Vale.X64.Machine_Semantics_s.machine_eval_code",
"Vale.X64.StateLemmas.state_to_S",
"Prims.squash",
"Vale.X64.Lemmas.state_eq_opt",
"Prims.op_Addition",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"FStar.Pervasives.Native.option",
"Prims.Nil",
"Prims._assert",
"FStar.Pervasives.Native.Some",
"Vale.X64.Lemmas.increase_fuel",
"Vale.X64.Lemmas.code_modifies_ghost",
"Vale.X64.Machine_s.__proj__While__item__whileBody",
"Vale.X64.Bytes_Code_s.instruction_t",
"Vale.X64.Machine_Semantics_s.instr_annotation",
"Vale.X64.Bytes_Code_s.ocmp",
"Vale.X64.State.Mkvale_state",
"Vale.X64.State.__proj__Mkvale_state__item__vs_ok",
"Vale.X64.State.__proj__Mkvale_state__item__vs_regs",
"Vale.X64.Lemmas.havoc_flags",
"Vale.X64.State.__proj__Mkvale_state__item__vs_heap",
"Vale.X64.State.__proj__Mkvale_state__item__vs_stack",
"Vale.X64.State.__proj__Mkvale_state__item__vs_stackTaint",
"Prims.int",
"Prims.l_and",
"Prims.op_GreaterThanOrEqual",
"Prims.op_GreaterThan",
"Prims.bool",
"FStar.Pervasives.reveal_opaque",
"Vale.X64.Machine_Semantics_s.ocmp",
"Vale.X64.Machine_Semantics_s.eval_ocmp_opaque",
"Vale.X64.Machine_Semantics_s.valid_ocmp_opaque"
] | [] | false | false | false | false | false | let lemma_whileMerge_total
(c: code)
(s0: vale_state)
(f0: fuel)
(sM: vale_state)
(fM: fuel)
(sN: vale_state)
=
| reveal_opaque (`%BS.valid_ocmp_opaque) BS.valid_ocmp_opaque;
reveal_opaque (`%BS.eval_ocmp_opaque) BS.eval_ocmp_opaque;
let fN:nat = f0 + fM + 1 in
let g = code_modifies_ghost c in
let fForall (f: nat)
: Lemma (requires Some? (BS.machine_eval_code c f (state_to_S sN)))
(ensures
state_eq_opt g
(BS.machine_eval_code c (f + fN) (state_to_S s0))
(BS.machine_eval_code c f (state_to_S sN)))
[SMTPat (BS.machine_eval_code c f (state_to_S sN))] =
let Some sZ = BS.machine_eval_code c f (state_to_S sN) in
let fZ = if f > fM then f else fM in
let sM' = { sM with vs_flags = havoc_flags } in
increase_fuel (code_modifies_ghost c) (While?.whileBody c) (state_to_S sM') fM (state_to_S sN) fZ;
increase_fuel (code_modifies_ghost c) c (state_to_S sN) f sZ fZ;
assert (state_eq_opt g (BS.machine_eval_code c (fZ + 1) (state_to_S sM)) (Some sZ));
assert (state_eq_opt g
(BS.machine_eval_code c (fZ + 1) (state_to_S sM))
(BS.machine_eval_code c (fZ + 1 + f0) (state_to_S s0)));
increase_fuel (code_modifies_ghost c) c (state_to_S s0) (fZ + 1 + f0) sZ (f + fN);
assert (state_eq_opt g (BS.machine_eval_code c (f + fN) (state_to_S s0)) (Some sZ));
()
in
fN | false |
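(* The auxiliary lemma fForall above, installed with an SMT pattern, discharges the universally
   quantified eval_while_inv obligation: for any fuel f for which the loop evaluates from sN,
   evaluating from s0 with f + fN fuel reaches an equivalent state. *)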
LowParse.Repr.fsti | LowParse.Repr.valid_repr_pos_elim | val valid_repr_pos_elim (#t: Type) (#b: const_slice) (r: repr_pos t b) (h: HS.mem)
: Lemma (requires (valid_repr_pos r h))
(ensures (LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r))) | val valid_repr_pos_elim (#t: Type) (#b: const_slice) (r: repr_pos t b) (h: HS.mem)
: Lemma (requires (valid_repr_pos r h))
(ensures (LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r))) | let valid_repr_pos_elim
(#t: Type)
(#b: const_slice)
(r: repr_pos t b)
(h: HS.mem)
: Lemma
(requires (
valid_repr_pos r h
))
(ensures (
LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r)
))
= reveal_valid ();
let p : repr_ptr t = as_ptr_spec r in
let slice = slice_of_const_buffer (Ptr?.b p) (Ptr?.meta p).len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h (to_slice b) r.start_pos;
LP.parse_strong_prefix r.meta.parser (LP.bytes_of_slice_from h slice 0ul) (LP.bytes_of_slice_from h (to_slice b) r.start_pos) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 127,
"end_line": 795,
"start_col": 0,
"start_line": 778
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse properties:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
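(* Usage sketch (illustrative, not part of the original API): for an assumed concrete
   format with parser `p` and matching `p32 : LS.parser32 p`, a caller that has already
   established `LP.valid_pos p h s from to` on a slice `s` packages those bytes as a
   typed pointer with

     let r = mk p32 s from to in
     // valid r h' holds afterwards, and r.meta.v is the parsed value

   i.e. the valid_pos precondition is exactly what licenses the call. *)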
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
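(* Usage sketch (illustrative only): with an assumed serializer triple p32 / s32 / sz32
   for some format, writing a value `x` at offset `from` of a mutable slice `out` looks like

     match mk_from_serialize p32 s32 sz32 out from x with
     | None -> ...   // out has too little room after `from` for the encoding of x
     | Some r -> ... // valid r h1 holds and r.meta.v == x

   The None branch is the only failure mode: it signals insufficient space. *)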
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
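(* Typical pattern (sketch, with an assumed long-lived region `rgn`):

     let stable = stash rgn r len in
     // ... arbitrary heap updates that keep rgn live ...
     recall_stable_region_repr_ptr rgn stable;
     // valid stable h holds again here

   Copying into an immutable, region-allocated buffer buys back validity whenever the
   region is still live; this is what the monotonic-state machinery above is for. *)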
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
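(* Client-side sketch (the message format and its accessor / jumper / parser32 names
   below are assumptions, e.g. as produced by a format compiler; they are not defined
   in this module):

     unfold let msg_hdr_field : field_accessor msg_parser hdr_parser =
       FieldAccessor hdr_accessor hdr_jumper hdr_parser32

     let get_hdr (p:repr_ptr_p msg msg_parser) = get_field msg_hdr_field p

   The result is a repr_ptr for the field that is a sub_ptr of p, so it inherits
   stability in p's region; marking the instance `unfold` keeps the lens conditions
   and the extracted code small, as noted above. *)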
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the
/// sub-slice b.[from, to)
let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r)
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
let frame_valid_repr_pos #t #b (r:repr_pos t b) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid_repr_pos r h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp_pos r) l)
(ensures
valid_repr_pos r h1)
[SMTPat (valid_repr_pos r h1);
SMTPat (B.modifies l h0 h1)]
= ()
(*** Operations on repr_pos ***)
/// End position
/// On positional reprs, this is concretely computable
let end_pos #t #b (r:repr_pos t b)
: index b
= r.start_pos + r.length | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r: LowParse.Repr.repr_pos t b -> h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma (requires LowParse.Repr.valid_repr_pos r h)
(ensures
LowParse.Low.Base.Spec.valid_content_pos (Mkmeta?.parser (Pos?.meta r))
h
(LowParse.Repr.to_slice b)
(Pos?.start_pos r)
(Mkmeta?.v (Pos?.meta r))
(LowParse.Repr.end_pos r)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Spec.Base.parse_strong_prefix",
"LowParse.Repr.__proj__Mkmeta__item__parser_kind",
"LowParse.Repr.__proj__Pos__item__meta",
"LowParse.Repr.__proj__Mkmeta__item__parser",
"LowParse.Slice.bytes_of_slice_from",
"LowParse.Repr.preorder",
"LowParse.Repr.__proj__Ptr__item__b",
"FStar.UInt32.__uint_to_t",
"LowParse.Repr.__proj__MkSlice__item__base",
"LowParse.Repr.to_slice",
"LowParse.Repr.__proj__Pos__item__start_pos",
"Prims.unit",
"LowParse.Low.Base.Spec.valid_facts",
"LowParse.Slice.slice",
"LowParse.Repr.slice_of_const_buffer",
"LowParse.Repr.__proj__Mkmeta__item__len",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowParse.Repr.repr_ptr",
"LowParse.Repr.as_ptr_spec",
"LowParse.Repr.reveal_valid",
"LowParse.Repr.valid_repr_pos",
"Prims.squash",
"LowParse.Low.Base.Spec.valid_content_pos",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowParse.Repr.end_pos",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let valid_repr_pos_elim (#t: Type) (#b: const_slice) (r: repr_pos t b) (h: HS.mem)
: Lemma (requires (valid_repr_pos r h))
(ensures (LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r))) =
| reveal_valid ();
let p:repr_ptr t = as_ptr_spec r in
let slice = slice_of_const_buffer (Ptr?.b p) (Ptr?.meta p).len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h (to_slice b) r.start_pos;
LP.parse_strong_prefix r.meta.parser
(LP.bytes_of_slice_from h slice 0ul)
(LP.bytes_of_slice_from h (to_slice b) r.start_pos) | false |
LowParse.Repr.fsti | LowParse.Repr.mk_repr_pos_from_const_slice | val mk_repr_pos_from_const_slice
(#k: strong_parser_kind)
(#t: _)
(#parser: LP.parser k t)
(parser32: LS.parser32 parser)
(b: const_slice)
(from to: index b)
: Stack (repr_pos_p t b parser)
(requires fun h -> LP.valid_pos parser h (to_slice b) from to)
(ensures
fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\ valid_repr_pos r h1 /\ r.start_pos = from /\ end_pos r = to /\
r.vv_pos == LP.contents parser h1 (to_slice b) from) | val mk_repr_pos_from_const_slice
(#k: strong_parser_kind)
(#t: _)
(#parser: LP.parser k t)
(parser32: LS.parser32 parser)
(b: const_slice)
(from to: index b)
: Stack (repr_pos_p t b parser)
(requires fun h -> LP.valid_pos parser h (to_slice b) from to)
(ensures
fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\ valid_repr_pos r h1 /\ r.start_pos = from /\ end_pos r = to /\
r.vv_pos == LP.contents parser h1 (to_slice b) from) | let mk_repr_pos_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:index b)
: Stack (repr_pos_p t b parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 (to_slice b) from)
= as_repr_pos b from to (mk_from_const_slice parser32 b from to) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 66,
"end_line": 859,
"start_col": 0,
"start_line": 845
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse properties:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
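(* Editorial sketch (not machine-checked): for a pointer already known to be
   `valid_if_live` (i.e. backed by an immutable buffer), validity can be
   re-established from liveness of the underlying buffer alone:

     // requires: C.live h r.b, for r : stable_repr_ptr t
     recall_stable_repr_ptr r;
     // `valid r` holds again in the current state
*)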
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
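(* Editorial sketch (not machine-checked): `stash` copies a transient pointer
   into a caller-chosen region; the resulting stable pointer can later be
   revalidated from region liveness alone. `rgn` is a hypothetical ST.drgn.

     let q = stash rgn p p.length in
     // ... later, after arbitrary disjoint heap updates ...
     recall_stable_region_repr_ptr rgn q;
     // valid q holds again, with q.meta == p.meta
*)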
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
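(* Editorial sketch (not machine-checked): a concrete `field_accessor` packages
   a LowParse accessor, a jumper and a parser32 for the field type, and is
   declared `unfold` as advised above. All `my_*` names are hypothetical.

     unfold let my_field : field_accessor my_msg_parser my_field_parser =
       FieldAccessor my_field_acc my_field_jumper my_field_parser32

     // let q = get_field my_field p in
     // q is a repr_ptr to the field, aliasing p's buffer (q `sub_ptr` p)
*)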
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
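(* Editorial sketch (not machine-checked): when the field type has a leaf
   reader, `read_field` returns the field's value directly, with no new
   pointer. All `my_*` names are hypothetical.

     unfold let my_tag_field : field_reader my_msg_parser my_tag_t =
       FieldReader my_tag_acc my_tag_leaf_reader

     // let tag = read_field my_tag_field p in ...
*)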
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the
/// sub-slice b.[from, to)
let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r)
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
let frame_valid_repr_pos #t #b (r:repr_pos t b) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid_repr_pos r h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp_pos r) l)
(ensures
valid_repr_pos r h1)
[SMTPat (valid_repr_pos r h1);
SMTPat (B.modifies l h0 h1)]
= ()
(*** Operations on repr_pos ***)
/// End position
/// On positional reprs, this is concretely computable
let end_pos #t #b (r:repr_pos t b)
: index b
= r.start_pos + r.length
let valid_repr_pos_elim
(#t: Type)
(#b: const_slice)
(r: repr_pos t b)
(h: HS.mem)
: Lemma
(requires (
valid_repr_pos r h
))
(ensures (
LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r)
))
= reveal_valid ();
let p : repr_ptr t = as_ptr_spec r in
let slice = slice_of_const_buffer (Ptr?.b p) (Ptr?.meta p).len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h (to_slice b) r.start_pos;
LP.parse_strong_prefix r.meta.parser (LP.bytes_of_slice_from h slice 0ul) (LP.bytes_of_slice_from h (to_slice b) r.start_pos)
/// Mostly just by inheriting operations on pointers
let as_ptr #t #b (r:repr_pos t b)
: Stack (repr_ptr t)
(requires fun h ->
valid_repr_pos r h)
(ensures fun h0 ptr h1 ->
ptr == as_ptr_spec r /\
h0 == h1)
= let b = C.sub b.base r.start_pos (Ghost.hide r.meta.len) in
let m = r.meta in
let v = r.vv_pos in
let l = r.length in
Ptr b m v l
let as_repr_pos #t (b:const_slice) (from to:index b) (p:repr_ptr t)
: Pure (repr_pos t b)
(requires
from <= to /\
Ptr?.b p == C.gsub b.base from (to - from))
(ensures fun r ->
p == as_ptr_spec r)
= Pos from (Ptr?.meta p) (Ptr?.vv p) (to - from)
/// `mk_repr_pos b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:LP.slice mut_p mut_p)
(from to:index (of_slice b))
: Stack (repr_pos_p t (of_slice b) parser)
(requires fun h ->
LP.valid_pos parser h b from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 b from)
= as_repr_pos (of_slice b) from to (mk parser32 b from to)
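(* Editorial sketch (not machine-checked): a positional repr records a start
   index into the enclosing slice together with the same erased metadata; it
   is built from the same validity evidence as `mk`.

     let r = mk_repr_pos my_parser32 b from to in
     // r.start_pos = from, end_pos r = to, r.vv_pos is the parsed value
     // as_ptr r recovers the corresponding repr_ptr when needed
     ...
*)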
/// `mk b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p` | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
parser32: LowParse.SLow.Base.parser32 parser ->
b: LowParse.Repr.const_slice ->
from: LowParse.Repr.index b ->
to: LowParse.Repr.index b
-> FStar.HyperStack.ST.Stack (LowParse.Repr.repr_pos_p t b parser) | FStar.HyperStack.ST.Stack | [] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.SLow.Base.parser32",
"LowParse.Repr.const_slice",
"LowParse.Repr.index",
"LowParse.Repr.as_repr_pos",
"LowParse.Repr.repr_pos_p",
"LowParse.Repr.repr_ptr",
"LowParse.Repr.mk_from_const_slice",
"LowParse.Repr.repr_ptr_p",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Low.Base.Spec.valid_pos",
"LowParse.Repr.preorder",
"LowParse.Repr.__proj__MkSlice__item__base",
"LowParse.Repr.to_slice",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"LowParse.Repr.valid_repr_pos",
"Prims.b2t",
"Prims.op_Equality",
"LowParse.Repr.__proj__Pos__item__start_pos",
"LowParse.Repr.end_pos",
"Prims.eq2",
"LowParse.Repr.__proj__Pos__item__vv_pos",
"LowParse.Low.Base.Spec.contents"
] | [] | false | true | false | false | false | let mk_repr_pos_from_const_slice
(#k: strong_parser_kind)
#t
(#parser: LP.parser k t)
(parser32: LS.parser32 parser)
(b: const_slice)
(from: index b)
(to: index b)
: Stack (repr_pos_p t b parser)
(requires fun h -> LP.valid_pos parser h (to_slice b) from to)
(ensures
fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\ valid_repr_pos r h1 /\ r.start_pos = from /\ end_pos r = to /\
r.vv_pos == LP.contents parser h1 (to_slice b) from) =
| as_repr_pos b from to (mk_from_const_slice parser32 b from to) | false |
LowParse.Repr.fsti | LowParse.Repr.mk_repr_pos | val mk_repr_pos
(#k: strong_parser_kind)
(#t: _)
(#parser: LP.parser k t)
(parser32: LS.parser32 parser)
(b: LP.slice mut_p mut_p)
(from to: index (of_slice b))
: Stack (repr_pos_p t (of_slice b) parser)
(requires fun h -> LP.valid_pos parser h b from to)
(ensures
fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\ valid_repr_pos r h1 /\ r.start_pos = from /\ end_pos r = to /\
r.vv_pos == LP.contents parser h1 b from) | val mk_repr_pos
(#k: strong_parser_kind)
(#t: _)
(#parser: LP.parser k t)
(parser32: LS.parser32 parser)
(b: LP.slice mut_p mut_p)
(from to: index (of_slice b))
: Stack (repr_pos_p t (of_slice b) parser)
(requires fun h -> LP.valid_pos parser h b from to)
(ensures
fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\ valid_repr_pos r h1 /\ r.start_pos = from /\ end_pos r = to /\
r.vv_pos == LP.contents parser h1 b from) | let mk_repr_pos (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:LP.slice mut_p mut_p)
(from to:index (of_slice b))
: Stack (repr_pos_p t (of_slice b) parser)
(requires fun h ->
LP.valid_pos parser h b from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 b from)
= as_repr_pos (of_slice b) from to (mk parser32 b from to) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 60,
"end_line": 838,
"start_col": 0,
"start_line": 825
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
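(* Editorial sketch (not machine-checked): a mutable LowParse slice can be
   viewed through this read-only interface, and converted back when a
   LowParse-style slice is needed:

     let cs = of_slice s in      // s : LP.slice mut_p mut_p
     // to_slice cs : LP.slice (preorder cs.base) (preorder cs.base)
*)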
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
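(* Editorial sketch (not machine-checked): since `frame_valid` carries SMT
   patterns, validity usually survives writes to disjoint locations without
   an explicit lemma call, e.g.

     // valid p h0 /\ B.modifies (B.loc_buffer other) h0 h1 /\
     // B.loc_disjoint (fp p) (B.loc_buffer other)
     //   ==> valid p h1 is derived automatically by the solver
*)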
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the
/// sub-slice b.[from, to)
let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r)
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
let frame_valid_repr_pos #t #b (r:repr_pos t b) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid_repr_pos r h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp_pos r) l)
(ensures
valid_repr_pos r h1)
[SMTPat (valid_repr_pos r h1);
SMTPat (B.modifies l h0 h1)]
= ()
(*** Operations on repr_pos ***)
/// End position
/// On positional reprs, this is concretely computable
let end_pos #t #b (r:repr_pos t b)
: index b
= r.start_pos + r.length
let valid_repr_pos_elim
(#t: Type)
(#b: const_slice)
(r: repr_pos t b)
(h: HS.mem)
: Lemma
(requires (
valid_repr_pos r h
))
(ensures (
LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r)
))
= reveal_valid ();
let p : repr_ptr t = as_ptr_spec r in
let slice = slice_of_const_buffer (Ptr?.b p) (Ptr?.meta p).len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h (to_slice b) r.start_pos;
LP.parse_strong_prefix r.meta.parser (LP.bytes_of_slice_from h slice 0ul) (LP.bytes_of_slice_from h (to_slice b) r.start_pos)
/// Mostly just by inheriting operations on pointers
let as_ptr #t #b (r:repr_pos t b)
: Stack (repr_ptr t)
(requires fun h ->
valid_repr_pos r h)
(ensures fun h0 ptr h1 ->
ptr == as_ptr_spec r /\
h0 == h1)
= let b = C.sub b.base r.start_pos (Ghost.hide r.meta.len) in
let m = r.meta in
let v = r.vv_pos in
let l = r.length in
Ptr b m v l
let as_repr_pos #t (b:const_slice) (from to:index b) (p:repr_ptr t)
: Pure (repr_pos t b)
(requires
from <= to /\
Ptr?.b p == C.gsub b.base from (to - from))
(ensures fun r ->
p == as_ptr_spec r)
= Pos from (Ptr?.meta p) (Ptr?.vv p) (to - from)
/// `mk_repr_pos b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p` | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
parser32: LowParse.SLow.Base.parser32 parser ->
b: LowParse.Slice.slice LowParse.Repr.mut_p LowParse.Repr.mut_p ->
from: LowParse.Repr.index (LowParse.Repr.of_slice b) ->
to: LowParse.Repr.index (LowParse.Repr.of_slice b)
-> FStar.HyperStack.ST.Stack (LowParse.Repr.repr_pos_p t (LowParse.Repr.of_slice b) parser) | FStar.HyperStack.ST.Stack | [] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.SLow.Base.parser32",
"LowParse.Slice.slice",
"LowParse.Repr.mut_p",
"LowParse.Repr.index",
"LowParse.Repr.of_slice",
"LowParse.Repr.as_repr_pos",
"LowParse.Repr.repr_pos_p",
"LowParse.Repr.repr_ptr",
"LowParse.Repr.mk",
"LowStar.ConstBuffer.MUTABLE",
"LowParse.Repr.repr_ptr_p",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Low.Base.Spec.valid_pos",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"LowParse.Repr.valid_repr_pos",
"Prims.b2t",
"Prims.op_Equality",
"LowParse.Repr.__proj__Pos__item__start_pos",
"LowParse.Repr.end_pos",
"Prims.eq2",
"LowParse.Repr.__proj__Pos__item__vv_pos",
"LowParse.Low.Base.Spec.contents"
] | [] | false | true | false | false | false | let mk_repr_pos
(#k: strong_parser_kind)
#t
(#parser: LP.parser k t)
(parser32: LS.parser32 parser)
(b: LP.slice mut_p mut_p)
(from: index (of_slice b))
(to: index (of_slice b))
: Stack (repr_pos_p t (of_slice b) parser)
(requires fun h -> LP.valid_pos parser h b from to)
(ensures
fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\ valid_repr_pos r h1 /\ r.start_pos = from /\ end_pos r = to /\
r.vv_pos == LP.contents parser h1 b from) =
| as_repr_pos (of_slice b) from to (mk parser32 b from to) | false |
LowParse.Repr.fsti | LowParse.Repr.mk_repr_pos_from_serialize | val mk_repr_pos_from_serialize
(#k: strong_parser_kind)
(#t: _)
(#parser: LP.parser k t)
(#serializer: LP.serializer parser)
(parser32: LS.parser32 parser)
(serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b: LP.slice mut_p mut_p)
(from: index (of_slice b))
(x: t)
: Stack (option (repr_pos_p t (of_slice b) parser))
(requires fun h -> LP.live_slice h b)
(ensures
fun h0 r h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match r with
| None -> Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some r ->
valid_repr_pos r h1 /\ r.start_pos == from /\ r.vv_pos == x /\
v (end_pos r) = v from + v (size32 x))) | val mk_repr_pos_from_serialize
(#k: strong_parser_kind)
(#t: _)
(#parser: LP.parser k t)
(#serializer: LP.serializer parser)
(parser32: LS.parser32 parser)
(serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b: LP.slice mut_p mut_p)
(from: index (of_slice b))
(x: t)
: Stack (option (repr_pos_p t (of_slice b) parser))
(requires fun h -> LP.live_slice h b)
(ensures
fun h0 r h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match r with
| None -> Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some r ->
valid_repr_pos r h1 /\ r.start_pos == from /\ r.vv_pos == x /\
v (end_pos r) = v from + v (size32 x))) | let mk_repr_pos_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:index (of_slice b))
(x: t)
: Stack (option (repr_pos_p t (of_slice b) parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 r h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
begin match r with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some r ->
valid_repr_pos r h1 /\
r.start_pos == from /\
r.vv_pos == x /\
v (end_pos r) = v from + v (size32 x)
end
)
= let size = size32 x in
match (mk_from_serialize parser32 serializer32 size32 b from x) with
| None -> None
| Some p -> Some (as_repr_pos (of_slice b) from (from + size) p) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 66,
"end_line": 894,
"start_col": 0,
"start_line": 868
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
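(* Illustrative sketch (not part of the original file): once positions
   `from`/`to` in `slice` are known to be valid for some format -- e.g.
   established by running its LowParse validator -- a pointer-based
   representation is obtained directly:

     let p = mk foo_parser32 slice from to in
     ... use p ...

   Here `foo_parser32` stands for a hypothetical `LS.parser32` instance
   for that format. *)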
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
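(* Illustrative sketch (not part of the original file): a typical use of
   `mk_from_serialize`, assuming hypothetical LowParse instances
   `foo_parser32`, `foo_serializer32`, `foo_size32` for some format, an
   output slice `out`, a write position `pos`, and a value `x`:

     match mk_from_serialize foo_parser32 foo_serializer32 foo_size32 out pos x with
     | None -> ...   // not enough space in `out` after `pos`
     | Some p -> ... // p is valid, p.meta.v == x, backed by the written bytes
*)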
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t = p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
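(* Illustrative sketch (not part of the original file): the intended
   stable-representation workflow, assuming a region `rgn:ST.drgn`, a valid
   `p:repr_ptr t`, and a jumper `j` for its parser:

     let len = length p j in
     let q = stash rgn p len in            // q stays valid while rgn is live
     ...
     recall_stable_region_repr_ptr rgn q;  // re-establish validity of q
     let bytes = to_bytes q len in         // safe to read q again
     ...
*)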
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
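(* Illustrative sketch (not part of the original file): a `field_accessor`
   instance for a hypothetical message format `msg` with a field of type
   `fld`, built from hypothetical LowParse combinators. Marking it `unfold`
   keeps the lens-condition VCs compact and lets `get_field` inline:

     unfold noextract
     let msg_fld_accessor : field_accessor msg_parser fld_parser =
       FieldAccessor accessor_msg_fld jumper_fld parser32_fld
*)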
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
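(* Illustrative sketch (not part of the original file): a `field_reader`
   instance for a hypothetical fixed-size field with a LowParse leaf reader,
   again marked `unfold` for compact VCs and inlining:

     unfold noextract
     let msg_tag_reader : field_reader msg_parser tag_t =
       FieldReader accessor_msg_tag read_tag
*)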
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice) = i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the
/// sub-slice b.[from, to)
let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r)
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
let frame_valid_repr_pos #t #b (r:repr_pos t b) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid_repr_pos r h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp_pos r) l)
(ensures
valid_repr_pos r h1)
[SMTPat (valid_repr_pos r h1);
SMTPat (B.modifies l h0 h1)]
= ()
(*** Operations on repr_pos ***)
/// End position
/// On positional reprs, this is concretely computable
let end_pos #t #b (r:repr_pos t b)
: index b
= r.start_pos + r.length
let valid_repr_pos_elim
(#t: Type)
(#b: const_slice)
(r: repr_pos t b)
(h: HS.mem)
: Lemma
(requires (
valid_repr_pos r h
))
(ensures (
LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r)
))
= reveal_valid ();
let p : repr_ptr t = as_ptr_spec r in
let slice = slice_of_const_buffer (Ptr?.b p) (Ptr?.meta p).len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h (to_slice b) r.start_pos;
LP.parse_strong_prefix r.meta.parser (LP.bytes_of_slice_from h slice 0ul) (LP.bytes_of_slice_from h (to_slice b) r.start_pos)
/// Mostly just by inheriting operations on pointers
let as_ptr #t #b (r:repr_pos t b)
: Stack (repr_ptr t)
(requires fun h ->
valid_repr_pos r h)
(ensures fun h0 ptr h1 ->
ptr == as_ptr_spec r /\
h0 == h1)
= let b = C.sub b.base r.start_pos (Ghost.hide r.meta.len) in
let m = r.meta in
let v = r.vv_pos in
let l = r.length in
Ptr b m v l
let as_repr_pos #t (b:const_slice) (from to:index b) (p:repr_ptr t)
: Pure (repr_pos t b)
(requires
from <= to /\
Ptr?.b p == C.gsub b.base from (to - from))
(ensures fun r ->
p == as_ptr_spec r)
= Pos from (Ptr?.meta p) (Ptr?.vv p) (to - from)
/// `mk_repr_pos b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:LP.slice mut_p mut_p)
(from to:index (of_slice b))
: Stack (repr_pos_p t (of_slice b) parser)
(requires fun h ->
LP.valid_pos parser h b from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 b from)
= as_repr_pos (of_slice b) from to (mk parser32 b from to)
/// `mk b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:index b)
: Stack (repr_pos_p t b parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 (to_slice b) from)
= as_repr_pos b from to (mk_from_const_slice parser32 b from to)
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
parser32: LowParse.SLow.Base.parser32 parser ->
serializer32: LowParse.SLow.Base.serializer32 serializer ->
size32: LowParse.SLow.Base.size32 serializer ->
b: LowParse.Slice.slice LowParse.Repr.mut_p LowParse.Repr.mut_p ->
from: LowParse.Repr.index (LowParse.Repr.of_slice b) ->
x: t
-> FStar.HyperStack.ST.Stack
(FStar.Pervasives.Native.option (LowParse.Repr.repr_pos_p t (LowParse.Repr.of_slice b) parser)) | FStar.HyperStack.ST.Stack | [] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"LowParse.SLow.Base.parser32",
"LowParse.SLow.Base.serializer32",
"LowParse.SLow.Base.size32",
"LowParse.Slice.slice",
"LowParse.Repr.mut_p",
"LowParse.Repr.index",
"LowParse.Repr.of_slice",
"FStar.Pervasives.Native.None",
"LowParse.Repr.repr_pos_p",
"LowParse.Repr.repr_ptr_p",
"FStar.Pervasives.Native.Some",
"LowParse.Repr.as_repr_pos",
"FStar.Integers.op_Plus",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"FStar.Pervasives.Native.option",
"LowParse.Repr.mk_from_serialize",
"FStar.UInt32.t",
"LowParse.SLow.Base.size32_postcond",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Slice.live_slice",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowParse.Slice.loc_slice_from",
"Prims.b2t",
"FStar.Integers.op_Greater",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"LowParse.Spec.Base.serialize",
"FStar.UInt32.v",
"FStar.Integers.op_Subtraction",
"LowParse.Slice.__proj__Mkslice__item__len",
"LowParse.Repr.valid_repr_pos",
"Prims.eq2",
"LowParse.Repr.__proj__Pos__item__start_pos",
"LowParse.Repr.__proj__Pos__item__vv_pos",
"Prims.op_Equality",
"FStar.Integers.int_t",
"FStar.Integers.v",
"LowParse.Repr.end_pos",
"Prims.logical"
] | [] | false | true | false | false | false | let mk_repr_pos_from_serialize
(#k: strong_parser_kind)
#t
(#parser: LP.parser k t)
(#serializer: LP.serializer parser)
(parser32: LS.parser32 parser)
(serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b: LP.slice mut_p mut_p)
(from: index (of_slice b))
(x: t)
: Stack (option (repr_pos_p t (of_slice b) parser))
(requires fun h -> LP.live_slice h b)
(ensures
fun h0 r h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match r with
| None -> Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some r ->
valid_repr_pos r h1 /\ r.start_pos == from /\ r.vv_pos == x /\
v (end_pos r) = v from + v (size32 x))) =
| let size = size32 x in
match (mk_from_serialize parser32 serializer32 size32 b from x) with
| None -> None
| Some p -> Some (as_repr_pos (of_slice b) from (from + size) p) | false |
LowParse.Repr.fsti | LowParse.Repr.mk_from_const_slice | val mk_from_const_slice
(#k: strong_parser_kind)
(#t: _)
(#parser: LP.parser k t)
(parser32: LS.parser32 parser)
(b: const_slice)
(from to: uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h -> LP.valid_pos parser h (to_slice b) from to)
(ensures
fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\ valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
C.const_sub_buffer from (to - from) p.b b.base) | val mk_from_const_slice
(#k: strong_parser_kind)
(#t: _)
(#parser: LP.parser k t)
(parser32: LS.parser32 parser)
(b: const_slice)
(from to: uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h -> LP.valid_pos parser h (to_slice b) from to)
(ensures
fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\ valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
C.const_sub_buffer from (to - from) p.b b.base) | let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 5,
"end_line": 288,
"start_col": 0,
"start_line": 250
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by erased the `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20" | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
parser32: LowParse.SLow.Base.parser32 parser ->
b: LowParse.Repr.const_slice ->
from: FStar.Integers.uint_32 ->
to: FStar.Integers.uint_32
-> FStar.HyperStack.ST.Stack (LowParse.Repr.repr_ptr_p t parser) | FStar.HyperStack.ST.Stack | [] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.SLow.Base.parser32",
"LowParse.Repr.const_slice",
"FStar.Integers.uint_32",
"Prims.unit",
"LowParse.Low.Base.Spec.valid_facts",
"LowParse.Repr.preorder",
"LowParse.Repr.__proj__Mkmeta__item__parser_kind",
"LowParse.Repr.__proj__Ptr__item__meta",
"LowParse.Repr.__proj__Mkmeta__item__parser",
"FStar.UInt32.__uint_to_t",
"LowParse.Repr.__proj__MkSlice__item__base",
"LowParse.Slice.slice",
"LowParse.Repr.slice_of_const_buffer",
"FStar.Integers.op_Subtraction",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"LowParse.Repr.repr_ptr_p",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowParse.Repr.repr_ptr",
"LowParse.Repr.Ptr",
"FStar.UInt32.t",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.SLow.Base.parser32_correct",
"FStar.Bytes.bytes",
"Prims.b2t",
"Prims.op_Equality",
"FStar.UInt.uint_t",
"FStar.UInt32.v",
"FStar.Bytes.len",
"FStar.UInt32.sub",
"FStar.Bytes.of_buffer",
"LowStar.ConstBuffer.qbuf_pre",
"LowParse.Bytes.byte",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.ConstBuffer.cast",
"FStar.Bytes.length",
"LowStar.ConstBuffer.const_buffer",
"LowStar.ConstBuffer.sub",
"FStar.Ghost.hide",
"LowParse.Repr.meta",
"LowParse.Repr.Mkmeta",
"LowParse.Low.Base.Spec.contents",
"LowParse.Low.Base.Spec.bytes_of_slice_from_to",
"LowParse.Low.Base.Spec.contents_exact_eq",
"LowParse.Repr.to_slice",
"LowParse.Repr.reveal_valid",
"LowParse.Low.Base.Spec.valid_pos",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"LowParse.Repr.valid",
"Prims.eq2",
"LowParse.Repr.__proj__Mkmeta__item__v",
"LowStar.ConstBuffer.const_sub_buffer",
"LowParse.Repr.__proj__Ptr__item__b"
] | [] | false | true | false | false | false | let mk_from_const_slice
(#k: strong_parser_kind)
#t
(#parser: LP.parser k t)
(parser32: LS.parser32 parser)
(b: const_slice)
(from: uint_32)
(to: uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h -> LP.valid_pos parser h (to_slice b) from to)
(ensures
fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\ valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
C.const_sub_buffer from (to - from) p.b b.base) =
| reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta:meta t =
{
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
}
in
let sub_b = C.sub b.base from (to - from) in
let value =
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from;
LP.valid_facts p.meta.parser h1 slice' 0ul;
p | false |
Hacl.Impl.Frodo.KEM.Encaps.fst | Hacl.Impl.Frodo.KEM.Encaps.get_sp_ep_epp_matrices | val get_sp_ep_epp_matrices:
a:FP.frodo_alg
-> seed_se:lbytes (crypto_bytes a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h seed_se /\ live h sp_matrix /\
live h ep_matrix /\ live h epp_matrix /\
disjoint seed_se sp_matrix /\ disjoint seed_se ep_matrix /\
disjoint seed_se epp_matrix /\ disjoint sp_matrix ep_matrix /\
disjoint sp_matrix epp_matrix /\ disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1 /\
(as_matrix h1 sp_matrix, as_matrix h1 ep_matrix, as_matrix h1 epp_matrix) ==
S.get_sp_ep_epp_matrices a (as_seq h0 seed_se)) | val get_sp_ep_epp_matrices:
a:FP.frodo_alg
-> seed_se:lbytes (crypto_bytes a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h seed_se /\ live h sp_matrix /\
live h ep_matrix /\ live h epp_matrix /\
disjoint seed_se sp_matrix /\ disjoint seed_se ep_matrix /\
disjoint seed_se epp_matrix /\ disjoint sp_matrix ep_matrix /\
disjoint sp_matrix epp_matrix /\ disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1 /\
(as_matrix h1 sp_matrix, as_matrix h1 ep_matrix, as_matrix h1 epp_matrix) ==
S.get_sp_ep_epp_matrices a (as_seq h0 seed_se)) | let get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix =
push_frame ();
[@inline_let] let s_bytes_len = secretmatrixbytes_len a in
let r = create (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) (u8 0) in
KG.frodo_shake_r a (u8 0x96) seed_se (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) r;
frodo_sample_matrix a params_nbar (params_n a) (sub r 0ul s_bytes_len) sp_matrix;
frodo_sample_matrix a params_nbar (params_n a) (sub r s_bytes_len s_bytes_len) ep_matrix;
frodo_sample_matrix a params_nbar params_nbar (sub r (2ul *! s_bytes_len) (2ul *! params_nbar *! params_nbar)) epp_matrix;
pop_frame () | {
"file_name": "code/frodo/Hacl.Impl.Frodo.KEM.Encaps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 186,
"start_col": 0,
"start_line": 178
} | module Hacl.Impl.Frodo.KEM.Encaps
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open LowStar.Buffer
open Lib.IntTypes
open Lib.Buffer
open Hacl.Impl.Matrix
open Hacl.Impl.Frodo.Params
open Hacl.Impl.Frodo.KEM
open Hacl.Impl.Frodo.Encode
open Hacl.Impl.Frodo.Pack
open Hacl.Impl.Frodo.Sample
open Hacl.Frodo.Random
module ST = FStar.HyperStack.ST
module LSeq = Lib.Sequence
module LB = Lib.ByteSequence
module FP = Spec.Frodo.Params
module S = Spec.Frodo.KEM.Encaps
module M = Spec.Matrix
module KG = Hacl.Impl.Frodo.KEM.KeyGen
#set-options "--z3rlimit 100 --fuel 0 --ifuel 0"
inline_for_extraction noextract
val frodo_mul_add_sa_plus_e:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h bp_matrix /\
disjoint bp_matrix seed_a /\ disjoint bp_matrix ep_matrix /\ disjoint bp_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc bp_matrix) h0 h1 /\
as_matrix h1 bp_matrix ==
S.frodo_mul_add_sa_plus_e a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix =
push_frame ();
let a_matrix = matrix_create (params_n a) (params_n a) in
frodo_gen_matrix gen_a (params_n a) seed_a a_matrix;
matrix_mul sp_matrix a_matrix bp_matrix;
matrix_add bp_matrix ep_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c1:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> c1:lbytes (ct1bytes_len a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h c1 /\
disjoint seed_a c1 /\ disjoint ep_matrix c1 /\ disjoint sp_matrix c1)
(ensures fun h0 _ h1 -> modifies (loc c1) h0 h1 /\
as_seq h1 c1 ==
S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1 =
push_frame ();
let bp_matrix = matrix_create params_nbar (params_n a) in
frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix;
frodo_pack (params_logq a) bp_matrix c1;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e:
a:FP.frodo_alg
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h epp_matrix /\ live h v_matrix /\ live h sp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix epp_matrix /\ disjoint v_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e a (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix =
push_frame ();
let b_matrix = matrix_create (params_n a) params_nbar in
frodo_unpack (params_n a) params_nbar (params_logq a) b b_matrix;
matrix_mul sp_matrix b_matrix v_matrix;
matrix_add v_matrix epp_matrix;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e_plus_mu:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h mu /\ live h v_matrix /\
live h sp_matrix /\ live h epp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix sp_matrix /\
disjoint v_matrix mu /\ disjoint v_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e_plus_mu a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix =
push_frame ();
frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix;
let mu_encode = matrix_create params_nbar params_nbar in
frodo_key_encode (params_logq a) (params_extracted_bits a) params_nbar mu mu_encode;
matrix_add v_matrix mu_encode;
clear_matrix mu_encode;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c2:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> c2:lbytes (ct2bytes_len a)
-> Stack unit
(requires fun h ->
live h mu /\ live h b /\ live h sp_matrix /\
live h epp_matrix /\ live h c2 /\
disjoint mu c2 /\ disjoint b c2 /\
disjoint sp_matrix c2 /\ disjoint epp_matrix c2)
(ensures fun h0 _ h1 -> modifies (loc c2) h0 h1 /\
as_seq h1 c2 ==
S.crypto_kem_enc_ct_pack_c2 a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
#push-options "--z3rlimit 200"
let crypto_kem_enc_ct_pack_c2 a mu b sp_matrix epp_matrix c2 =
push_frame ();
let v_matrix = matrix_create params_nbar params_nbar in
frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix;
frodo_pack (params_logq a) v_matrix c2;
clear_matrix v_matrix;
pop_frame ()
#pop-options
inline_for_extraction noextract
val get_sp_ep_epp_matrices:
a:FP.frodo_alg
-> seed_se:lbytes (crypto_bytes a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h seed_se /\ live h sp_matrix /\
live h ep_matrix /\ live h epp_matrix /\
disjoint seed_se sp_matrix /\ disjoint seed_se ep_matrix /\
disjoint seed_se epp_matrix /\ disjoint sp_matrix ep_matrix /\
disjoint sp_matrix epp_matrix /\ disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1 /\
(as_matrix h1 sp_matrix, as_matrix h1 ep_matrix, as_matrix h1 epp_matrix) ==
S.get_sp_ep_epp_matrices a (as_seq h0 seed_se)) | {
"checked_file": "/",
"dependencies": [
"Spec.Matrix.fst.checked",
"Spec.Frodo.Params.fst.checked",
"Spec.Frodo.KEM.Encaps.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Matrix.fst.checked",
"Hacl.Impl.Frodo.Sample.fst.checked",
"Hacl.Impl.Frodo.Params.fst.checked",
"Hacl.Impl.Frodo.Pack.fst.checked",
"Hacl.Impl.Frodo.KEM.KeyGen.fst.checked",
"Hacl.Impl.Frodo.KEM.fst.checked",
"Hacl.Impl.Frodo.Encode.fst.checked",
"Hacl.Frodo.Random.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Frodo.KEM.Encaps.fst"
} | [
{
"abbrev": true,
"full_module": "Hacl.Impl.Frodo.KEM.KeyGen",
"short_module": "KG"
},
{
"abbrev": true,
"full_module": "Spec.Matrix",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.KEM.Encaps",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.Params",
"short_module": "FP"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "LB"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Frodo.Random",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Sample",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Pack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Encode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Params",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Matrix",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Frodo.Params.frodo_alg ->
seed_se: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_bytes a) ->
sp_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
(Hacl.Impl.Frodo.Params.params_n a) ->
ep_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
(Hacl.Impl.Frodo.Params.params_n a) ->
epp_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
Hacl.Impl.Frodo.Params.params_nbar
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Spec.Frodo.Params.frodo_alg",
"Hacl.Impl.Matrix.lbytes",
"Hacl.Impl.Frodo.Params.crypto_bytes",
"Hacl.Impl.Matrix.matrix_t",
"Hacl.Impl.Frodo.Params.params_nbar",
"Hacl.Impl.Frodo.Params.params_n",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Frodo.Sample.frodo_sample_matrix",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.MUT",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"Lib.IntTypes.mul",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"FStar.UInt32.uint_to_t",
"FStar.UInt32.t",
"Lib.Buffer.sub",
"Lib.IntTypes.uint8",
"Lib.IntTypes.op_Plus_Bang",
"Lib.IntTypes.op_Star_Bang",
"FStar.UInt32.__uint_to_t",
"Hacl.Impl.Frodo.KEM.KeyGen.frodo_shake_r",
"Lib.IntTypes.u8",
"Lib.IntTypes.add",
"Lib.Buffer.create",
"Lib.Buffer.lbuffer",
"Hacl.Impl.Frodo.Params.secretmatrixbytes_len",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix =
| push_frame ();
[@@ inline_let ]let s_bytes_len = secretmatrixbytes_len a in
let r = create (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) (u8 0) in
KG.frodo_shake_r a (u8 0x96) seed_se (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) r;
frodo_sample_matrix a params_nbar (params_n a) (sub r 0ul s_bytes_len) sp_matrix;
frodo_sample_matrix a params_nbar (params_n a) (sub r s_bytes_len s_bytes_len) ep_matrix;
frodo_sample_matrix a
params_nbar
params_nbar
(sub r (2ul *! s_bytes_len) (2ul *! params_nbar *! params_nbar))
epp_matrix;
pop_frame () | false |
LowParse.Repr.fsti | LowParse.Repr.read_field_pos | val read_field_pos
(#k1: strong_parser_kind)
(#t1: Type)
(#p1: LP.parser k1 t1)
(#t2: _)
(f: field_reader p1 t2)
: read_field_pos_t f | val read_field_pos
(#k1: strong_parser_kind)
(#t1: Type)
(#p1: LP.parser k1 t1)
(#t2: _)
(f: field_reader p1 t2)
: read_field_pos_t f | let read_field_pos (#k1: strong_parser_kind) (#t1: Type) (#p1: LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: read_field_pos_t f
= fun #b p ->
read_field f (as_ptr p) | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 28,
"end_line": 960,
"start_col": 0,
"start_line": 956
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
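/// Illustrative usage sketch (not part of this interface): with hypothetical
/// instances `msg_parser32`, `msg_serializer32`, `msg_size32` coming from a
/// concrete LowParse specification of a type `msg`, a writer would be
///
///   let write_msg out from (m:msg) =
///     mk_from_serialize msg_parser32 msg_serializer32 msg_size32 out from m
///
/// where a `None` result means the output slice had no room for the
/// serialized value.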
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
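/// Illustrative usage sketch (hypothetical region `rgn : ST.drgn`, with
/// `len == p.meta.len`): copying a transient `p : repr_ptr t` into `rgn`
/// yields a pointer that can be revalidated while the region is live:
///
///   let p' = stash rgn p len in
///   ...
///   recall_stable_region_repr_ptr rgn p'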
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
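/// Illustrative sketch only (hypothetical names): an instance packaging an
/// EverParse accessor, jumper and parser32 for a `hdr` field of a `msg` type
/// would be declared `unfold` so that its lens conditions reduce at call
/// sites:
///
///   unfold noextract
///   let msg_hdr_field : field_accessor msg_parser hdr_parser =
///     FieldAccessor accessor_msg_hdr hdr_jumper hdr_parser32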
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
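/// Illustrative note: nested fields are reached by composing accessors. With
/// hypothetical instances `f12 : field_accessor p1 p2` and
/// `f23 : field_accessor p2 p3`,
///
///   let q = get_field (field_accessor_comp f12 f23) p
///
/// yields a `repr_ptr_p t3 p3` that is a `sub_ptr` of `p`.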
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
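/// Illustrative sketch only (hypothetical names): for a field read directly
/// with a leaf reader one would write
///
///   unfold
///   let msg_len_reader : field_reader msg_parser U32.t =
///     FieldReader accessor_msg_len read_u32_leaf
///
/// and then project the field out of a valid pointer with `read_field`.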
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice)= i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the
/// sub-slice b.[from, to)
let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r)
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
let frame_valid_repr_pos #t #b (r:repr_pos t b) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid_repr_pos r h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp_pos r) l)
(ensures
valid_repr_pos r h1)
[SMTPat (valid_repr_pos r h1);
SMTPat (B.modifies l h0 h1)]
= ()
(*** Operations on repr_pos ***)
/// End position
/// On positional reprs, this is concretely computable
let end_pos #t #b (r:repr_pos t b)
: index b
= r.start_pos + r.length
let valid_repr_pos_elim
(#t: Type)
(#b: const_slice)
(r: repr_pos t b)
(h: HS.mem)
: Lemma
(requires (
valid_repr_pos r h
))
(ensures (
LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r)
))
= reveal_valid ();
let p : repr_ptr t = as_ptr_spec r in
let slice = slice_of_const_buffer (Ptr?.b p) (Ptr?.meta p).len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h (to_slice b) r.start_pos;
LP.parse_strong_prefix r.meta.parser (LP.bytes_of_slice_from h slice 0ul) (LP.bytes_of_slice_from h (to_slice b) r.start_pos)
/// Mostly just by inheriting operations on pointers
let as_ptr #t #b (r:repr_pos t b)
: Stack (repr_ptr t)
(requires fun h ->
valid_repr_pos r h)
(ensures fun h0 ptr h1 ->
ptr == as_ptr_spec r /\
h0 == h1)
= let b = C.sub b.base r.start_pos (Ghost.hide r.meta.len) in
let m = r.meta in
let v = r.vv_pos in
let l = r.length in
Ptr b m v l
let as_repr_pos #t (b:const_slice) (from to:index b) (p:repr_ptr t)
: Pure (repr_pos t b)
(requires
from <= to /\
Ptr?.b p == C.gsub b.base from (to - from))
(ensures fun r ->
p == as_ptr_spec r)
= Pos from (Ptr?.meta p) (Ptr?.vv p) (to - from)
/// `mk_repr_pos b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:LP.slice mut_p mut_p)
(from to:index (of_slice b))
: Stack (repr_pos_p t (of_slice b) parser)
(requires fun h ->
LP.valid_pos parser h b from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 b from)
= as_repr_pos (of_slice b) from to (mk parser32 b from to)
/// `mk b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:index b)
: Stack (repr_pos_p t b parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 (to_slice b) from)
= as_repr_pos b from to (mk_from_const_slice parser32 b from to)
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_repr_pos_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:index (of_slice b))
(x: t)
: Stack (option (repr_pos_p t (of_slice b) parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 r h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
begin match r with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some r ->
valid_repr_pos r h1 /\
r.start_pos == from /\
r.vv_pos == x /\
v (end_pos r) = v from + v (size32 x)
end
)
= let size = size32 x in
match (mk_from_serialize parser32 serializer32 size32 b from x) with
| None -> None
| Some p -> Some (as_repr_pos (of_slice b) from (from + size) p)
/// Accessors on positional reprs
unfold
let field_accessor_pos_post (#b:const_slice) (#t1:Type) (p:repr_pos t1 b)
(#k2: strong_parser_kind)
(#t2:Type)
(#p2: LP.parser k2 t2)
(f:field_accessor p.meta.parser p2) =
fun h0 (q:repr_pos_p t2 b p2) h1 ->
let cl = FieldAccessor?.cl f in
cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid_repr_pos q h1 /\
value_pos q == cl.LP.clens_get (value_pos p)
unfold
let get_field_pos_t (#k1: strong_parser_kind) (#t1: Type) (#p1: LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2: Type) (#p2: LP.parser k2 t2)
(f:field_accessor p1 p2)
= (#b:const_slice) ->
(pp:repr_pos_p t1 b p1) ->
Stack (repr_pos_p t2 b p2)
(requires fun h ->
let cl = FieldAccessor?.cl f in
valid_repr_pos pp h /\
cl.LP.clens_cond pp.meta.v)
(ensures
field_accessor_pos_post pp f)
inline_for_extraction
let get_field_pos (#k1: strong_parser_kind) (#t1: Type) (#p1: LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2: Type) (#p2: LP.parser k2 t2)
(f:field_accessor p1 p2)
: get_field_pos_t f
= reveal_valid ();
fun #b pp ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let p = as_ptr pp in
let bb = temp_slice_of_repr_ptr p in
let pos = acc bb 0ul in
let pos_to = jump bb pos in
let q = mk p2' bb pos pos_to in
let len = pos_to - pos in
assert (Ptr?.b q `C.const_sub_buffer pos len` Ptr?.b p);
as_repr_pos b (pp.start_pos + pos) (pp.start_pos + pos + len) q
unfold
let read_field_pos_t (#k1: strong_parser_kind) (#t1: Type) (#p1: LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
= (#b:const_slice) ->
(p:repr_pos_p t1 b p1) ->
Stack t2
(requires fun h ->
valid_repr_pos p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value_pos p)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: LowParse.Repr.field_reader p1 t2 -> LowParse.Repr.read_field_pos_t f | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Repr.field_reader",
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos_p",
"LowParse.Repr.read_field",
"LowParse.Repr.repr_ptr_p",
"LowParse.Repr.as_ptr",
"LowParse.Repr.repr_ptr",
"LowParse.Repr.read_field_pos_t"
] | [] | false | false | false | false | false | let read_field_pos
(#k1: strong_parser_kind)
(#t1: Type)
(#p1: LP.parser k1 t1)
#t2
(f: field_reader p1 t2)
: read_field_pos_t f =
| fun #b p -> read_field f (as_ptr p) | false |
FStar.Tactics.CanonMonoid.fst | FStar.Tactics.CanonMonoid.dump | val dump : m: Prims.string -> FStar.Tactics.Effect.Tac Prims.unit | let dump m = if debugging () then dump m | {
"file_name": "ulib/FStar.Tactics.CanonMonoid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 40,
"end_line": 24,
"start_col": 0,
"start_line": 24
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Tactics.CanonMonoid
open FStar.Algebra.Monoid
open FStar.List
open FStar.Reflection.V2
open FStar.Tactics.V2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Algebra.Monoid.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Tactics.CanonMonoid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Tactics.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Reflection.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.Monoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Prims.string -> FStar.Tactics.Effect.Tac Prims.unit | FStar.Tactics.Effect.Tac | [] | [] | [
"Prims.string",
"FStar.Stubs.Tactics.V2.Builtins.dump",
"Prims.unit",
"Prims.bool",
"FStar.Stubs.Tactics.V2.Builtins.debugging"
] | [] | false | true | false | false | false | let dump m =
| if debugging () then dump m | false |
|
LowParse.Repr.fsti | LowParse.Repr.sub_ptr_stable | val sub_ptr_stable (#t0 #t1: _) (r0: repr_ptr t0) (r1: repr_ptr t1) (h: HS.mem)
: Lemma (requires r0 `sub_ptr` r1 /\ valid_if_live r1 /\ valid r1 h /\ valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\ (B.region_lifetime_buf b1 ==> B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1); SMTPat (valid_if_live r1); SMTPat (valid r0 h)] | val sub_ptr_stable (#t0 #t1: _) (r0: repr_ptr t0) (r1: repr_ptr t1) (h: HS.mem)
: Lemma (requires r0 `sub_ptr` r1 /\ valid_if_live r1 /\ valid r1 h /\ valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\ (B.region_lifetime_buf b1 ==> B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1); SMTPat (valid_if_live r1); SMTPat (valid r0 h)] | let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 28,
"end_line": 492,
"start_col": 0,
"start_line": 460
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by the erased `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually access instead its ghost (.meta.v) and
/// eventually get rid of it to lower implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t= p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
() | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | r0: LowParse.Repr.repr_ptr t0 -> r1: LowParse.Repr.repr_ptr t1 -> h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowParse.Repr.sub_ptr r0 r1 /\ LowParse.Repr.valid_if_live r1 /\ LowParse.Repr.valid r1 h /\
LowParse.Repr.valid r0 h)
(ensures
LowParse.Repr.valid_if_live r0 /\
(let b0 = LowStar.ConstBuffer.cast (Ptr?.b r0) in
let b1 = LowStar.ConstBuffer.cast (Ptr?.b r1) in
LowStar.Monotonic.Buffer.frameOf b0 == LowStar.Monotonic.Buffer.frameOf b1 /\
(LowStar.Monotonic.Buffer.region_lifetime_buf b1 ==>
LowStar.Monotonic.Buffer.region_lifetime_buf b0)))
[
SMTPat (LowParse.Repr.sub_ptr r0 r1);
SMTPat (LowParse.Repr.valid_if_live r1);
SMTPat (LowParse.Repr.valid r0 h)
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowParse.Repr.repr_ptr",
"FStar.Monotonic.HyperStack.mem",
"LowParse.Repr.valid_if_live_intro",
"Prims.unit",
"LowStar.Monotonic.Buffer.region_lifetime_sub",
"LowParse.Bytes.byte",
"LowStar.ImmutableBuffer.immutable_preorder",
"FStar.UInt32.t",
"LowStar.ConstBuffer.const_sub_buffer",
"LowParse.Repr.__proj__Ptr__item__b",
"Prims.squash",
"LowStar.ImmutableBuffer.value_is",
"FStar.Ghost.hide",
"FStar.Seq.Base.seq",
"LowParse.Repr.__proj__Mkmeta__item__repr_bytes",
"LowParse.Repr.__proj__Ptr__item__meta",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.logical",
"Prims.Nil",
"LowStar.ImmutableBuffer.sub_ptr_value_is",
"Prims._assert",
"Prims.eq2",
"Prims.nat",
"FStar.Seq.Base.length",
"LowStar.Monotonic.Buffer.length",
"LowStar.ImmutableBuffer.ibuffer",
"LowStar.ConstBuffer.cast",
"LowParse.Repr.reveal_valid",
"Prims.l_and",
"LowParse.Repr.sub_ptr",
"LowParse.Repr.valid_if_live",
"LowParse.Repr.valid",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"Prims.l_imp",
"LowStar.Monotonic.Buffer.region_lifetime_buf",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.prop"
] | [] | false | false | true | false | false | let sub_ptr_stable (#t0 #t1: _) (r0: repr_ptr t0) (r1: repr_ptr t1) (h: HS.mem)
: Lemma (requires r0 `sub_ptr` r1 /\ valid_if_live r1 /\ valid r1 h /\ valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\ (B.region_lifetime_buf b1 ==> B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1); SMTPat (valid_if_live r1); SMTPat (valid r0 h)] =
| reveal_valid ();
let b0:I.ibuffer LP.byte = C.cast r0.b in
let b1:I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len: U32.t)
: Lemma (requires C.const_sub_buffer i len r0.b r1.b)
(ensures I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (C.const_sub_buffer i len r0.b r1.b)] =
I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h | false |
LowParse.Repr.fsti | LowParse.Repr.get_field_pos | val get_field_pos
(#k1: strong_parser_kind)
(#t1: Type)
(#p1: LP.parser k1 t1)
(#k2: strong_parser_kind)
(#t2: Type)
(#p2: LP.parser k2 t2)
(f: field_accessor p1 p2)
: get_field_pos_t f | val get_field_pos
(#k1: strong_parser_kind)
(#t1: Type)
(#p1: LP.parser k1 t1)
(#k2: strong_parser_kind)
(#t2: Type)
(#p2: LP.parser k2 t2)
(f: field_accessor p1 p2)
: get_field_pos_t f | let get_field_pos (#k1: strong_parser_kind) (#t1: Type) (#p1: LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2: Type) (#p2: LP.parser k2 t2)
(f:field_accessor p1 p2)
: get_field_pos_t f
= reveal_valid ();
fun #b pp ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let p = as_ptr pp in
let bb = temp_slice_of_repr_ptr p in
let pos = acc bb 0ul in
let pos_to = jump bb pos in
let q = mk p2' bb pos pos_to in
let len = pos_to - pos in
assert (Ptr?.b q `C.const_sub_buffer pos len` Ptr?.b p);
as_repr_pos b (pp.start_pos + pos) (pp.start_pos + pos + len) q | {
"file_name": "src/lowparse/LowParse.Repr.fsti",
"git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | {
"end_col": 67,
"end_line": 940,
"start_col": 0,
"start_line": 926
} | (*
Copyright 2015--2019 INRIA and Microsoft Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: T. Ramananandro, A. Rastogi, N. Swamy, A. Fromherz
*)
module LowParse.Repr
module LP = LowParse.Low.Base
module LS = LowParse.SLow.Base
module B = LowStar.Buffer
module HS = FStar.HyperStack
module C = LowStar.ConstBuffer
module U32 = FStar.UInt32
open FStar.Integers
open FStar.HyperStack.ST
module ST = FStar.HyperStack.ST
module I = LowStar.ImmutableBuffer
(* Summary:
A pointer-based representation type.
See
https://github.com/mitls/mitls-papers/wiki/The-Memory-Model-of-miTLS#pointer-based-transient-reprs
Calling it LowParse.Ptr since it should eventually move to everparse/src/lowparse
*)
/// `strong_parser_kind`: We restrict our attention to the
/// representation of types whose parsers have the strong-prefix
/// property.
let strong_parser_kind =
k:LP.parser_kind{
LP.(k.parser_kind_subkind == Some ParserStrong)
}
let preorder (c:C.const_buffer LP.byte) = C.qbuf_pre (C.as_qbuf c)
inline_for_extraction noextract
let slice_of_const_buffer (b:C.const_buffer LP.byte) (len:uint_32{U32.v len <= C.length b})
: LP.slice (preorder b) (preorder b)
=
LP.({
base = C.cast b;
len = len
})
let mut_p = LowStar.Buffer.trivial_preorder LP.byte
/// A slice is a const uint_8* and a length.
///
/// It is a layer around LP.slice, effectively guaranteeing that no
/// writes are performed via this pointer.
///
/// It allows us to uniformly represent `ptr t` backed by either
/// mutable or immutable arrays.
noeq
type const_slice =
| MkSlice:
base:C.const_buffer LP.byte ->
slice_len:uint_32 {
UInt32.v slice_len <= C.length base// /\
// slice_len <= LP.validator_max_length
} ->
const_slice
let to_slice (x:const_slice)
: Tot (LP.slice (preorder x.base) (preorder x.base))
= slice_of_const_buffer x.base x.slice_len
let of_slice (x:LP.slice mut_p mut_p)
: Tot const_slice
= let b = C.of_buffer x.LP.base in
let len = x.LP.len in
MkSlice b len
let live_slice (h:HS.mem) (c:const_slice) =
C.live h c.base
let slice_as_seq (h:HS.mem) (c:const_slice) =
Seq.slice (C.as_seq h c.base) 0 (U32.v c.slice_len)
(*** Pointer-based Representation types ***)
/// `meta t`: Each representation is associated with
/// specification metadata that records
///
/// -- the parser(s) that defines its wire format
/// -- the represented value
/// -- the bytes of its wire format
///
/// We retain both the value and its wire format for convenience.
///
/// An alternative would be to also retain here a serializer and then
/// compute the bytes when needed from the serializer. But that's a
/// bit heavy
[@erasable]
noeq
type meta (t:Type) = {
parser_kind: strong_parser_kind;
parser:LP.parser parser_kind t;
parser32:LS.parser32 parser;
v: t;
len: uint_32;
repr_bytes: Seq.lseq LP.byte (U32.v len);
meta_ok: squash (LowParse.Spec.Base.parse parser repr_bytes == Some (v, U32.v len))// /\
// 0ul < len /\
// len < LP.validator_max_length)
}
/// `repr_ptr t`: The main type of this module.
///
/// * The const pointer `b` refers to a representation of `t`
///
/// * The representation is described by erased the `meta` field
///
/// Temporary fields:
///
/// * At this stage, we also keep a real high-level value (vv). We
/// plan to gradually switch to its ghost counterpart (.meta.v) and
/// eventually remove vv, reducing implicit heap allocations.
///
/// * We also retain a concrete length field to facilitate using the
/// LowParse APIs for accessors and jumpers, which are oriented
/// towards using slices rather than pointers. As those APIs
/// change, we will remove the length field.
noeq
type repr_ptr (t:Type) =
| Ptr: b:C.const_buffer LP.byte ->
meta:meta t ->
vv:t ->
length:U32.t { U32.v meta.len == C.length b /\
meta.len == length /\
vv == meta.v } ->
repr_ptr t
let region_of #t (p:repr_ptr t) : GTot HS.rid = B.frameOf (C.cast p.b)
let value #t (p:repr_ptr t) : GTot t = p.meta.v
let repr_ptr_p (t:Type) (#k:strong_parser_kind) (parser:LP.parser k t) =
p:repr_ptr t{ p.meta.parser_kind == k /\ p.meta.parser == parser }
let slice_of_repr_ptr #t (p:repr_ptr t)
: GTot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.meta.len
let sub_ptr (p2:repr_ptr 'a) (p1: repr_ptr 'b) =
exists pos len. Ptr?.b p2 `C.const_sub_buffer pos len` Ptr?.b p1
let intro_sub_ptr (x:repr_ptr 'a) (y:repr_ptr 'b) (from to:uint_32)
: Lemma
(requires
to >= from /\
Ptr?.b x `C.const_sub_buffer from (to - from)` Ptr?.b y)
(ensures
x `sub_ptr` y)
= ()
/// TEMPORARY: DO NOT USE THIS FUNCTION UNLESS YOU REALLY HAVE SOME
/// SPECIAL REASON FOR IT
///
/// It is meant to support migration towards an EverParse API that
/// will eventually provide accessors and jumpers for pointers rather
/// than slices
inline_for_extraction noextract
let temp_slice_of_repr_ptr #t (p:repr_ptr t)
: Tot (LP.slice (preorder p.b) (preorder p.b))
= slice_of_const_buffer p.b p.length
(*** Validity ***)
/// `valid' r h`:
/// We define validity in two stages:
///
/// First, we provide `valid'`, a transparent definition and then
/// turn it `abstract` by the `valid` predicate just below.
///
/// Validity encapsulates three related LowParse notions:
///
/// 1. The underlying pointer contains a valid wire-format
/// (`valid_pos`)
///
/// 2. The ghost value associated with the `repr` is the
/// parsed value of the wire format.
///
/// 3. The bytes of the slice are indeed the representation of the
/// ghost value in wire format
unfold
let valid_slice (#t:Type) (#r #s:_) (slice:LP.slice r s) (meta:meta t) (h:HS.mem) =
LP.valid_content_pos meta.parser h slice 0ul meta.v meta.len /\
meta.repr_bytes == LP.bytes_of_slice_from_to h slice 0ul meta.len
unfold
let valid' (#t:Type) (p:repr_ptr t) (h:HS.mem) =
let slice = slice_of_const_buffer p.b p.meta.len in
valid_slice slice p.meta h
/// `valid`: abstract validity
val valid (#t:Type) (p:repr_ptr t) (h:HS.mem) : prop
/// `reveal_valid`:
/// An explicit lemma exposes the definition of `valid`
val reveal_valid (_:unit)
: Lemma (forall (t:Type) (p:repr_ptr t) (h:HS.mem).
{:pattern (valid #t p h)}
valid #t p h <==> valid' #t p h)
/// `fp r`: The memory footprint of a repr_ptr is the underlying pointer
let fp #t (p:repr_ptr t)
: GTot B.loc
= C.loc_buffer p.b
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
val frame_valid (#t:_) (p:repr_ptr t) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid p h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp p) l)
(ensures
valid p h1)
[SMTPat (valid p h1);
SMTPat (B.modifies l h0 h1)]
(*** Constructors ***)
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
#set-options "--z3rlimit 20"
inline_for_extraction noextract
let mk_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.meta.v == LP.contents parser h1 (to_slice b) from /\
p.b `C.const_sub_buffer from (to - from)` b.base)
= reveal_valid ();
let h = get () in
let slice = to_slice b in
LP.contents_exact_eq parser h slice from to;
let meta :meta t = {
parser_kind = _;
parser = parser;
parser32 = parser32;
v = LP.contents parser h slice from;
len = to - from;
repr_bytes = LP.bytes_of_slice_from_to h slice from to;
meta_ok = ()
} in
let sub_b = C.sub b.base from (to - from) in
let value =
// Compute [.v]; this code will eventually disappear
let sub_b_bytes = FStar.Bytes.of_buffer (to - from) (C.cast sub_b) in
let Some (v, _) = parser32 sub_b_bytes in
v
in
let p = Ptr sub_b meta value (to - from) in
let h1 = get () in
let slice' = slice_of_const_buffer sub_b (to - from) in
LP.valid_facts p.meta.parser h1 slice from; //elim valid_pos slice
LP.valid_facts p.meta.parser h1 slice' 0ul; //intro valid_pos slice'
p
/// `mk b from to p`:
/// Constructing a `repr_ptr` from a LowParse slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
noextract
let mk (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
#q
(slice:LP.slice (C.q_preorder q LP.byte) (C.q_preorder q LP.byte))
(from to:uint_32)
: Stack (repr_ptr_p t parser)
(requires fun h ->
LP.valid_pos parser h slice from to)
(ensures fun h0 p h1 ->
B.(modifies loc_none h0 h1) /\
valid p h1 /\
p.b `C.const_sub_buffer from (to - from)` (C.of_qbuf slice.LP.base) /\
p.meta.v == LP.contents parser h1 slice from)
= let c = MkSlice (C.of_qbuf slice.LP.base) slice.LP.len in
mk_from_const_slice parser32 c from to
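/// A minimal call-site sketch (hypothetical, not part of this interface): a
/// caller typically first runs a LowParse validator to establish
/// `LP.valid_pos parser h slice from pos'` and only then builds the pointer.
/// Here `validator : LP.validator parser` is assumed to be in scope and to
/// have succeeded:
///
///   let pos' = validator slice from in
///   let p = mk parser32 slice from pos' in
///   ...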
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:uint_32 { from <= b.LP.len })
(x: t)
: Stack (option (repr_ptr_p t parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 popt h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
(match popt with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some p ->
let size = size32 x in
valid p h1 /\
U32.v from + U32.v size <= U32.v b.LP.len /\
p.b == C.gsub (C.of_buffer b.LP.base) from size /\
p.meta.v == x))
= let size = size32 x in
let len = b.LP.len - from in
if len < size
then None
else begin
let bytes = serializer32 x in
let dst = B.sub b.LP.base from size in
(if size > 0ul then FStar.Bytes.store_bytes bytes dst);
let to = from + size in
let h = get () in
LP.serialize_valid_exact serializer h b x from to;
let r = mk parser32 b from to in
Some r
end
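/// A minimal call-site sketch (hypothetical caller): serializing a value `x`
/// into a mutable slice `out` starting at position 0ul and, on success,
/// obtaining a pointer whose ghost value is `x`:
///
///   match mk_from_serialize parser32 serializer32 size32 out 0ul x with
///   | None -> ... (* output slice too small *)
///   | Some p -> ... (* here `valid p h` and `p.meta.v == x` *)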
(*** Destructors ***)
/// Computes the length in bytes of the representation
/// Using a LowParse "jumper"
let length #t (p: repr_ptr t) (j:LP.jumper p.meta.parser)
: Stack U32.t
(requires fun h ->
valid p h)
(ensures fun h n h' ->
B.modifies B.loc_none h h' /\
n == p.meta.len)
= reveal_valid ();
let s = temp_slice_of_repr_ptr p in
(* TODO: Need to revise the type of jumpers to take a pointer as an argument, not a slice *)
j s 0ul
/// `to_bytes`: for intermediate purposes only, extract bytes from the repr
let to_bytes #t (p: repr_ptr t) (len:uint_32)
: Stack FStar.Bytes.bytes
(requires fun h ->
valid p h /\
len == p.meta.len
)
(ensures fun h x h' ->
B.modifies B.loc_none h h' /\
FStar.Bytes.reveal x == p.meta.repr_bytes /\
FStar.Bytes.len x == p.meta.len
)
= reveal_valid ();
FStar.Bytes.of_buffer len (C.cast p.b)
(*** Stable Representations ***)
(*
By copying a representation into an immutable buffer `i`,
we obtain a stable representation, which remains valid so long
as the `i` remains live.
We achieve this by relying on support for monotonic state provided
by Low*, as described in the POPL '18 paper "Recalling a Witness"
TODO: The feature also relies on an as yet unimplemented feature to
atomically allocate and initialize a buffer to a chosen
value. This will soon be added to the LowStar library.
*)
/// `valid_if_live`: A pure predicate on `r:repr_ptr t` that states that
/// so long as the underlying buffer is live in a given state `h`,
/// `p` is valid in that state
let valid_if_live #t (p:repr_ptr t) =
C.qbuf_qual (C.as_qbuf p.b) == C.IMMUTABLE /\
(let i : I.ibuffer LP.byte = C.as_mbuf p.b in
let m = p.meta in
i `I.value_is` Ghost.hide m.repr_bytes /\
(exists (h:HS.mem).{:pattern valid p h}
m.repr_bytes == B.as_seq h i /\
valid p h /\
(forall h'.
C.live h' p.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i ==>
valid p h')))
/// `stable_repr_ptr t`: A representation that is valid if its buffer is
/// live
let stable_repr_ptr t = p:repr_ptr t { valid_if_live p }
/// `valid_if_live_intro` :
/// An internal lemma to introduce `valid_if_live`
// Note: the next proof is flaky and occasionally enters a triggering
// vortex with the notorious FStar.Seq.Properties.slice_slice
// Removing that from the context makes the proof instantaneous
#push-options "--max_ifuel 1 --initial_ifuel 1 \
--using_facts_from '* -FStar.Seq.Properties.slice_slice'"
let valid_if_live_intro #t (r:repr_ptr t) (h:HS.mem)
: Lemma
(requires (
C.qbuf_qual (C.as_qbuf r.b) == C.IMMUTABLE /\
valid r h /\
(let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let m = r.meta in
B.as_seq h i == m.repr_bytes /\
i `I.value_is` Ghost.hide m.repr_bytes)))
(ensures
valid_if_live r)
= reveal_valid ();
let i : I.ibuffer LP.byte = C.as_mbuf r.b in
let aux (h':HS.mem)
: Lemma
(requires
C.live h' r.b /\
B.as_seq h i `Seq.equal` B.as_seq h' i)
(ensures
valid r h')
[SMTPat (valid r h')]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h' (slice_of_repr_ptr r) 0ul
in
()
let sub_ptr_stable (#t0 #t1:_) (r0:repr_ptr t0) (r1:repr_ptr t1) (h:HS.mem)
: Lemma
(requires
r0 `sub_ptr` r1 /\
valid_if_live r1 /\
valid r1 h /\
valid r0 h)
(ensures
valid_if_live r0 /\
(let b0 = C.cast r0.b in
let b1 = C.cast r1.b in
B.frameOf b0 == B.frameOf b1 /\
(B.region_lifetime_buf b1 ==>
B.region_lifetime_buf b0)))
[SMTPat (r0 `sub_ptr` r1);
SMTPat (valid_if_live r1);
SMTPat (valid r0 h)]
= reveal_valid ();
let b0 : I.ibuffer LP.byte = C.cast r0.b in
let b1 : I.ibuffer LP.byte = C.cast r1.b in
assert (I.value_is b1 (Ghost.hide r1.meta.repr_bytes));
assert (Seq.length r1.meta.repr_bytes == B.length b1);
let aux (i len:U32.t)
: Lemma
(requires
r0.b `C.const_sub_buffer i len` r1.b)
(ensures
I.value_is b0 (Ghost.hide r0.meta.repr_bytes))
[SMTPat (r0.b `C.const_sub_buffer i len` r1.b)]
= I.sub_ptr_value_is b0 b1 h i len r1.meta.repr_bytes
in
B.region_lifetime_sub #_ #_ #_ #(I.immutable_preorder _) b1 b0;
valid_if_live_intro r0 h
/// `recall_stable_repr_ptr` Main lemma: if the underlying buffer is live
/// then a stable repr_ptr is valid
let recall_stable_repr_ptr #t (r:stable_repr_ptr t)
: Stack unit
(requires fun h ->
C.live h r.b)
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid r h1)
= reveal_valid ();
let h1 = get () in
let i = C.to_ibuffer r.b in
let aux (h:HS.mem)
: Lemma
(requires
valid r h /\
B.as_seq h i == B.as_seq h1 i)
(ensures
valid r h1)
[SMTPat (valid r h)]
= let m = r.meta in
LP.valid_ext_intro m.parser h (slice_of_repr_ptr r) 0ul h1 (slice_of_repr_ptr r) 0ul
in
let es =
let m = r.meta in
Ghost.hide m.repr_bytes
in
I.recall_value i es
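/// A minimal usage sketch (hypothetical client code): once the underlying
/// buffer of a stable pointer is known to be live, recalling it restores
/// validity, so the stored value can be read without re-validating the bytes:
///
///   let read_stable (#t:Type) (r:stable_repr_ptr t)
///     : Stack t (requires fun h -> C.live h r.b)
///               (ensures fun h0 _ h1 -> h0 == h1)
///     = recall_stable_repr_ptr r; r.vv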
let is_stable_in_region #t (p:repr_ptr t) =
let r = B.frameOf (C.cast p.b) in
valid_if_live p /\
B.frameOf (C.cast p.b) == r /\
B.region_lifetime_buf (C.cast p.b)
let stable_region_repr_ptr (r:ST.drgn) (t:Type) =
p:repr_ptr t {
is_stable_in_region p /\
B.frameOf (C.cast p.b) == ST.rid_of_drgn r
}
let recall_stable_region_repr_ptr #t (r:ST.drgn) (p:stable_region_repr_ptr r t)
: Stack unit
(requires fun h ->
HS.live_region h (ST.rid_of_drgn r))
(ensures fun h0 _ h1 ->
h0 == h1 /\
valid p h1)
= B.recall (C.cast p.b);
recall_stable_repr_ptr p
private
let ralloc_and_blit (r:ST.drgn) (src:C.const_buffer LP.byte) (len:U32.t)
: ST (b:C.const_buffer LP.byte)
(requires fun h0 ->
HS.live_region h0 (ST.rid_of_drgn r) /\
U32.v len == C.length src /\
C.live h0 src)
(ensures fun h0 b h1 ->
let c = C.as_qbuf b in
let s = Seq.slice (C.as_seq h0 src) 0 (U32.v len) in
let r = ST.rid_of_drgn r in
C.qbuf_qual c == C.IMMUTABLE /\
B.alloc_post_mem_common (C.to_ibuffer b) h0 h1 s /\
C.to_ibuffer b `I.value_is` s /\
B.region_lifetime_buf (C.cast b) /\
B.frameOf (C.cast b) == r)
= let src_buf = C.cast src in
assume (U32.v len > 0);
let b : I.ibuffer LP.byte = B.mmalloc_drgn_and_blit r src_buf 0ul len in
let h0 = get() in
B.witness_p b (I.seq_eq (Ghost.hide (Seq.slice (B.as_seq h0 src_buf) 0 (U32.v len))));
C.of_ibuffer b
/// `stash`: Main stateful operation
/// Copies a repr_ptr into a fresh stable repr_ptr in the given region
let stash (rgn:ST.drgn) #t (r:repr_ptr t) (len:uint_32{len == r.meta.len})
: ST (stable_region_repr_ptr rgn t)
(requires fun h ->
valid r h /\
HS.live_region h (ST.rid_of_drgn rgn))
(ensures fun h0 r' h1 ->
B.modifies B.loc_none h0 h1 /\
valid r' h1 /\
r.meta == r'.meta)
= reveal_valid ();
let buf' = ralloc_and_blit rgn r.b len in
let s = MkSlice buf' len in
let h = get () in
let _ =
let slice = slice_of_const_buffer r.b len in
let slice' = slice_of_const_buffer buf' len in
LP.valid_facts r.meta.parser h slice 0ul; //elim valid_pos slice
LP.valid_facts r.meta.parser h slice' 0ul //intro valid_pos slice'
in
let p = Ptr buf' r.meta r.vv r.length in
valid_if_live_intro p h;
p
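/// A minimal usage sketch (hypothetical caller): a transient pointer `r` can
/// be copied into a long-lived region and revived later:
///
///   let p = stash rgn r r.length in
///   (* ... arbitrary heap updates that keep `rgn` live ... *)
///   recall_stable_region_repr_ptr rgn p
///   (* `valid p` holds again at this point *)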
(*** Accessing fields of ptrs ***)
/// Instances of field_accessor should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq inline_for_extraction
type field_accessor (#k1 #k2:strong_parser_kind)
(#t1 #t2:Type)
(p1 : LP.parser k1 t1)
(p2 : LP.parser k2 t2) =
| FieldAccessor :
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(jump:LP.jumper p2) ->
(p2': LS.parser32 p2) ->
field_accessor p1 p2
unfold noextract
let field_accessor_comp (#k1 #k2 #k3:strong_parser_kind)
(#t1 #t2 #t3:Type)
(#p1 : LP.parser k1 t1)
(#p2 : LP.parser k2 t2)
(#p3 : LP.parser k3 t3)
(f12 : field_accessor p1 p2)
(f23 : field_accessor p2 p3)
: field_accessor p1 p3
=
[@inline_let] let FieldAccessor acc12 j2 p2' = f12 in
[@inline_let] let FieldAccessor acc23 j3 p3' = f23 in
[@inline_let] let acc13 = LP.accessor_compose acc12 acc23 () in
FieldAccessor acc13 j3 p3'
unfold noextract
let field_accessor_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
= p:repr_ptr_p t1 p1 ->
Stack (repr_ptr_p t2 p2)
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 (q:repr_ptr_p t2 p2) h1 ->
f.cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid q h1 /\
value q == f.cl.LP.clens_get (value p) /\
q `sub_ptr` p /\
region_of q == region_of p)
inline_for_extraction
let get_field (#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2:Type) (#p2:LP.parser k2 t2)
(f:field_accessor p1 p2)
: field_accessor_t f
= reveal_valid ();
fun p ->
[@inline_let] let FieldAccessor acc jump p2' = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
let pos_to = jump b pos in
let q = mk p2' b pos pos_to in
let h = get () in
assert (q.b `C.const_sub_buffer pos (pos_to - pos)` p.b);
assert (q `sub_ptr` p); //needed to trigger the sub_ptr_stable lemma
assert (is_stable_in_region p ==> is_stable_in_region q);
q
/// Instances of field_reader should be marked `unfold`
/// so that we get compact verification conditions for the lens conditions
/// and to inline the code for extraction
noeq
type field_reader (#k1:strong_parser_kind)
(#t1:Type)
(p1 : LP.parser k1 t1)
(t2:Type) =
| FieldReader :
(#k2: strong_parser_kind) ->
(#p2: LP.parser k2 t2) ->
(#cl: LP.clens t1 t2) ->
(#g: LP.gaccessor p1 p2 cl) ->
(acc:LP.accessor g) ->
(reader: LP.leaf_reader p2) ->
field_reader p1 t2
unfold
let field_reader_t
(#k1:strong_parser_kind) #t1 (#p1:LP.parser k1 t1)
(#t2:Type)
(f:field_reader p1 t2)
= p:repr_ptr_p t1 p1 ->
Stack t2
(requires fun h ->
valid p h /\
f.cl.LP.clens_cond p.meta.v)
(ensures fun h0 pv h1 ->
B.modifies B.loc_none h0 h1 /\
pv == f.cl.LP.clens_get (value p))
inline_for_extraction
let read_field (#k1:strong_parser_kind) (#t1:_) (#p1:LP.parser k1 t1)
#t2 (f:field_reader p1 t2)
: field_reader_t f
= reveal_valid ();
fun p ->
[@inline_let]
let FieldReader acc reader = f in
let b = temp_slice_of_repr_ptr p in
let pos = acc b 0ul in
reader b pos
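/// A hypothetical instantiation sketch (all names illustrative, not part of
/// this interface): a protocol module with a generated accessor
/// `accessor_hdr_len` into a 16-bit length field and a leaf reader `read_u16`
/// would typically expose
///
///   unfold let hdr_len_reader : field_reader hdr_parser U16.t =
///     FieldReader accessor_hdr_len read_u16
///
/// and read the field from a valid pointer with `read_field hdr_len_reader p`.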
(*** Positional representation types ***)
/// `index b` is the type of valid indexes into `b`
let index (b:const_slice) = i:uint_32{ i <= b.slice_len }
noeq
type repr_pos (t:Type) (b:const_slice) =
| Pos: start_pos:index b ->
meta:meta t ->
vv_pos:t -> //temporary
length:U32.t { U32.v start_pos + U32.v meta.len <= U32.v b.slice_len /\
vv_pos == meta.v /\
length == meta.len } ->
repr_pos t b
let value_pos #t #b (r:repr_pos t b) : GTot t = r.meta.v
let as_ptr_spec #t #b (p:repr_pos t b)
: GTot (repr_ptr t)
= Ptr (C.gsub b.base p.start_pos ((Pos?.meta p).len))
(Pos?.meta p)
(Pos?.vv_pos p)
(Pos?.length p)
let const_buffer_of_repr_pos #t #b (r:repr_pos t b)
: GTot (C.const_buffer LP.byte)
= C.gsub b.base r.start_pos r.meta.len
/// `repr_pos_p`, the analog of `repr_ptr_p`
let repr_pos_p (t:Type) (b:const_slice) #k (parser:LP.parser k t) =
r:repr_pos t b {
r.meta.parser_kind == k /\
r.meta.parser == parser
}
(*** Validity ***)
/// `valid`: abstract validity
let valid_repr_pos (#t:Type) (#b:const_slice) (r:repr_pos t b) (h:HS.mem)
= valid (as_ptr_spec r) h /\
C.live h b.base
/// `fp r`: The memory footprint of a repr_pos is the
/// sub-slice b.[from, to)
let fp_pos #t (#b:const_slice) (r:repr_pos t b)
: GTot B.loc
= fp (as_ptr_spec r)
/// `frame_valid`:
/// A framing principle for `valid r h`
/// It is invariant under footprint-preserving heap updates
let frame_valid_repr_pos #t #b (r:repr_pos t b) (l:B.loc) (h0 h1:HS.mem)
: Lemma
(requires
valid_repr_pos r h0 /\
B.modifies l h0 h1 /\
B.loc_disjoint (fp_pos r) l)
(ensures
valid_repr_pos r h1)
[SMTPat (valid_repr_pos r h1);
SMTPat (B.modifies l h0 h1)]
= ()
(*** Operations on repr_pos ***)
/// End position
/// On positional reprs, this is concretely computable
let end_pos #t #b (r:repr_pos t b)
: index b
= r.start_pos + r.length
let valid_repr_pos_elim
(#t: Type)
(#b: const_slice)
(r: repr_pos t b)
(h: HS.mem)
: Lemma
(requires (
valid_repr_pos r h
))
(ensures (
LP.valid_content_pos r.meta.parser h (to_slice b) r.start_pos r.meta.v (end_pos r)
))
= reveal_valid ();
let p : repr_ptr t = as_ptr_spec r in
let slice = slice_of_const_buffer (Ptr?.b p) (Ptr?.meta p).len in
LP.valid_facts r.meta.parser h slice 0ul;
LP.valid_facts r.meta.parser h (to_slice b) r.start_pos;
LP.parse_strong_prefix r.meta.parser (LP.bytes_of_slice_from h slice 0ul) (LP.bytes_of_slice_from h (to_slice b) r.start_pos)
/// Mostly just by inheriting operations on pointers
let as_ptr #t #b (r:repr_pos t b)
: Stack (repr_ptr t)
(requires fun h ->
valid_repr_pos r h)
(ensures fun h0 ptr h1 ->
ptr == as_ptr_spec r /\
h0 == h1)
= let b = C.sub b.base r.start_pos (Ghost.hide r.meta.len) in
let m = r.meta in
let v = r.vv_pos in
let l = r.length in
Ptr b m v l
let as_repr_pos #t (b:const_slice) (from to:index b) (p:repr_ptr t)
: Pure (repr_pos t b)
(requires
from <= to /\
Ptr?.b p == C.gsub b.base from (to - from))
(ensures fun r ->
p == as_ptr_spec r)
= Pos from (Ptr?.meta p) (Ptr?.vv p) (to - from)
/// `mk_repr_pos b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos (#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:LP.slice mut_p mut_p)
(from to:index (of_slice b))
: Stack (repr_pos_p t (of_slice b) parser)
(requires fun h ->
LP.valid_pos parser h b from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 b from)
= as_repr_pos (of_slice b) from to (mk parser32 b from to)
/// `mk b from to p`:
/// Constructing a `repr_pos` from a sub-slice
/// b.[from, to)
/// known to be valid for a given wire-format parser `p`
inline_for_extraction
let mk_repr_pos_from_const_slice
(#k:strong_parser_kind) #t (#parser:LP.parser k t)
(parser32:LS.parser32 parser)
(b:const_slice)
(from to:index b)
: Stack (repr_pos_p t b parser)
(requires fun h ->
LP.valid_pos parser h (to_slice b) from to)
(ensures fun h0 r h1 ->
B.(modifies loc_none h0 h1) /\
valid_repr_pos r h1 /\
r.start_pos = from /\
end_pos r = to /\
r.vv_pos == LP.contents parser h1 (to_slice b) from)
= as_repr_pos b from to (mk_from_const_slice parser32 b from to)
/// A high-level constructor, taking a value instead of a slice.
///
/// Can we remove the `noextract` for the time being? Can we
/// `optimize` it so that vv is assigned x? It will take us a while to
/// lower all message writing.
inline_for_extraction
noextract
let mk_repr_pos_from_serialize
(#k:strong_parser_kind) #t (#parser:LP.parser k t) (#serializer: LP.serializer parser)
(parser32: LS.parser32 parser) (serializer32: LS.serializer32 serializer)
(size32: LS.size32 serializer)
(b:LP.slice mut_p mut_p)
(from:index (of_slice b))
(x: t)
: Stack (option (repr_pos_p t (of_slice b) parser))
(requires fun h ->
LP.live_slice h b)
(ensures fun h0 r h1 ->
B.modifies (LP.loc_slice_from b from) h0 h1 /\
begin match r with
| None ->
(* not enough space in output slice *)
Seq.length (LP.serialize serializer x) > FStar.UInt32.v (b.LP.len - from)
| Some r ->
valid_repr_pos r h1 /\
r.start_pos == from /\
r.vv_pos == x /\
v (end_pos r) = v from + v (size32 x)
end
)
= let size = size32 x in
match (mk_from_serialize parser32 serializer32 size32 b from x) with
| None -> None
| Some p -> Some (as_repr_pos (of_slice b) from (from + size) p)
/// Accessors on positional reprs
unfold
let field_accessor_pos_post (#b:const_slice) (#t1:Type) (p:repr_pos t1 b)
(#k2: strong_parser_kind)
(#t2:Type)
(#p2: LP.parser k2 t2)
(f:field_accessor p.meta.parser p2) =
fun h0 (q:repr_pos_p t2 b p2) h1 ->
let cl = FieldAccessor?.cl f in
cl.LP.clens_cond p.meta.v /\
B.modifies B.loc_none h0 h1 /\
valid_repr_pos q h1 /\
value_pos q == cl.LP.clens_get (value_pos p)
unfold
let get_field_pos_t (#k1: strong_parser_kind) (#t1: Type) (#p1: LP.parser k1 t1)
(#k2: strong_parser_kind) (#t2: Type) (#p2: LP.parser k2 t2)
(f:field_accessor p1 p2)
= (#b:const_slice) ->
(pp:repr_pos_p t1 b p1) ->
Stack (repr_pos_p t2 b p2)
(requires fun h ->
let cl = FieldAccessor?.cl f in
valid_repr_pos pp h /\
cl.LP.clens_cond pp.meta.v)
(ensures
field_accessor_pos_post pp f) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.ImmutableBuffer.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.Buffer.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"LowParse.SLow.Base.fst.checked",
"LowParse.Low.Base.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Bytes.fsti.checked"
],
"interface_file": false,
"source_file": "LowParse.Repr.fsti"
} | [
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": true,
"full_module": "LowStar.ImmutableBuffer",
"short_module": "I"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "C"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "LowParse.SLow.Base",
"short_module": "LS"
},
{
"abbrev": true,
"full_module": "LowParse.Low.Base",
"short_module": "LP"
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: LowParse.Repr.field_accessor p1 p2 -> LowParse.Repr.get_field_pos_t f | Prims.Tot | [
"total"
] | [] | [
"LowParse.Repr.strong_parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Repr.field_accessor",
"LowParse.Repr.const_slice",
"LowParse.Repr.repr_pos_p",
"LowParse.Low.Base.Spec.clens",
"LowParse.Low.Base.Spec.gaccessor",
"LowParse.Low.Base.accessor",
"LowParse.Low.Base.jumper",
"LowParse.SLow.Base.parser32",
"LowParse.Repr.as_repr_pos",
"FStar.Integers.op_Plus",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"LowParse.Repr.__proj__Pos__item__start_pos",
"Prims.unit",
"Prims._assert",
"LowStar.ConstBuffer.const_sub_buffer",
"LowParse.Bytes.byte",
"LowParse.Repr.__proj__Ptr__item__b",
"FStar.Integers.int_t",
"FStar.Integers.op_Subtraction",
"LowParse.Repr.repr_ptr_p",
"LowParse.Repr.mk",
"LowStar.ConstBuffer.qbuf_qual",
"LowStar.ConstBuffer.as_qbuf",
"FStar.UInt32.t",
"LowParse.Repr.preorder",
"FStar.UInt32.__uint_to_t",
"LowParse.Slice.slice",
"LowParse.Repr.temp_slice_of_repr_ptr",
"LowParse.Repr.repr_ptr",
"LowParse.Repr.as_ptr",
"LowParse.Repr.reveal_valid",
"LowParse.Repr.get_field_pos_t"
] | [] | false | false | false | false | false | let get_field_pos
(#k1: strong_parser_kind)
(#t1: Type)
(#p1: LP.parser k1 t1)
(#k2: strong_parser_kind)
(#t2: Type)
(#p2: LP.parser k2 t2)
(f: field_accessor p1 p2)
: get_field_pos_t f =
| reveal_valid ();
fun #b pp ->
[@@ inline_let ]let FieldAccessor acc jump p2' = f in
let p = as_ptr pp in
let bb = temp_slice_of_repr_ptr p in
let pos = acc bb 0ul in
let pos_to = jump bb pos in
let q = mk p2' bb pos pos_to in
let len = pos_to - pos in
assert (C.const_sub_buffer pos len (Ptr?.b q) (Ptr?.b p));
as_repr_pos b (pp.start_pos + pos) (pp.start_pos + pos + len) q | false |
FStar.Algebra.CommMonoid.Equiv.fst | FStar.Algebra.CommMonoid.Equiv.elim_eq_laws | val elim_eq_laws (#a: _) (eq: equiv a)
: Lemma
((forall x. {:pattern (x `eq.eq` x)} x `eq.eq` x) /\
(forall x y. {:pattern (x `eq.eq` y)} x `eq.eq` y ==> y `eq.eq` x) /\
(forall x y z. {:pattern eq.eq x y; eq.eq y z} (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z)) | val elim_eq_laws (#a: _) (eq: equiv a)
: Lemma
((forall x. {:pattern (x `eq.eq` x)} x `eq.eq` x) /\
(forall x y. {:pattern (x `eq.eq` y)} x `eq.eq` y ==> y `eq.eq` x) /\
(forall x y z. {:pattern eq.eq x y; eq.eq y z} (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z)) | let elim_eq_laws #a (eq:equiv a)
: Lemma (
(forall x.{:pattern (x `eq.eq` x)} x `eq.eq` x) /\
(forall x y.{:pattern (x `eq.eq` y)} x `eq.eq` y ==> y `eq.eq` x) /\
(forall x y z.{:pattern eq.eq x y; eq.eq y z} (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z)
)
= introduce forall x. x `eq.eq` x
with (eq.reflexivity x);
introduce forall x y. x `eq.eq` y ==> y `eq.eq` x
with (introduce _ ==> _
with _. eq.symmetry x y);
introduce forall x y z. (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z
with (introduce _ ==> _
with _. eq.transitivity x y z) | {
"file_name": "ulib/FStar.Algebra.CommMonoid.Equiv.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 40,
"end_line": 44,
"start_col": 0,
"start_line": 29
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Algebra.CommMonoid.Equiv
open FStar.Mul
unopteq
type equiv (a:Type) =
| EQ :
eq:(a -> a -> Type0) ->
reflexivity:(x:a -> Lemma (x `eq` x)) ->
symmetry:(x:a -> y:a -> Lemma (requires (x `eq` y)) (ensures (y `eq` x))) ->
transitivity:(x:a -> y:a -> z:a -> Lemma (requires (x `eq` y /\ y `eq` z)) (ensures (x `eq` z))) ->
equiv a | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Classical.Sugar.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Algebra.CommMonoid.Equiv.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.CommMonoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.CommMonoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | eq: FStar.Algebra.CommMonoid.Equiv.equiv a
-> FStar.Pervasives.Lemma
(ensures
(forall (x: a). {:pattern EQ?.eq eq x x} EQ?.eq eq x x) /\
(forall (x: a) (y: a). {:pattern EQ?.eq eq x y} EQ?.eq eq x y ==> EQ?.eq eq y x) /\
(forall (x: a) (y: a) (z: a). {:pattern EQ?.eq eq x y; EQ?.eq eq y z}
EQ?.eq eq x y /\ EQ?.eq eq y z ==> EQ?.eq eq x z)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Classical.Sugar.forall_intro",
"Prims.l_Forall",
"Prims.l_imp",
"Prims.l_and",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__eq",
"FStar.Classical.Sugar.implies_intro",
"Prims.squash",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__transitivity",
"Prims.unit",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__symmetry",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__reflexivity",
"Prims.l_True",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let elim_eq_laws #a (eq: equiv a)
: Lemma
((forall x. {:pattern (x `eq.eq` x)} x `eq.eq` x) /\
(forall x y. {:pattern (x `eq.eq` y)} x `eq.eq` y ==> y `eq.eq` x) /\
(forall x y z. {:pattern eq.eq x y; eq.eq y z} (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z)) =
| introduce forall x . x `eq.eq` x
with (eq.reflexivity x);
introduce forall x y . x `eq.eq` y ==> y `eq.eq` x
with (introduce _ ==> _
with _. eq.symmetry x y);
introduce forall x y z . (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z
with (introduce _ ==> _
with _. eq.transitivity x y z) | false |
FStar.Tactics.CanonMonoid.fst | FStar.Tactics.CanonMonoid.reification_aux | val reification_aux (#a: Type) (mult unit me: term) : Tac (exp a) | val reification_aux (#a: Type) (mult unit me: term) : Tac (exp a) | let rec reification_aux (#a:Type) (mult unit me : term) : Tac (exp a) =
let hd, tl = collect_app_ref me in
let tl = list_unref tl in
match inspect hd, tl with
| Tv_FVar fv, [(me1, Q_Explicit) ; (me2, Q_Explicit)] ->
if term_eq_old (pack (Tv_FVar fv)) mult
then Mult (reification_aux mult unit me1) (reification_aux mult unit me2)
else Var (unquote me)
| _, _ ->
if term_eq_old me unit
then Unit
else Var (unquote me) | {
"file_name": "ulib/FStar.Tactics.CanonMonoid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 25,
"end_line": 94,
"start_col": 0,
"start_line": 83
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Tactics.CanonMonoid
open FStar.Algebra.Monoid
open FStar.List
open FStar.Reflection.V2
open FStar.Tactics.V2
(* Only dump when debugging is on *)
let dump m = if debugging () then dump m
(* "A Monoid Expression Simplifier" ported from
http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html *)
type exp (a:Type) : Type =
| Unit : exp a
| Var : a -> exp a
| Mult : exp a -> exp a -> exp a
let rec exp_to_string (#a:Type) (a_to_string:a->string) (e:exp a) =
match e with
| Unit -> "Unit"
| Var x -> "Var " ^ a_to_string x
| Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1
^ ") (" ^ exp_to_string a_to_string e2 ^ ")"
let rec mdenote (#a:Type) (m:monoid a) (e:exp a) : a =
match e with
| Unit -> Monoid?.unit m
| Var x -> x
| Mult e1 e2 -> Monoid?.mult m (mdenote m e1) (mdenote m e2)
let rec mldenote (#a:Type) (m:monoid a) (xs:list a) : a =
match xs with
| [] -> Monoid?.unit m
| [x] -> x
| x::xs' -> Monoid?.mult m x (mldenote m xs')
let rec flatten (#a:Type) (e:exp a) : list a =
match e with
| Unit -> []
| Var x -> [x]
| Mult e1 e2 -> flatten e1 @ flatten e2
(* This proof internally uses the monoid laws; the SMT solver picks up
on them because they are written as squashed formulas in the
definition of monoid; need to be careful with this since these are
quantified formulas without any patterns. Dangerous stuff! *)
let rec flatten_correct_aux (#a:Type) (m:monoid a) ml1 ml2 :
Lemma (mldenote m (ml1 @ ml2) == Monoid?.mult m (mldenote m ml1)
(mldenote m ml2)) =
match ml1 with
| [] -> ()
| e::es1' -> flatten_correct_aux m es1' ml2
let rec flatten_correct (#a:Type) (m:monoid a) (e:exp a) :
Lemma (mdenote m e == mldenote m (flatten e)) =
match e with
| Unit | Var _ -> ()
| Mult e1 e2 -> flatten_correct_aux m (flatten e1) (flatten e2);
flatten_correct m e1; flatten_correct m e2
let monoid_reflect (#a:Type) (m:monoid a) (e1 e2:exp a)
(_ : squash (mldenote m (flatten e1) == mldenote m (flatten e2)))
: squash (mdenote m e1 == mdenote m e2) =
flatten_correct m e1; flatten_correct m e2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Algebra.Monoid.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Tactics.CanonMonoid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Tactics.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Reflection.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.Monoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
mult: FStar.Tactics.NamedView.term ->
unit: FStar.Tactics.NamedView.term ->
me: FStar.Tactics.NamedView.term
-> FStar.Tactics.Effect.Tac (FStar.Tactics.CanonMonoid.exp a) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Tactics.NamedView.term",
"FStar.Stubs.Reflection.Types.term",
"Prims.l_or",
"Prims.eq2",
"Prims.precedes",
"Prims.list",
"FStar.Stubs.Reflection.V2.Data.argv",
"FStar.Pervasives.Native.fst",
"FStar.Stubs.Reflection.V2.Data.aqualv",
"FStar.Stubs.Reflection.Types.fv",
"FStar.Tactics.CanonMonoid.Mult",
"FStar.Tactics.CanonMonoid.exp",
"FStar.Tactics.CanonMonoid.reification_aux",
"Prims.bool",
"FStar.Tactics.CanonMonoid.Var",
"FStar.Stubs.Tactics.V2.Builtins.unquote",
"FStar.Stubs.Tactics.V2.Builtins.term_eq_old",
"FStar.Tactics.NamedView.pack",
"FStar.Tactics.NamedView.Tv_FVar",
"FStar.Tactics.NamedView.named_term_view",
"FStar.Pervasives.Native.tuple2",
"FStar.Tactics.CanonMonoid.Unit",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Tactics.NamedView.inspect",
"FStar.List.Tot.Base.list_unref",
"FStar.Reflection.V2.Derived.Lemmas.collect_app_ref"
] | [
"recursion"
] | false | true | false | false | false | let rec reification_aux (#a: Type) (mult unit me: term) : Tac (exp a) =
| let hd, tl = collect_app_ref me in
let tl = list_unref tl in
match inspect hd, tl with
| Tv_FVar fv, [me1, Q_Explicit ; me2, Q_Explicit] ->
if term_eq_old (pack (Tv_FVar fv)) mult
then Mult (reification_aux mult unit me1) (reification_aux mult unit me2)
else Var (unquote me)
| _, _ -> if term_eq_old me unit then Unit else Var (unquote me) | false |
FStar.Tactics.CanonMonoid.fst | FStar.Tactics.CanonMonoid.reification | val reification (#a: Type) (m: monoid a) (me: term) : Tac (exp a) | val reification (#a: Type) (m: monoid a) (me: term) : Tac (exp a) | let reification (#a:Type) (m:monoid a) (me:term) : Tac (exp a) =
let mult = norm_term [delta;zeta;iota] (quote (Monoid?.mult m)) in
let unit = norm_term [delta;zeta;iota] (quote (Monoid?.unit m)) in
let me = norm_term [delta;zeta;iota] me in
// dump ("mult = " ^ term_to_string mult ^
// "; unit = " ^ term_to_string unit ^
// "; me = " ^ term_to_string me);
reification_aux mult unit me | {
"file_name": "ulib/FStar.Tactics.CanonMonoid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 32,
"end_line": 103,
"start_col": 0,
"start_line": 96
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Tactics.CanonMonoid
open FStar.Algebra.Monoid
open FStar.List
open FStar.Reflection.V2
open FStar.Tactics.V2
(* Only dump when debugging is on *)
let dump m = if debugging () then dump m
(* "A Monoid Expression Simplifier" ported from
http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html *)
type exp (a:Type) : Type =
| Unit : exp a
| Var : a -> exp a
| Mult : exp a -> exp a -> exp a
let rec exp_to_string (#a:Type) (a_to_string:a->string) (e:exp a) =
match e with
| Unit -> "Unit"
| Var x -> "Var " ^ a_to_string x
| Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1
^ ") (" ^ exp_to_string a_to_string e2 ^ ")"
let rec mdenote (#a:Type) (m:monoid a) (e:exp a) : a =
match e with
| Unit -> Monoid?.unit m
| Var x -> x
| Mult e1 e2 -> Monoid?.mult m (mdenote m e1) (mdenote m e2)
let rec mldenote (#a:Type) (m:monoid a) (xs:list a) : a =
match xs with
| [] -> Monoid?.unit m
| [x] -> x
| x::xs' -> Monoid?.mult m x (mldenote m xs')
let rec flatten (#a:Type) (e:exp a) : list a =
match e with
| Unit -> []
| Var x -> [x]
| Mult e1 e2 -> flatten e1 @ flatten e2
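(* Worked example: flatten (Mult (Var a) (Mult Unit (Var b))) = [a; b], and
   mldenote m [a; b] = Monoid?.mult m a b -- the canonical right-nested form
   both sides of a goal are rewritten into. *)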
(* This proof internally uses the monoid laws; the SMT solver picks up
on them because they are written as squashed formulas in the
definition of monoid; need to be careful with this since these are
quantified formulas without any patterns. Dangerous stuff! *)
let rec flatten_correct_aux (#a:Type) (m:monoid a) ml1 ml2 :
Lemma (mldenote m (ml1 @ ml2) == Monoid?.mult m (mldenote m ml1)
(mldenote m ml2)) =
match ml1 with
| [] -> ()
| e::es1' -> flatten_correct_aux m es1' ml2
let rec flatten_correct (#a:Type) (m:monoid a) (e:exp a) :
Lemma (mdenote m e == mldenote m (flatten e)) =
match e with
| Unit | Var _ -> ()
| Mult e1 e2 -> flatten_correct_aux m (flatten e1) (flatten e2);
flatten_correct m e1; flatten_correct m e2
let monoid_reflect (#a:Type) (m:monoid a) (e1 e2:exp a)
(_ : squash (mldenote m (flatten e1) == mldenote m (flatten e2)))
: squash (mdenote m e1 == mdenote m e2) =
flatten_correct m e1; flatten_correct m e2
// This expects that mult, unit, and me have already been normalized
let rec reification_aux (#a:Type) (mult unit me : term) : Tac (exp a) =
let hd, tl = collect_app_ref me in
let tl = list_unref tl in
match inspect hd, tl with
| Tv_FVar fv, [(me1, Q_Explicit) ; (me2, Q_Explicit)] ->
if term_eq_old (pack (Tv_FVar fv)) mult
then Mult (reification_aux mult unit me1) (reification_aux mult unit me2)
else Var (unquote me)
| _, _ ->
if term_eq_old me unit
then Unit
else Var (unquote me) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Algebra.Monoid.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Tactics.CanonMonoid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Tactics.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Reflection.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.Monoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: FStar.Algebra.Monoid.monoid a -> me: FStar.Tactics.NamedView.term
-> FStar.Tactics.Effect.Tac (FStar.Tactics.CanonMonoid.exp a) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Algebra.Monoid.monoid",
"FStar.Tactics.NamedView.term",
"FStar.Tactics.CanonMonoid.reification_aux",
"FStar.Tactics.CanonMonoid.exp",
"FStar.Tactics.V2.Derived.norm_term",
"Prims.Cons",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.delta",
"FStar.Pervasives.zeta",
"FStar.Pervasives.iota",
"Prims.Nil",
"FStar.Algebra.Monoid.__proj__Monoid__item__unit",
"FStar.Stubs.Reflection.Types.term",
"FStar.Algebra.Monoid.__proj__Monoid__item__mult"
] | [] | false | true | false | false | false | let reification (#a: Type) (m: monoid a) (me: term) : Tac (exp a) =
| let mult = norm_term [delta; zeta; iota] (quote (Monoid?.mult m)) in
let unit = norm_term [delta; zeta; iota] (quote (Monoid?.unit m)) in
let me = norm_term [delta; zeta; iota] me in
reification_aux mult unit me | false |
FStar.Tactics.CanonMonoid.fst | FStar.Tactics.CanonMonoid.lem0 | val lem0 : a: Prims.int -> b: Prims.int -> c: Prims.int -> d: Prims.int -> Prims.unit | let lem0 (a b c d : int) =
assert_by_tactic (0 + a + b + c + d == (0 + a) + (b + c + 0) + (d + 0))
(fun _ -> canon_monoid int_plus_monoid (* string_of_int *); trefl()) | {
"file_name": "ulib/FStar.Tactics.CanonMonoid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 70,
"end_line": 124,
"start_col": 0,
"start_line": 122
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Tactics.CanonMonoid
open FStar.Algebra.Monoid
open FStar.List
open FStar.Reflection.V2
open FStar.Tactics.V2
(* Only dump when debugging is on *)
let dump m = if debugging () then dump m
(* "A Monoid Expression Simplifier" ported from
http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html *)
type exp (a:Type) : Type =
| Unit : exp a
| Var : a -> exp a
| Mult : exp a -> exp a -> exp a
let rec exp_to_string (#a:Type) (a_to_string:a->string) (e:exp a) =
match e with
| Unit -> "Unit"
| Var x -> "Var " ^ a_to_string x
| Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1
^ ") (" ^ exp_to_string a_to_string e2 ^ ")"
let rec mdenote (#a:Type) (m:monoid a) (e:exp a) : a =
match e with
| Unit -> Monoid?.unit m
| Var x -> x
| Mult e1 e2 -> Monoid?.mult m (mdenote m e1) (mdenote m e2)
let rec mldenote (#a:Type) (m:monoid a) (xs:list a) : a =
match xs with
| [] -> Monoid?.unit m
| [x] -> x
| x::xs' -> Monoid?.mult m x (mldenote m xs')
let rec flatten (#a:Type) (e:exp a) : list a =
match e with
| Unit -> []
| Var x -> [x]
| Mult e1 e2 -> flatten e1 @ flatten e2
(* This proof internally uses the monoid laws; the SMT solver picks up
on them because they are written as squashed formulas in the
definition of monoid; need to be careful with this since these are
quantified formulas without any patterns. Dangerous stuff! *)
let rec flatten_correct_aux (#a:Type) (m:monoid a) ml1 ml2 :
Lemma (mldenote m (ml1 @ ml2) == Monoid?.mult m (mldenote m ml1)
(mldenote m ml2)) =
match ml1 with
| [] -> ()
| e::es1' -> flatten_correct_aux m es1' ml2
let rec flatten_correct (#a:Type) (m:monoid a) (e:exp a) :
Lemma (mdenote m e == mldenote m (flatten e)) =
match e with
| Unit | Var _ -> ()
| Mult e1 e2 -> flatten_correct_aux m (flatten e1) (flatten e2);
flatten_correct m e1; flatten_correct m e2
let monoid_reflect (#a:Type) (m:monoid a) (e1 e2:exp a)
(_ : squash (mldenote m (flatten e1) == mldenote m (flatten e2)))
: squash (mdenote m e1 == mdenote m e2) =
flatten_correct m e1; flatten_correct m e2
// This expects that mult, unit, and me have already been normalized
let rec reification_aux (#a:Type) (mult unit me : term) : Tac (exp a) =
let hd, tl = collect_app_ref me in
let tl = list_unref tl in
match inspect hd, tl with
| Tv_FVar fv, [(me1, Q_Explicit) ; (me2, Q_Explicit)] ->
if term_eq_old (pack (Tv_FVar fv)) mult
then Mult (reification_aux mult unit me1) (reification_aux mult unit me2)
else Var (unquote me)
| _, _ ->
if term_eq_old me unit
then Unit
else Var (unquote me)
let reification (#a:Type) (m:monoid a) (me:term) : Tac (exp a) =
let mult = norm_term [delta;zeta;iota] (quote (Monoid?.mult m)) in
let unit = norm_term [delta;zeta;iota] (quote (Monoid?.unit m)) in
let me = norm_term [delta;zeta;iota] me in
// dump ("mult = " ^ term_to_string mult ^
// "; unit = " ^ term_to_string unit ^
// "; me = " ^ term_to_string me);
reification_aux mult unit me
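// Informally: with m = int_plus_monoid, the normalized term `a + (b + 0)`
// reifies to Mult (Var a) (Mult (Var b) Unit), since `+` is the normalized
// mult and `0` the normalized unit of that monoid.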
let canon_monoid (#a:Type) (m:monoid a) : Tac unit =
norm [];
let g = cur_goal () in
match term_as_formula g with
| Comp (Eq (Some t)) me1 me2 ->
if term_eq_old t (quote a) then
let r1 = reification m me1 in
let r2 = reification m me2 in
change_sq (quote (mdenote m r1 == mdenote m r2));
apply (`monoid_reflect);
norm [delta_only [`%mldenote;
`%flatten;
`%FStar.List.Tot.op_At;
`%FStar.List.Tot.append]]
else fail "Goal should be an equality at the right monoid type"
| _ -> fail "Goal should be an equality" | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Algebra.Monoid.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Tactics.CanonMonoid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Tactics.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Reflection.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.Monoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a: Prims.int -> b: Prims.int -> c: Prims.int -> d: Prims.int -> Prims.unit | Prims.Tot | [
"total"
] | [] | [
"Prims.int",
"FStar.Tactics.Effect.assert_by_tactic",
"Prims.eq2",
"Prims.op_Addition",
"Prims.unit",
"FStar.Tactics.V2.Derived.trefl",
"FStar.Tactics.CanonMonoid.canon_monoid",
"FStar.Algebra.Monoid.int_plus_monoid"
] | [] | false | false | false | true | false | let lem0 (a b c d: int) =
| assert_by_tactic (0 + a + b + c + d == (0 + a) + (b + c + 0) + (d + 0))
(fun _ ->
canon_monoid int_plus_monoid;
trefl ()) | false |
|
FStar.Tactics.CanonMonoid.fst | FStar.Tactics.CanonMonoid.exp_to_string | val exp_to_string : a_to_string: (_: a -> Prims.string) -> e: FStar.Tactics.CanonMonoid.exp a -> Prims.string | let rec exp_to_string (#a:Type) (a_to_string:a->string) (e:exp a) =
match e with
| Unit -> "Unit"
| Var x -> "Var " ^ a_to_string x
| Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1
^ ") (" ^ exp_to_string a_to_string e2 ^ ")" | {
"file_name": "ulib/FStar.Tactics.CanonMonoid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 63,
"end_line": 39,
"start_col": 0,
"start_line": 34
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Tactics.CanonMonoid
open FStar.Algebra.Monoid
open FStar.List
open FStar.Reflection.V2
open FStar.Tactics.V2
(* Only dump when debugging is on *)
let dump m = if debugging () then dump m
(* "A Monoid Expression Simplifier" ported from
http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html *)
type exp (a:Type) : Type =
| Unit : exp a
| Var : a -> exp a
| Mult : exp a -> exp a -> exp a | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Algebra.Monoid.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Tactics.CanonMonoid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Tactics.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Reflection.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.Monoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a_to_string: (_: a -> Prims.string) -> e: FStar.Tactics.CanonMonoid.exp a -> Prims.string | Prims.Tot | [
"total"
] | [] | [
"Prims.string",
"FStar.Tactics.CanonMonoid.exp",
"Prims.op_Hat",
"FStar.Tactics.CanonMonoid.exp_to_string"
] | [
"recursion"
] | false | false | false | true | false | let rec exp_to_string (#a: Type) (a_to_string: (a -> string)) (e: exp a) =
| match e with
| Unit -> "Unit"
| Var x -> "Var " ^ a_to_string x
| Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1 ^ ") (" ^ exp_to_string a_to_string e2 ^ ")" | false |
|
FStar.Algebra.CommMonoid.Equiv.fst | FStar.Algebra.CommMonoid.Equiv.equality_equiv | val equality_equiv (a: Type) : equiv a | val equality_equiv (a: Type) : equiv a | let equality_equiv (a:Type) : equiv a =
EQ (fun x y -> x == y) (fun x -> ()) (fun x y -> ()) (fun x y z -> ()) | {
"file_name": "ulib/FStar.Algebra.CommMonoid.Equiv.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 72,
"end_line": 47,
"start_col": 0,
"start_line": 46
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Algebra.CommMonoid.Equiv
open FStar.Mul
unopteq
type equiv (a:Type) =
| EQ :
eq:(a -> a -> Type0) ->
reflexivity:(x:a -> Lemma (x `eq` x)) ->
symmetry:(x:a -> y:a -> Lemma (requires (x `eq` y)) (ensures (y `eq` x))) ->
transitivity:(x:a -> y:a -> z:a -> Lemma (requires (x `eq` y /\ y `eq` z)) (ensures (x `eq` z))) ->
equiv a
let elim_eq_laws #a (eq:equiv a)
: Lemma (
(forall x.{:pattern (x `eq.eq` x)} x `eq.eq` x) /\
(forall x y.{:pattern (x `eq.eq` y)} x `eq.eq` y ==> y `eq.eq` x) /\
(forall x y z.{:pattern eq.eq x y; eq.eq y z} (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z)
)
= introduce forall x. x `eq.eq` x
with (eq.reflexivity x);
introduce forall x y. x `eq.eq` y ==> y `eq.eq` x
with (introduce _ ==> _
with _. eq.symmetry x y);
introduce forall x y z. (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z
with (introduce _ ==> _
with _. eq.transitivity x y z) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Classical.Sugar.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Algebra.CommMonoid.Equiv.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.CommMonoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.CommMonoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a: Type -> FStar.Algebra.CommMonoid.Equiv.equiv a | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.EQ",
"Prims.eq2",
"Prims.unit",
"FStar.Algebra.CommMonoid.Equiv.equiv"
] | [] | false | false | false | true | false | let equality_equiv (a: Type) : equiv a =
| EQ (fun x y -> x == y) (fun x -> ()) (fun x y -> ()) (fun x y z -> ()) | false |
FStar.Algebra.CommMonoid.Equiv.fst | FStar.Algebra.CommMonoid.Equiv.right_identity | val right_identity (#a: Type u#aa) (eq: equiv a) (m: cm a eq) (x: a)
: Lemma (EQ?.eq eq (CM?.mult m x (CM?.unit m)) x) | val right_identity (#a: Type u#aa) (eq: equiv a) (m: cm a eq) (x: a)
: Lemma (EQ?.eq eq (CM?.mult m x (CM?.unit m)) x) | let right_identity (#a:Type u#aa) (eq:equiv a) (m:cm a eq) (x:a)
: Lemma (x `CM?.mult m` (CM?.unit m) `EQ?.eq eq` x) =
CM?.commutativity m x (CM?.unit m);
CM?.identity m x;
EQ?.transitivity eq (x `CM?.mult m` (CM?.unit m)) ((CM?.unit m) `CM?.mult m` x) x | {
"file_name": "ulib/FStar.Algebra.CommMonoid.Equiv.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 83,
"end_line": 70,
"start_col": 0,
"start_line": 66
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Algebra.CommMonoid.Equiv
open FStar.Mul
unopteq
type equiv (a:Type) =
| EQ :
eq:(a -> a -> Type0) ->
reflexivity:(x:a -> Lemma (x `eq` x)) ->
symmetry:(x:a -> y:a -> Lemma (requires (x `eq` y)) (ensures (y `eq` x))) ->
transitivity:(x:a -> y:a -> z:a -> Lemma (requires (x `eq` y /\ y `eq` z)) (ensures (x `eq` z))) ->
equiv a
let elim_eq_laws #a (eq:equiv a)
: Lemma (
(forall x.{:pattern (x `eq.eq` x)} x `eq.eq` x) /\
(forall x y.{:pattern (x `eq.eq` y)} x `eq.eq` y ==> y `eq.eq` x) /\
(forall x y z.{:pattern eq.eq x y; eq.eq y z} (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z)
)
= introduce forall x. x `eq.eq` x
with (eq.reflexivity x);
introduce forall x y. x `eq.eq` y ==> y `eq.eq` x
with (introduce _ ==> _
with _. eq.symmetry x y);
introduce forall x y z. (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z
with (introduce _ ==> _
with _. eq.transitivity x y z)
let equality_equiv (a:Type) : equiv a =
EQ (fun x y -> x == y) (fun x -> ()) (fun x y -> ()) (fun x y z -> ())
unopteq
type cm (a:Type) (eq:equiv a) =
| CM :
unit:a ->
mult:(a -> a -> a) ->
identity : (x:a -> Lemma ((unit `mult` x) `EQ?.eq eq` x)) ->
associativity : (x:a -> y:a -> z:a ->
Lemma ((x `mult` y `mult` z) `EQ?.eq eq` (x `mult` (y `mult` z)))) ->
commutativity:(x:a -> y:a -> Lemma ((x `mult` y) `EQ?.eq eq` (y `mult` x))) ->
congruence:(x:a -> y:a -> z:a -> w:a -> Lemma (requires (x `EQ?.eq eq` z /\ y `EQ?.eq eq` w)) (ensures ((mult x y) `EQ?.eq eq` (mult z w)))) ->
cm a eq
// temporarily fixing the universe of this lemma to u#1 because
// otherwise tactics for LowStar.Resource canonicalization fails | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Classical.Sugar.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Algebra.CommMonoid.Equiv.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.CommMonoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.CommMonoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | eq: FStar.Algebra.CommMonoid.Equiv.equiv a -> m: FStar.Algebra.CommMonoid.Equiv.cm a eq -> x: a
-> FStar.Pervasives.Lemma (ensures EQ?.eq eq (CM?.mult m x (CM?.unit m)) x) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Algebra.CommMonoid.Equiv.cm",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__transitivity",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit",
"Prims.unit",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__identity",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__commutativity",
"Prims.l_True",
"Prims.squash",
"FStar.Algebra.CommMonoid.Equiv.__proj__EQ__item__eq",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let right_identity (#a: Type u#aa) (eq: equiv a) (m: cm a eq) (x: a)
: Lemma (EQ?.eq eq (CM?.mult m x (CM?.unit m)) x) =
| CM?.commutativity m x (CM?.unit m);
CM?.identity m x;
EQ?.transitivity eq (CM?.mult m x (CM?.unit m)) (CM?.mult m (CM?.unit m) x) x | false |
FStar.Algebra.CommMonoid.Equiv.fst | FStar.Algebra.CommMonoid.Equiv.int_plus_cm | val int_plus_cm:cm int (equality_equiv int) | val int_plus_cm:cm int (equality_equiv int) | let int_plus_cm : cm int (equality_equiv int) =
CM 0 (+) (fun _ -> ()) (fun _ _ _ -> ()) (fun _ _ -> ()) (fun _ _ _ _ -> ()) | {
"file_name": "ulib/FStar.Algebra.CommMonoid.Equiv.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 78,
"end_line": 73,
"start_col": 0,
"start_line": 72
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Algebra.CommMonoid.Equiv
open FStar.Mul
unopteq
type equiv (a:Type) =
| EQ :
eq:(a -> a -> Type0) ->
reflexivity:(x:a -> Lemma (x `eq` x)) ->
symmetry:(x:a -> y:a -> Lemma (requires (x `eq` y)) (ensures (y `eq` x))) ->
transitivity:(x:a -> y:a -> z:a -> Lemma (requires (x `eq` y /\ y `eq` z)) (ensures (x `eq` z))) ->
equiv a
let elim_eq_laws #a (eq:equiv a)
: Lemma (
(forall x.{:pattern (x `eq.eq` x)} x `eq.eq` x) /\
(forall x y.{:pattern (x `eq.eq` y)} x `eq.eq` y ==> y `eq.eq` x) /\
(forall x y z.{:pattern eq.eq x y; eq.eq y z} (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z)
)
= introduce forall x. x `eq.eq` x
with (eq.reflexivity x);
introduce forall x y. x `eq.eq` y ==> y `eq.eq` x
with (introduce _ ==> _
with _. eq.symmetry x y);
introduce forall x y z. (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z
with (introduce _ ==> _
with _. eq.transitivity x y z)
let equality_equiv (a:Type) : equiv a =
EQ (fun x y -> x == y) (fun x -> ()) (fun x y -> ()) (fun x y z -> ())
unopteq
type cm (a:Type) (eq:equiv a) =
| CM :
unit:a ->
mult:(a -> a -> a) ->
identity : (x:a -> Lemma ((unit `mult` x) `EQ?.eq eq` x)) ->
associativity : (x:a -> y:a -> z:a ->
Lemma ((x `mult` y `mult` z) `EQ?.eq eq` (x `mult` (y `mult` z)))) ->
commutativity:(x:a -> y:a -> Lemma ((x `mult` y) `EQ?.eq eq` (y `mult` x))) ->
congruence:(x:a -> y:a -> z:a -> w:a -> Lemma (requires (x `EQ?.eq eq` z /\ y `EQ?.eq eq` w)) (ensures ((mult x y) `EQ?.eq eq` (mult z w)))) ->
cm a eq
// temporarily fixing the universe of this lemma to u#1 because
// otherwise tactics for LowStar.Resource canonicalization fails
// by picking up an incorrect universe u#0 for resource type
let right_identity (#a:Type u#aa) (eq:equiv a) (m:cm a eq) (x:a)
: Lemma (x `CM?.mult m` (CM?.unit m) `EQ?.eq eq` x) =
CM?.commutativity m x (CM?.unit m);
CM?.identity m x;
EQ?.transitivity eq (x `CM?.mult m` (CM?.unit m)) ((CM?.unit m) `CM?.mult m` x) x | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Classical.Sugar.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Algebra.CommMonoid.Equiv.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.CommMonoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.CommMonoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | FStar.Algebra.CommMonoid.Equiv.cm Prims.int
(FStar.Algebra.CommMonoid.Equiv.equality_equiv Prims.int) | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.CM",
"Prims.int",
"FStar.Algebra.CommMonoid.Equiv.equality_equiv",
"Prims.op_Addition",
"Prims.unit"
] | [] | false | false | false | true | false | let int_plus_cm:cm int (equality_equiv int) =
| CM 0 ( + ) (fun _ -> ()) (fun _ _ _ -> ()) (fun _ _ -> ()) (fun _ _ _ _ -> ()) | false |
FStar.Tactics.CanonMonoid.fst | FStar.Tactics.CanonMonoid.canon_monoid | val canon_monoid (#a: Type) (m: monoid a) : Tac unit | val canon_monoid (#a: Type) (m: monoid a) : Tac unit | let canon_monoid (#a:Type) (m:monoid a) : Tac unit =
norm [];
let g = cur_goal () in
match term_as_formula g with
| Comp (Eq (Some t)) me1 me2 ->
if term_eq_old t (quote a) then
let r1 = reification m me1 in
let r2 = reification m me2 in
change_sq (quote (mdenote m r1 == mdenote m r2));
apply (`monoid_reflect);
norm [delta_only [`%mldenote;
`%flatten;
`%FStar.List.Tot.op_At;
`%FStar.List.Tot.append]]
else fail "Goal should be an equality at the right monoid type"
| _ -> fail "Goal should be an equality" | {
"file_name": "ulib/FStar.Tactics.CanonMonoid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 42,
"end_line": 120,
"start_col": 0,
"start_line": 105
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Tactics.CanonMonoid
open FStar.Algebra.Monoid
open FStar.List
open FStar.Reflection.V2
open FStar.Tactics.V2
(* Only dump when debugging is on *)
let dump m = if debugging () then dump m
(* "A Monoid Expression Simplifier" ported from
http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html *)
type exp (a:Type) : Type =
| Unit : exp a
| Var : a -> exp a
| Mult : exp a -> exp a -> exp a
let rec exp_to_string (#a:Type) (a_to_string:a->string) (e:exp a) =
match e with
| Unit -> "Unit"
| Var x -> "Var " ^ a_to_string x
| Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1
^ ") (" ^ exp_to_string a_to_string e2 ^ ")"
let rec mdenote (#a:Type) (m:monoid a) (e:exp a) : a =
match e with
| Unit -> Monoid?.unit m
| Var x -> x
| Mult e1 e2 -> Monoid?.mult m (mdenote m e1) (mdenote m e2)
let rec mldenote (#a:Type) (m:monoid a) (xs:list a) : a =
match xs with
| [] -> Monoid?.unit m
| [x] -> x
| x::xs' -> Monoid?.mult m x (mldenote m xs')
let rec flatten (#a:Type) (e:exp a) : list a =
match e with
| Unit -> []
| Var x -> [x]
| Mult e1 e2 -> flatten e1 @ flatten e2
(* This proof internally uses the monoid laws; the SMT solver picks up
on them because they are written as squashed formulas in the
definition of monoid; need to be careful with this since these are
quantified formulas without any patterns. Dangerous stuff! *)
let rec flatten_correct_aux (#a:Type) (m:monoid a) ml1 ml2 :
Lemma (mldenote m (ml1 @ ml2) == Monoid?.mult m (mldenote m ml1)
(mldenote m ml2)) =
match ml1 with
| [] -> ()
| e::es1' -> flatten_correct_aux m es1' ml2
let rec flatten_correct (#a:Type) (m:monoid a) (e:exp a) :
Lemma (mdenote m e == mldenote m (flatten e)) =
match e with
| Unit | Var _ -> ()
| Mult e1 e2 -> flatten_correct_aux m (flatten e1) (flatten e2);
flatten_correct m e1; flatten_correct m e2
let monoid_reflect (#a:Type) (m:monoid a) (e1 e2:exp a)
(_ : squash (mldenote m (flatten e1) == mldenote m (flatten e2)))
: squash (mdenote m e1 == mdenote m e2) =
flatten_correct m e1; flatten_correct m e2
// This expects that mult, unit, and me have already been normalized
let rec reification_aux (#a:Type) (mult unit me : term) : Tac (exp a) =
let hd, tl = collect_app_ref me in
let tl = list_unref tl in
match inspect hd, tl with
| Tv_FVar fv, [(me1, Q_Explicit) ; (me2, Q_Explicit)] ->
if term_eq_old (pack (Tv_FVar fv)) mult
then Mult (reification_aux mult unit me1) (reification_aux mult unit me2)
else Var (unquote me)
| _, _ ->
if term_eq_old me unit
then Unit
else Var (unquote me)
let reification (#a:Type) (m:monoid a) (me:term) : Tac (exp a) =
let mult = norm_term [delta;zeta;iota] (quote (Monoid?.mult m)) in
let unit = norm_term [delta;zeta;iota] (quote (Monoid?.unit m)) in
let me = norm_term [delta;zeta;iota] me in
// dump ("mult = " ^ term_to_string mult ^
// "; unit = " ^ term_to_string unit ^
// "; me = " ^ term_to_string me);
reification_aux mult unit me | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Algebra.Monoid.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Tactics.CanonMonoid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Tactics.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Reflection.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.Monoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: FStar.Algebra.Monoid.monoid a -> FStar.Tactics.Effect.Tac Prims.unit | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Algebra.Monoid.monoid",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Tactics.NamedView.term",
"FStar.Stubs.Tactics.V2.Builtins.norm",
"Prims.Cons",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.delta_only",
"Prims.string",
"Prims.Nil",
"Prims.unit",
"FStar.Tactics.V2.Derived.apply",
"FStar.Tactics.V2.Derived.change_sq",
"Prims.eq2",
"FStar.Tactics.CanonMonoid.mdenote",
"FStar.Stubs.Reflection.Types.term",
"FStar.Tactics.CanonMonoid.exp",
"FStar.Tactics.CanonMonoid.reification",
"Prims.bool",
"FStar.Tactics.V2.Derived.fail",
"FStar.Stubs.Tactics.V2.Builtins.term_eq_old",
"FStar.Reflection.V2.Formula.formula",
"FStar.Reflection.V2.Formula.term_as_formula",
"FStar.Tactics.V2.Derived.cur_goal"
] | [] | false | true | false | false | false | let canon_monoid (#a: Type) (m: monoid a) : Tac unit =
| norm [];
let g = cur_goal () in
match term_as_formula g with
| Comp (Eq (Some t)) me1 me2 ->
if term_eq_old t (quote a)
then
let r1 = reification m me1 in
let r2 = reification m me2 in
change_sq (quote (mdenote m r1 == mdenote m r2));
apply (`monoid_reflect);
norm [delta_only [`%mldenote; `%flatten; `%FStar.List.Tot.op_At; `%FStar.List.Tot.append]]
else fail "Goal should be an equality at the right monoid type"
| _ -> fail "Goal should be an equality" | false |
FStar.Tactics.CanonMonoid.fst | FStar.Tactics.CanonMonoid.mdenote | val mdenote (#a: Type) (m: monoid a) (e: exp a) : a | val mdenote (#a: Type) (m: monoid a) (e: exp a) : a | let rec mdenote (#a:Type) (m:monoid a) (e:exp a) : a =
match e with
| Unit -> Monoid?.unit m
| Var x -> x
| Mult e1 e2 -> Monoid?.mult m (mdenote m e1) (mdenote m e2) | {
"file_name": "ulib/FStar.Tactics.CanonMonoid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 62,
"end_line": 45,
"start_col": 0,
"start_line": 41
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Tactics.CanonMonoid
open FStar.Algebra.Monoid
open FStar.List
open FStar.Reflection.V2
open FStar.Tactics.V2
(* Only dump when debugging is on *)
let dump m = if debugging () then dump m
(* "A Monoid Expression Simplifier" ported from
http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html *)
type exp (a:Type) : Type =
| Unit : exp a
| Var : a -> exp a
| Mult : exp a -> exp a -> exp a
let rec exp_to_string (#a:Type) (a_to_string:a->string) (e:exp a) =
match e with
| Unit -> "Unit"
| Var x -> "Var " ^ a_to_string x
| Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1
^ ") (" ^ exp_to_string a_to_string e2 ^ ")" | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Algebra.Monoid.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Tactics.CanonMonoid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Tactics.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Reflection.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.Monoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: FStar.Algebra.Monoid.monoid a -> e: FStar.Tactics.CanonMonoid.exp a -> a | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.Monoid.monoid",
"FStar.Tactics.CanonMonoid.exp",
"FStar.Algebra.Monoid.__proj__Monoid__item__unit",
"FStar.Algebra.Monoid.__proj__Monoid__item__mult",
"FStar.Tactics.CanonMonoid.mdenote"
] | [
"recursion"
] | false | false | false | true | false | let rec mdenote (#a: Type) (m: monoid a) (e: exp a) : a =
| match e with
| Unit -> Monoid?.unit m
| Var x -> x
| Mult e1 e2 -> Monoid?.mult m (mdenote m e1) (mdenote m e2) | false |
FStar.Algebra.CommMonoid.Equiv.fst | FStar.Algebra.CommMonoid.Equiv.int_multiply_cm | val int_multiply_cm:cm int (equality_equiv int) | val int_multiply_cm:cm int (equality_equiv int) | let int_multiply_cm : cm int (equality_equiv int) =
CM 1 ( * ) (fun _ -> ()) (fun _ _ _ -> ()) (fun _ _ -> ()) (fun _ _ _ _ -> ()) | {
"file_name": "ulib/FStar.Algebra.CommMonoid.Equiv.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 80,
"end_line": 76,
"start_col": 0,
"start_line": 75
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Algebra.CommMonoid.Equiv
open FStar.Mul
unopteq
type equiv (a:Type) =
| EQ :
eq:(a -> a -> Type0) ->
reflexivity:(x:a -> Lemma (x `eq` x)) ->
symmetry:(x:a -> y:a -> Lemma (requires (x `eq` y)) (ensures (y `eq` x))) ->
transitivity:(x:a -> y:a -> z:a -> Lemma (requires (x `eq` y /\ y `eq` z)) (ensures (x `eq` z))) ->
equiv a
let elim_eq_laws #a (eq:equiv a)
: Lemma (
(forall x.{:pattern (x `eq.eq` x)} x `eq.eq` x) /\
(forall x y.{:pattern (x `eq.eq` y)} x `eq.eq` y ==> y `eq.eq` x) /\
(forall x y z.{:pattern eq.eq x y; eq.eq y z} (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z)
)
= introduce forall x. x `eq.eq` x
with (eq.reflexivity x);
introduce forall x y. x `eq.eq` y ==> y `eq.eq` x
with (introduce _ ==> _
with _. eq.symmetry x y);
introduce forall x y z. (x `eq.eq` y /\ y `eq.eq` z) ==> x `eq.eq` z
with (introduce _ ==> _
with _. eq.transitivity x y z)
let equality_equiv (a:Type) : equiv a =
EQ (fun x y -> x == y) (fun x -> ()) (fun x y -> ()) (fun x y z -> ())
unopteq
type cm (a:Type) (eq:equiv a) =
| CM :
unit:a ->
mult:(a -> a -> a) ->
identity : (x:a -> Lemma ((unit `mult` x) `EQ?.eq eq` x)) ->
associativity : (x:a -> y:a -> z:a ->
Lemma ((x `mult` y `mult` z) `EQ?.eq eq` (x `mult` (y `mult` z)))) ->
commutativity:(x:a -> y:a -> Lemma ((x `mult` y) `EQ?.eq eq` (y `mult` x))) ->
congruence:(x:a -> y:a -> z:a -> w:a -> Lemma (requires (x `EQ?.eq eq` z /\ y `EQ?.eq eq` w)) (ensures ((mult x y) `EQ?.eq eq` (mult z w)))) ->
cm a eq
// temporarily fixing the universe of this lemma to u#1 because
// otherwise tactics for LowStar.Resource canonicalization fails
// by picking up an incorrect universe u#0 for resource type
let right_identity (#a:Type u#aa) (eq:equiv a) (m:cm a eq) (x:a)
: Lemma (x `CM?.mult m` (CM?.unit m) `EQ?.eq eq` x) =
CM?.commutativity m x (CM?.unit m);
CM?.identity m x;
EQ?.transitivity eq (x `CM?.mult m` (CM?.unit m)) ((CM?.unit m) `CM?.mult m` x) x
let int_plus_cm : cm int (equality_equiv int) =
CM 0 (+) (fun _ -> ()) (fun _ _ _ -> ()) (fun _ _ -> ()) (fun _ _ _ _ -> ()) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Classical.Sugar.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Algebra.CommMonoid.Equiv.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.CommMonoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.CommMonoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | FStar.Algebra.CommMonoid.Equiv.cm Prims.int
(FStar.Algebra.CommMonoid.Equiv.equality_equiv Prims.int) | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.CommMonoid.Equiv.CM",
"Prims.int",
"FStar.Algebra.CommMonoid.Equiv.equality_equiv",
"FStar.Mul.op_Star",
"Prims.unit"
] | [] | false | false | false | true | false | let int_multiply_cm:cm int (equality_equiv int) =
| CM 1 ( * ) (fun _ -> ()) (fun _ _ _ -> ()) (fun _ _ -> ()) (fun _ _ _ _ -> ()) | false |
FStar.Tactics.CanonMonoid.fst | FStar.Tactics.CanonMonoid.flatten_correct | val flatten_correct (#a: Type) (m: monoid a) (e: exp a)
: Lemma (mdenote m e == mldenote m (flatten e)) | val flatten_correct (#a: Type) (m: monoid a) (e: exp a)
: Lemma (mdenote m e == mldenote m (flatten e)) | let rec flatten_correct (#a:Type) (m:monoid a) (e:exp a) :
Lemma (mdenote m e == mldenote m (flatten e)) =
match e with
| Unit | Var _ -> ()
| Mult e1 e2 -> flatten_correct_aux m (flatten e1) (flatten e2);
flatten_correct m e1; flatten_correct m e2 | {
"file_name": "ulib/FStar.Tactics.CanonMonoid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 60,
"end_line": 75,
"start_col": 0,
"start_line": 70
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Tactics.CanonMonoid
open FStar.Algebra.Monoid
open FStar.List
open FStar.Reflection.V2
open FStar.Tactics.V2
(* Only dump when debugging is on *)
let dump m = if debugging () then dump m
(* "A Monoid Expression Simplifier" ported from
http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html *)
type exp (a:Type) : Type =
| Unit : exp a
| Var : a -> exp a
| Mult : exp a -> exp a -> exp a
let rec exp_to_string (#a:Type) (a_to_string:a->string) (e:exp a) =
match e with
| Unit -> "Unit"
| Var x -> "Var " ^ a_to_string x
| Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1
^ ") (" ^ exp_to_string a_to_string e2 ^ ")"
let rec mdenote (#a:Type) (m:monoid a) (e:exp a) : a =
match e with
| Unit -> Monoid?.unit m
| Var x -> x
| Mult e1 e2 -> Monoid?.mult m (mdenote m e1) (mdenote m e2)
let rec mldenote (#a:Type) (m:monoid a) (xs:list a) : a =
match xs with
| [] -> Monoid?.unit m
| [x] -> x
| x::xs' -> Monoid?.mult m x (mldenote m xs')
let rec flatten (#a:Type) (e:exp a) : list a =
match e with
| Unit -> []
| Var x -> [x]
| Mult e1 e2 -> flatten e1 @ flatten e2
(* This proof internally uses the monoid laws; the SMT solver picks up
on them because they are written as squashed formulas in the
definition of monoid; need to be careful with this since these are
quantified formulas without any patterns. Dangerous stuff! *)
let rec flatten_correct_aux (#a:Type) (m:monoid a) ml1 ml2 :
Lemma (mldenote m (ml1 @ ml2) == Monoid?.mult m (mldenote m ml1)
(mldenote m ml2)) =
match ml1 with
| [] -> ()
| e::es1' -> flatten_correct_aux m es1' ml2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Algebra.Monoid.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Tactics.CanonMonoid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Tactics.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Reflection.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.Monoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: FStar.Algebra.Monoid.monoid a -> e: FStar.Tactics.CanonMonoid.exp a
-> FStar.Pervasives.Lemma
(ensures
FStar.Tactics.CanonMonoid.mdenote m e ==
FStar.Tactics.CanonMonoid.mldenote m (FStar.Tactics.CanonMonoid.flatten e)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"FStar.Algebra.Monoid.monoid",
"FStar.Tactics.CanonMonoid.exp",
"FStar.Tactics.CanonMonoid.flatten_correct",
"Prims.unit",
"FStar.Tactics.CanonMonoid.flatten_correct_aux",
"FStar.Tactics.CanonMonoid.flatten",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Tactics.CanonMonoid.mdenote",
"FStar.Tactics.CanonMonoid.mldenote",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [
"recursion"
] | false | false | true | false | false | let rec flatten_correct (#a: Type) (m: monoid a) (e: exp a)
: Lemma (mdenote m e == mldenote m (flatten e)) =
| match e with
| Unit | Var _ -> ()
| Mult e1 e2 ->
flatten_correct_aux m (flatten e1) (flatten e2);
flatten_correct m e1;
flatten_correct m e2 | false |
FStar.Tactics.CanonMonoid.fst | FStar.Tactics.CanonMonoid.mldenote | val mldenote (#a: Type) (m: monoid a) (xs: list a) : a | val mldenote (#a: Type) (m: monoid a) (xs: list a) : a | let rec mldenote (#a:Type) (m:monoid a) (xs:list a) : a =
match xs with
| [] -> Monoid?.unit m
| [x] -> x
| x::xs' -> Monoid?.mult m x (mldenote m xs') | {
"file_name": "ulib/FStar.Tactics.CanonMonoid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 47,
"end_line": 51,
"start_col": 0,
"start_line": 47
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Tactics.CanonMonoid
open FStar.Algebra.Monoid
open FStar.List
open FStar.Reflection.V2
open FStar.Tactics.V2
(* Only dump when debugging is on *)
let dump m = if debugging () then dump m
(* "A Monoid Expression Simplifier" ported from
http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html *)
type exp (a:Type) : Type =
| Unit : exp a
| Var : a -> exp a
| Mult : exp a -> exp a -> exp a
let rec exp_to_string (#a:Type) (a_to_string:a->string) (e:exp a) =
match e with
| Unit -> "Unit"
| Var x -> "Var " ^ a_to_string x
| Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1
^ ") (" ^ exp_to_string a_to_string e2 ^ ")"
let rec mdenote (#a:Type) (m:monoid a) (e:exp a) : a =
match e with
| Unit -> Monoid?.unit m
| Var x -> x
| Mult e1 e2 -> Monoid?.mult m (mdenote m e1) (mdenote m e2) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Algebra.Monoid.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Tactics.CanonMonoid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Tactics.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Reflection.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.Monoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: FStar.Algebra.Monoid.monoid a -> xs: Prims.list a -> a | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.Monoid.monoid",
"Prims.list",
"FStar.Algebra.Monoid.__proj__Monoid__item__unit",
"FStar.Algebra.Monoid.__proj__Monoid__item__mult",
"FStar.Tactics.CanonMonoid.mldenote"
] | [
"recursion"
] | false | false | false | true | false | let rec mldenote (#a: Type) (m: monoid a) (xs: list a) : a =
| match xs with
| [] -> Monoid?.unit m
| [x] -> x
| x :: xs' -> Monoid?.mult m x (mldenote m xs') | false |
FStar.Tactics.CanonMonoid.fst | FStar.Tactics.CanonMonoid.flatten | val flatten (#a: Type) (e: exp a) : list a | val flatten (#a: Type) (e: exp a) : list a | let rec flatten (#a:Type) (e:exp a) : list a =
match e with
| Unit -> []
| Var x -> [x]
| Mult e1 e2 -> flatten e1 @ flatten e2 | {
"file_name": "ulib/FStar.Tactics.CanonMonoid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 41,
"end_line": 57,
"start_col": 0,
"start_line": 53
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Tactics.CanonMonoid
open FStar.Algebra.Monoid
open FStar.List
open FStar.Reflection.V2
open FStar.Tactics.V2
(* Only dump when debugging is on *)
let dump m = if debugging () then dump m
(* "A Monoid Expression Simplifier" ported from
http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html *)
type exp (a:Type) : Type =
| Unit : exp a
| Var : a -> exp a
| Mult : exp a -> exp a -> exp a
let rec exp_to_string (#a:Type) (a_to_string:a->string) (e:exp a) =
match e with
| Unit -> "Unit"
| Var x -> "Var " ^ a_to_string x
| Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1
^ ") (" ^ exp_to_string a_to_string e2 ^ ")"
let rec mdenote (#a:Type) (m:monoid a) (e:exp a) : a =
match e with
| Unit -> Monoid?.unit m
| Var x -> x
| Mult e1 e2 -> Monoid?.mult m (mdenote m e1) (mdenote m e2)
let rec mldenote (#a:Type) (m:monoid a) (xs:list a) : a =
match xs with
| [] -> Monoid?.unit m
| [x] -> x
| x::xs' -> Monoid?.mult m x (mldenote m xs') | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Algebra.Monoid.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Tactics.CanonMonoid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Tactics.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Reflection.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.Monoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | e: FStar.Tactics.CanonMonoid.exp a -> Prims.list a | Prims.Tot | [
"total"
] | [] | [
"FStar.Tactics.CanonMonoid.exp",
"Prims.Nil",
"Prims.Cons",
"FStar.List.Tot.Base.op_At",
"FStar.Tactics.CanonMonoid.flatten",
"Prims.list"
] | [
"recursion"
] | false | false | false | true | false | let rec flatten (#a: Type) (e: exp a) : list a =
| match e with
| Unit -> []
| Var x -> [x]
| Mult e1 e2 -> flatten e1 @ flatten e2 | false |
Hacl.Impl.Frodo.KEM.Encaps.fst | Hacl.Impl.Frodo.KEM.Encaps.frodo_mul_add_sb_plus_e_plus_mu | val frodo_mul_add_sb_plus_e_plus_mu:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h mu /\ live h v_matrix /\
live h sp_matrix /\ live h epp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix sp_matrix /\
disjoint v_matrix mu /\ disjoint v_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e_plus_mu a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix)) | val frodo_mul_add_sb_plus_e_plus_mu:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h mu /\ live h v_matrix /\
live h sp_matrix /\ live h epp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix sp_matrix /\
disjoint v_matrix mu /\ disjoint v_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e_plus_mu a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix)) | let frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix =
push_frame ();
frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix;
let mu_encode = matrix_create params_nbar params_nbar in
frodo_key_encode (params_logq a) (params_extracted_bits a) params_nbar mu mu_encode;
matrix_add v_matrix mu_encode;
clear_matrix mu_encode;
pop_frame () | {
"file_name": "code/frodo/Hacl.Impl.Frodo.KEM.Encaps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 129,
"start_col": 0,
"start_line": 122
} | module Hacl.Impl.Frodo.KEM.Encaps
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open LowStar.Buffer
open Lib.IntTypes
open Lib.Buffer
open Hacl.Impl.Matrix
open Hacl.Impl.Frodo.Params
open Hacl.Impl.Frodo.KEM
open Hacl.Impl.Frodo.Encode
open Hacl.Impl.Frodo.Pack
open Hacl.Impl.Frodo.Sample
open Hacl.Frodo.Random
module ST = FStar.HyperStack.ST
module LSeq = Lib.Sequence
module LB = Lib.ByteSequence
module FP = Spec.Frodo.Params
module S = Spec.Frodo.KEM.Encaps
module M = Spec.Matrix
module KG = Hacl.Impl.Frodo.KEM.KeyGen
#set-options "--z3rlimit 100 --fuel 0 --ifuel 0"
inline_for_extraction noextract
val frodo_mul_add_sa_plus_e:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h bp_matrix /\
disjoint bp_matrix seed_a /\ disjoint bp_matrix ep_matrix /\ disjoint bp_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc bp_matrix) h0 h1 /\
as_matrix h1 bp_matrix ==
S.frodo_mul_add_sa_plus_e a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix =
push_frame ();
let a_matrix = matrix_create (params_n a) (params_n a) in
frodo_gen_matrix gen_a (params_n a) seed_a a_matrix;
matrix_mul sp_matrix a_matrix bp_matrix;
matrix_add bp_matrix ep_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c1:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> c1:lbytes (ct1bytes_len a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h c1 /\
disjoint seed_a c1 /\ disjoint ep_matrix c1 /\ disjoint sp_matrix c1)
(ensures fun h0 _ h1 -> modifies (loc c1) h0 h1 /\
as_seq h1 c1 ==
S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1 =
push_frame ();
let bp_matrix = matrix_create params_nbar (params_n a) in
frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix;
frodo_pack (params_logq a) bp_matrix c1;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e:
a:FP.frodo_alg
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h epp_matrix /\ live h v_matrix /\ live h sp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix epp_matrix /\ disjoint v_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e a (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix =
push_frame ();
let b_matrix = matrix_create (params_n a) params_nbar in
frodo_unpack (params_n a) params_nbar (params_logq a) b b_matrix;
matrix_mul sp_matrix b_matrix v_matrix;
matrix_add v_matrix epp_matrix;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e_plus_mu:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h mu /\ live h v_matrix /\
live h sp_matrix /\ live h epp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix sp_matrix /\
disjoint v_matrix mu /\ disjoint v_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e_plus_mu a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix)) | {
"checked_file": "/",
"dependencies": [
"Spec.Matrix.fst.checked",
"Spec.Frodo.Params.fst.checked",
"Spec.Frodo.KEM.Encaps.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Matrix.fst.checked",
"Hacl.Impl.Frodo.Sample.fst.checked",
"Hacl.Impl.Frodo.Params.fst.checked",
"Hacl.Impl.Frodo.Pack.fst.checked",
"Hacl.Impl.Frodo.KEM.KeyGen.fst.checked",
"Hacl.Impl.Frodo.KEM.fst.checked",
"Hacl.Impl.Frodo.Encode.fst.checked",
"Hacl.Frodo.Random.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Frodo.KEM.Encaps.fst"
} | [
{
"abbrev": true,
"full_module": "Hacl.Impl.Frodo.KEM.KeyGen",
"short_module": "KG"
},
{
"abbrev": true,
"full_module": "Spec.Matrix",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.KEM.Encaps",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.Params",
"short_module": "FP"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "LB"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Frodo.Random",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Sample",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Pack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Encode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Params",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Matrix",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Frodo.Params.frodo_alg ->
mu: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.bytes_mu a) ->
b: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.publicmatrixbytes_len a) ->
sp_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
(Hacl.Impl.Frodo.Params.params_n a) ->
epp_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
Hacl.Impl.Frodo.Params.params_nbar ->
v_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
Hacl.Impl.Frodo.Params.params_nbar
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Spec.Frodo.Params.frodo_alg",
"Hacl.Impl.Matrix.lbytes",
"Hacl.Impl.Frodo.Params.bytes_mu",
"Hacl.Impl.Frodo.Params.publicmatrixbytes_len",
"Hacl.Impl.Matrix.matrix_t",
"Hacl.Impl.Frodo.Params.params_nbar",
"Hacl.Impl.Frodo.Params.params_n",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Frodo.KEM.clear_matrix",
"Hacl.Impl.Matrix.matrix_add",
"Hacl.Impl.Frodo.Encode.frodo_key_encode",
"Hacl.Impl.Frodo.Params.params_logq",
"Hacl.Impl.Frodo.Params.params_extracted_bits",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.MUT",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U16",
"Lib.IntTypes.SEC",
"Lib.IntTypes.mul",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Hacl.Impl.Matrix.matrix_create",
"Hacl.Impl.Frodo.KEM.Encaps.frodo_mul_add_sb_plus_e",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix =
| push_frame ();
frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix;
let mu_encode = matrix_create params_nbar params_nbar in
frodo_key_encode (params_logq a) (params_extracted_bits a) params_nbar mu mu_encode;
matrix_add v_matrix mu_encode;
clear_matrix mu_encode;
pop_frame () | false |
FStar.Tactics.CanonMonoid.fst | FStar.Tactics.CanonMonoid.monoid_reflect | val monoid_reflect:
#a: Type ->
m: monoid a ->
e1: exp a ->
e2: exp a ->
squash (mldenote m (flatten e1) == mldenote m (flatten e2))
-> squash (mdenote m e1 == mdenote m e2) | val monoid_reflect:
#a: Type ->
m: monoid a ->
e1: exp a ->
e2: exp a ->
squash (mldenote m (flatten e1) == mldenote m (flatten e2))
-> squash (mdenote m e1 == mdenote m e2) | let monoid_reflect (#a:Type) (m:monoid a) (e1 e2:exp a)
(_ : squash (mldenote m (flatten e1) == mldenote m (flatten e2)))
: squash (mdenote m e1 == mdenote m e2) =
flatten_correct m e1; flatten_correct m e2 | {
"file_name": "ulib/FStar.Tactics.CanonMonoid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 44,
"end_line": 80,
"start_col": 0,
"start_line": 77
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Tactics.CanonMonoid
open FStar.Algebra.Monoid
open FStar.List
open FStar.Reflection.V2
open FStar.Tactics.V2
(* Only dump when debugging is on *)
let dump m = if debugging () then dump m
(* "A Monoid Expression Simplifier" ported from
http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html *)
type exp (a:Type) : Type =
| Unit : exp a
| Var : a -> exp a
| Mult : exp a -> exp a -> exp a
let rec exp_to_string (#a:Type) (a_to_string:a->string) (e:exp a) =
match e with
| Unit -> "Unit"
| Var x -> "Var " ^ a_to_string x
| Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1
^ ") (" ^ exp_to_string a_to_string e2 ^ ")"
let rec mdenote (#a:Type) (m:monoid a) (e:exp a) : a =
match e with
| Unit -> Monoid?.unit m
| Var x -> x
| Mult e1 e2 -> Monoid?.mult m (mdenote m e1) (mdenote m e2)
let rec mldenote (#a:Type) (m:monoid a) (xs:list a) : a =
match xs with
| [] -> Monoid?.unit m
| [x] -> x
| x::xs' -> Monoid?.mult m x (mldenote m xs')
let rec flatten (#a:Type) (e:exp a) : list a =
match e with
| Unit -> []
| Var x -> [x]
| Mult e1 e2 -> flatten e1 @ flatten e2
(* This proof internally uses the monoid laws; the SMT solver picks up
on them because they are written as squashed formulas in the
definition of monoid; need to be careful with this since these are
quantified formulas without any patterns. Dangerous stuff! *)
let rec flatten_correct_aux (#a:Type) (m:monoid a) ml1 ml2 :
Lemma (mldenote m (ml1 @ ml2) == Monoid?.mult m (mldenote m ml1)
(mldenote m ml2)) =
match ml1 with
| [] -> ()
| e::es1' -> flatten_correct_aux m es1' ml2
let rec flatten_correct (#a:Type) (m:monoid a) (e:exp a) :
Lemma (mdenote m e == mldenote m (flatten e)) =
match e with
| Unit | Var _ -> ()
| Mult e1 e2 -> flatten_correct_aux m (flatten e1) (flatten e2);
flatten_correct m e1; flatten_correct m e2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Algebra.Monoid.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Tactics.CanonMonoid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Tactics.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Reflection.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.Monoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
m: FStar.Algebra.Monoid.monoid a ->
e1: FStar.Tactics.CanonMonoid.exp a ->
e2: FStar.Tactics.CanonMonoid.exp a ->
_:
Prims.squash (FStar.Tactics.CanonMonoid.mldenote m (FStar.Tactics.CanonMonoid.flatten e1) ==
FStar.Tactics.CanonMonoid.mldenote m (FStar.Tactics.CanonMonoid.flatten e2))
-> Prims.squash (FStar.Tactics.CanonMonoid.mdenote m e1 == FStar.Tactics.CanonMonoid.mdenote m e2) | Prims.Tot | [
"total"
] | [] | [
"FStar.Algebra.Monoid.monoid",
"FStar.Tactics.CanonMonoid.exp",
"Prims.squash",
"Prims.eq2",
"FStar.Tactics.CanonMonoid.mldenote",
"FStar.Tactics.CanonMonoid.flatten",
"FStar.Tactics.CanonMonoid.flatten_correct",
"Prims.unit",
"FStar.Tactics.CanonMonoid.mdenote"
] | [] | false | false | true | false | false | let monoid_reflect
(#a: Type)
(m: monoid a)
(e1: exp a)
(e2: exp a)
(_: squash (mldenote m (flatten e1) == mldenote m (flatten e2)))
: squash (mdenote m e1 == mdenote m e2) =
| flatten_correct m e1;
flatten_correct m e2 | false |
FStar.Tactics.CanonMonoid.fst | FStar.Tactics.CanonMonoid.flatten_correct_aux | val flatten_correct_aux (#a: Type) (m: monoid a) (ml1 ml2: _)
: Lemma (mldenote m (ml1 @ ml2) == Monoid?.mult m (mldenote m ml1) (mldenote m ml2)) | val flatten_correct_aux (#a: Type) (m: monoid a) (ml1 ml2: _)
: Lemma (mldenote m (ml1 @ ml2) == Monoid?.mult m (mldenote m ml1) (mldenote m ml2)) | let rec flatten_correct_aux (#a:Type) (m:monoid a) ml1 ml2 :
Lemma (mldenote m (ml1 @ ml2) == Monoid?.mult m (mldenote m ml1)
(mldenote m ml2)) =
match ml1 with
| [] -> ()
| e::es1' -> flatten_correct_aux m es1' ml2 | {
"file_name": "ulib/FStar.Tactics.CanonMonoid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 45,
"end_line": 68,
"start_col": 0,
"start_line": 63
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Tactics.CanonMonoid
open FStar.Algebra.Monoid
open FStar.List
open FStar.Reflection.V2
open FStar.Tactics.V2
(* Only dump when debugging is on *)
let dump m = if debugging () then dump m
(* "A Monoid Expression Simplifier" ported from
http://adam.chlipala.net/cpdt/html/Cpdt.Reflection.html *)
type exp (a:Type) : Type =
| Unit : exp a
| Var : a -> exp a
| Mult : exp a -> exp a -> exp a
let rec exp_to_string (#a:Type) (a_to_string:a->string) (e:exp a) =
match e with
| Unit -> "Unit"
| Var x -> "Var " ^ a_to_string x
| Mult e1 e2 -> "Mult (" ^ exp_to_string a_to_string e1
^ ") (" ^ exp_to_string a_to_string e2 ^ ")"
let rec mdenote (#a:Type) (m:monoid a) (e:exp a) : a =
match e with
| Unit -> Monoid?.unit m
| Var x -> x
| Mult e1 e2 -> Monoid?.mult m (mdenote m e1) (mdenote m e2)
let rec mldenote (#a:Type) (m:monoid a) (xs:list a) : a =
match xs with
| [] -> Monoid?.unit m
| [x] -> x
| x::xs' -> Monoid?.mult m x (mldenote m xs')
let rec flatten (#a:Type) (e:exp a) : list a =
match e with
| Unit -> []
| Var x -> [x]
| Mult e1 e2 -> flatten e1 @ flatten e2
(* This proof internally uses the monoid laws; the SMT solver picks up
on them because they are written as squashed formulas in the
definition of monoid; need to be careful with this since these are | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.Algebra.Monoid.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Tactics.CanonMonoid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Tactics.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Reflection.V2",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Algebra.Monoid",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: FStar.Algebra.Monoid.monoid a -> ml1: Prims.list a -> ml2: Prims.list a
-> FStar.Pervasives.Lemma
(ensures
FStar.Tactics.CanonMonoid.mldenote m (ml1 @ ml2) ==
Monoid?.mult m
(FStar.Tactics.CanonMonoid.mldenote m ml1)
(FStar.Tactics.CanonMonoid.mldenote m ml2)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"FStar.Algebra.Monoid.monoid",
"Prims.list",
"FStar.Tactics.CanonMonoid.flatten_correct_aux",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Tactics.CanonMonoid.mldenote",
"FStar.List.Tot.Base.op_At",
"FStar.Algebra.Monoid.__proj__Monoid__item__mult",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [
"recursion"
] | false | false | true | false | false | let rec flatten_correct_aux (#a: Type) (m: monoid a) ml1 ml2
: Lemma (mldenote m (ml1 @ ml2) == Monoid?.mult m (mldenote m ml1) (mldenote m ml2)) =
| match ml1 with
| [] -> ()
| e :: es1' -> flatten_correct_aux m es1' ml2 | false |
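Editorial note (not part of any dataset record above or below): the two FStar.Tactics.CanonMonoid rows capture the soundness lemmas of the monoid canonicalizer — flatten_correct_aux shows that denoting an appended list of atoms equals the monoid product of the two list denotations, and monoid_reflect transfers an equality proved on flattened lists back to the original expressions. As a hedged illustration only, a concrete instance of flatten_correct on a small expression could look like the sketch below; the name flatten_example is hypothetical, and the code assumes it is checked in a context that opens FStar.Algebra.Monoid and FStar.Tactics.CanonMonoid, not that it appears in the dataset.

(* Sketch: flatten (Mult (Var x) (Mult (Var y) (Var z))) computes to [x; y; z],
   so flatten_correct relates the tree denotation to the list denotation. *)
let flatten_example (m:monoid int) (x y z:int)
  : Lemma (mdenote m (Mult (Var x) (Mult (Var y) (Var z))) == mldenote m [x; y; z])
  = flatten_correct m (Mult (Var x) (Mult (Var y) (Var z)))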
Hacl.Impl.Frodo.KEM.Encaps.fst | Hacl.Impl.Frodo.KEM.Encaps.crypto_kem_enc | val crypto_kem_enc:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> Stack uint32
(requires fun h ->
disjoint state ct /\ disjoint state ss /\ disjoint state pk /\
live h ct /\ live h ss /\ live h pk /\
disjoint ct ss /\ disjoint ct pk /\ disjoint ss pk)
(ensures fun h0 _ h1 -> modifies (loc state |+| (loc ct |+| loc ss)) h0 h1 /\
(as_seq h1 ct, as_seq h1 ss) == S.crypto_kem_enc a gen_a (as_seq h0 state) (as_seq h0 pk)) | val crypto_kem_enc:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> Stack uint32
(requires fun h ->
disjoint state ct /\ disjoint state ss /\ disjoint state pk /\
live h ct /\ live h ss /\ live h pk /\
disjoint ct ss /\ disjoint ct pk /\ disjoint ss pk)
(ensures fun h0 _ h1 -> modifies (loc state |+| (loc ct |+| loc ss)) h0 h1 /\
(as_seq h1 ct, as_seq h1 ss) == S.crypto_kem_enc a gen_a (as_seq h0 state) (as_seq h0 pk)) | let crypto_kem_enc a gen_a ct ss pk =
recall state;
push_frame ();
let coins = create (bytes_mu a) (u8 0) in
recall state;
randombytes_ (bytes_mu a) coins;
crypto_kem_enc_ a gen_a coins ct ss pk;
clear_words_u8 coins;
pop_frame ();
u32 0 | {
"file_name": "code/frodo/Hacl.Impl.Frodo.KEM.Encaps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 7,
"end_line": 439,
"start_col": 0,
"start_line": 430
} | module Hacl.Impl.Frodo.KEM.Encaps
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open LowStar.Buffer
open Lib.IntTypes
open Lib.Buffer
open Hacl.Impl.Matrix
open Hacl.Impl.Frodo.Params
open Hacl.Impl.Frodo.KEM
open Hacl.Impl.Frodo.Encode
open Hacl.Impl.Frodo.Pack
open Hacl.Impl.Frodo.Sample
open Hacl.Frodo.Random
module ST = FStar.HyperStack.ST
module LSeq = Lib.Sequence
module LB = Lib.ByteSequence
module FP = Spec.Frodo.Params
module S = Spec.Frodo.KEM.Encaps
module M = Spec.Matrix
module KG = Hacl.Impl.Frodo.KEM.KeyGen
#set-options "--z3rlimit 100 --fuel 0 --ifuel 0"
inline_for_extraction noextract
val frodo_mul_add_sa_plus_e:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h bp_matrix /\
disjoint bp_matrix seed_a /\ disjoint bp_matrix ep_matrix /\ disjoint bp_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc bp_matrix) h0 h1 /\
as_matrix h1 bp_matrix ==
S.frodo_mul_add_sa_plus_e a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix =
push_frame ();
let a_matrix = matrix_create (params_n a) (params_n a) in
frodo_gen_matrix gen_a (params_n a) seed_a a_matrix;
matrix_mul sp_matrix a_matrix bp_matrix;
matrix_add bp_matrix ep_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c1:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> c1:lbytes (ct1bytes_len a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h ep_matrix /\ live h sp_matrix /\ live h c1 /\
disjoint seed_a c1 /\ disjoint ep_matrix c1 /\ disjoint sp_matrix c1)
(ensures fun h0 _ h1 -> modifies (loc c1) h0 h1 /\
as_seq h1 c1 ==
S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix))
let crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1 =
push_frame ();
let bp_matrix = matrix_create params_nbar (params_n a) in
frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bp_matrix;
frodo_pack (params_logq a) bp_matrix c1;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e:
a:FP.frodo_alg
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h epp_matrix /\ live h v_matrix /\ live h sp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix epp_matrix /\ disjoint v_matrix sp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e a (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix =
push_frame ();
let b_matrix = matrix_create (params_n a) params_nbar in
frodo_unpack (params_n a) params_nbar (params_logq a) b b_matrix;
matrix_mul sp_matrix b_matrix v_matrix;
matrix_add v_matrix epp_matrix;
pop_frame ()
inline_for_extraction noextract
val frodo_mul_add_sb_plus_e_plus_mu:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> v_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h b /\ live h mu /\ live h v_matrix /\
live h sp_matrix /\ live h epp_matrix /\
disjoint v_matrix b /\ disjoint v_matrix sp_matrix /\
disjoint v_matrix mu /\ disjoint v_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc v_matrix) h0 h1 /\
as_matrix h1 v_matrix ==
S.frodo_mul_add_sb_plus_e_plus_mu a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
let frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix =
push_frame ();
frodo_mul_add_sb_plus_e a b sp_matrix epp_matrix v_matrix;
let mu_encode = matrix_create params_nbar params_nbar in
frodo_key_encode (params_logq a) (params_extracted_bits a) params_nbar mu mu_encode;
matrix_add v_matrix mu_encode;
clear_matrix mu_encode;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_pack_c2:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> b:lbytes (publicmatrixbytes_len a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> c2:lbytes (ct2bytes_len a)
-> Stack unit
(requires fun h ->
live h mu /\ live h b /\ live h sp_matrix /\
live h epp_matrix /\ live h c2 /\
disjoint mu c2 /\ disjoint b c2 /\
disjoint sp_matrix c2 /\ disjoint epp_matrix c2)
(ensures fun h0 _ h1 -> modifies (loc c2) h0 h1 /\
as_seq h1 c2 ==
S.crypto_kem_enc_ct_pack_c2 a (as_seq h0 mu) (as_seq h0 b) (as_matrix h0 sp_matrix) (as_matrix h0 epp_matrix))
#push-options "--z3rlimit 200"
let crypto_kem_enc_ct_pack_c2 a mu b sp_matrix epp_matrix c2 =
push_frame ();
let v_matrix = matrix_create params_nbar params_nbar in
frodo_mul_add_sb_plus_e_plus_mu a mu b sp_matrix epp_matrix v_matrix;
frodo_pack (params_logq a) v_matrix c2;
clear_matrix v_matrix;
pop_frame ()
#pop-options
inline_for_extraction noextract
val get_sp_ep_epp_matrices:
a:FP.frodo_alg
-> seed_se:lbytes (crypto_bytes a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h seed_se /\ live h sp_matrix /\
live h ep_matrix /\ live h epp_matrix /\
disjoint seed_se sp_matrix /\ disjoint seed_se ep_matrix /\
disjoint seed_se epp_matrix /\ disjoint sp_matrix ep_matrix /\
disjoint sp_matrix epp_matrix /\ disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1 /\
(as_matrix h1 sp_matrix, as_matrix h1 ep_matrix, as_matrix h1 epp_matrix) ==
S.get_sp_ep_epp_matrices a (as_seq h0 seed_se))
let get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix =
push_frame ();
[@inline_let] let s_bytes_len = secretmatrixbytes_len a in
let r = create (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) (u8 0) in
KG.frodo_shake_r a (u8 0x96) seed_se (2ul *! s_bytes_len +! 2ul *! params_nbar *! params_nbar) r;
frodo_sample_matrix a params_nbar (params_n a) (sub r 0ul s_bytes_len) sp_matrix;
frodo_sample_matrix a params_nbar (params_n a) (sub r s_bytes_len s_bytes_len) ep_matrix;
frodo_sample_matrix a params_nbar params_nbar (sub r (2ul *! s_bytes_len) (2ul *! params_nbar *! params_nbar)) epp_matrix;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct0:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_a:lbytes bytes_seed_a
-> b:lbytes (publicmatrixbytes_len a)
-> mu:lbytes (bytes_mu a)
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> ct:lbytes (crypto_ciphertextbytes a)
-> Stack unit
(requires fun h ->
live h seed_a /\ live h b /\ live h mu /\ live h ct /\
live h sp_matrix /\ live h ep_matrix /\ live h epp_matrix /\
disjoint ct seed_a /\ disjoint ct b /\ disjoint ct mu /\
disjoint ct sp_matrix /\ disjoint ct ep_matrix /\ disjoint ct epp_matrix)
(ensures fun h0 _ h1 -> modifies (loc ct) h0 h1 /\
(let c1:LB.lbytes (FP.ct1bytes_len a) = S.crypto_kem_enc_ct_pack_c1 a gen_a (as_seq h0 seed_a) (as_seq h0 sp_matrix) (as_seq h0 ep_matrix) in
let c2:LB.lbytes (FP.ct2bytes_len a) = S.crypto_kem_enc_ct_pack_c2 a (as_seq h0 mu) (as_seq h0 b) (as_seq h0 sp_matrix) (as_seq h0 epp_matrix) in
v (crypto_ciphertextbytes a) == FP.ct1bytes_len a + FP.ct2bytes_len a /\
as_seq h1 ct `Seq.equal` LSeq.concat #_ #(FP.ct1bytes_len a) #(FP.ct2bytes_len a) c1 c2))
let crypto_kem_enc_ct0 a gen_a seed_a b mu sp_matrix ep_matrix epp_matrix ct =
let c1 = sub ct 0ul (ct1bytes_len a) in
let c2 = sub ct (ct1bytes_len a) (ct2bytes_len a) in
let h0 = ST.get () in
crypto_kem_enc_ct_pack_c1 a gen_a seed_a sp_matrix ep_matrix c1;
let h1 = ST.get () in
crypto_kem_enc_ct_pack_c2 a mu b sp_matrix epp_matrix c2;
let h2 = ST.get () in
LSeq.eq_intro
(LSeq.sub (as_seq h2 ct) 0 (v (ct1bytes_len a)))
(LSeq.sub (as_seq h1 ct) 0 (v (ct1bytes_len a)));
LSeq.lemma_concat2
(v (ct1bytes_len a)) (LSeq.sub (as_seq h1 ct) 0 (v (ct1bytes_len a)))
(v (ct2bytes_len a)) (LSeq.sub (as_seq h2 ct) (v (ct1bytes_len a)) (v (ct2bytes_len a))) (as_seq h2 ct)
inline_for_extraction noextract
val clear_matrix3:
a:FP.frodo_alg
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h sp_matrix /\ live h ep_matrix /\ live h epp_matrix /\
disjoint sp_matrix ep_matrix /\ disjoint sp_matrix epp_matrix /\
disjoint ep_matrix epp_matrix)
(ensures fun h0 _ h1 ->
modifies (loc sp_matrix |+| loc ep_matrix |+| loc epp_matrix) h0 h1)
let clear_matrix3 a sp_matrix ep_matrix epp_matrix =
clear_matrix sp_matrix;
clear_matrix ep_matrix;
clear_matrix epp_matrix
inline_for_extraction noextract
val crypto_kem_enc_ct:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu:lbytes (bytes_mu a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se:lbytes (crypto_bytes a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> Stack unit
(requires fun h ->
live h mu /\ live h pk /\ live h seed_se /\ live h ct /\
disjoint ct mu /\ disjoint ct pk /\ disjoint ct seed_se)
(ensures fun h0 _ h1 -> modifies (loc ct) h0 h1 /\
as_seq h1 ct == S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) (as_seq h0 seed_se))
#push-options "--z3rlimit 200"
let crypto_kem_enc_ct a gen_a mu pk seed_se ct =
push_frame ();
let h0 = ST.get () in
FP.expand_crypto_publickeybytes a;
let seed_a = sub pk 0ul bytes_seed_a in
let b = sub pk bytes_seed_a (publicmatrixbytes_len a) in
let sp_matrix = matrix_create params_nbar (params_n a) in
let ep_matrix = matrix_create params_nbar (params_n a) in
let epp_matrix = matrix_create params_nbar params_nbar in
get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix;
crypto_kem_enc_ct0 a gen_a seed_a b mu sp_matrix ep_matrix epp_matrix ct;
clear_matrix3 a sp_matrix ep_matrix epp_matrix;
let h1 = ST.get () in
LSeq.eq_intro
(as_seq h1 ct)
(S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) (as_seq h0 seed_se));
pop_frame ()
#pop-options
inline_for_extraction noextract
val crypto_kem_enc_ss:
a:FP.frodo_alg
-> k:lbytes (crypto_bytes a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> Stack unit
(requires fun h ->
live h k /\ live h ct /\ live h ss /\
disjoint ct ss /\ disjoint k ct /\ disjoint k ss)
(ensures fun h0 _ h1 -> modifies (loc ss) h0 h1 /\
as_seq h1 ss == S.crypto_kem_enc_ss a (as_seq h0 k) (as_seq h0 ct))
let crypto_kem_enc_ss a k ct ss =
push_frame ();
let ss_init_len = crypto_ciphertextbytes a +! crypto_bytes a in
let shake_input_ss = create ss_init_len (u8 0) in
concat2 (crypto_ciphertextbytes a) ct (crypto_bytes a) k shake_input_ss;
frodo_shake a ss_init_len shake_input_ss (crypto_bytes a) ss;
clear_words_u8 shake_input_ss;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_seed_se_k:
a:FP.frodo_alg
-> mu:lbytes (bytes_mu a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> Stack unit
(requires fun h ->
live h mu /\ live h pk /\ live h seed_se_k /\
disjoint seed_se_k mu /\ disjoint seed_se_k pk)
(ensures fun h0 _ h1 -> modifies (loc seed_se_k) h0 h1 /\
as_seq h1 seed_se_k == S.crypto_kem_enc_seed_se_k a (as_seq h0 mu) (as_seq h0 pk))
let crypto_kem_enc_seed_se_k a mu pk seed_se_k =
push_frame ();
let pkh_mu = create (bytes_pkhash a +! bytes_mu a) (u8 0) in
let h0 = ST.get () in
update_sub_f h0 pkh_mu 0ul (bytes_pkhash a)
(fun h -> FP.frodo_shake a (v (crypto_publickeybytes a)) (as_seq h0 pk) (v (bytes_pkhash a)))
(fun _ -> frodo_shake a (crypto_publickeybytes a) pk (bytes_pkhash a) (sub pkh_mu 0ul (bytes_pkhash a)));
let h1 = ST.get () in
update_sub pkh_mu (bytes_pkhash a) (bytes_mu a) mu;
let h2 = ST.get () in
LSeq.eq_intro
(LSeq.sub (as_seq h2 pkh_mu) 0 (v (bytes_pkhash a)))
(LSeq.sub (as_seq h1 pkh_mu) 0 (v (bytes_pkhash a)));
LSeq.lemma_concat2
(v (bytes_pkhash a)) (LSeq.sub (as_seq h1 pkh_mu) 0 (v (bytes_pkhash a)))
(v (bytes_mu a)) (as_seq h0 mu) (as_seq h2 pkh_mu);
//concat2 (bytes_pkhash a) pkh (bytes_mu a) mu pkh_mu;
frodo_shake a (bytes_pkhash a +! bytes_mu a) pkh_mu (2ul *! crypto_bytes a) seed_se_k;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc_ct_ss:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> mu:lbytes (bytes_mu a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> Stack unit
(requires fun h ->
live h seed_se_k /\ live h ct /\ live h ss /\ live h pk /\ live h mu /\
disjoint ct ss /\ disjoint ct pk /\ disjoint ss pk /\
disjoint mu ss /\ disjoint mu ct /\ disjoint seed_se_k ct /\ disjoint seed_se_k ss)
(ensures fun h0 _ h1 -> modifies (loc ct |+| loc ss) h0 h1 /\
(let seed_se = LSeq.sub (as_seq h0 seed_se_k) 0 (v (crypto_bytes a)) in
let k = LSeq.sub (as_seq h0 seed_se_k) (v (crypto_bytes a)) (v (crypto_bytes a)) in
as_seq h1 ct == S.crypto_kem_enc_ct a gen_a (as_seq h0 mu) (as_seq h0 pk) seed_se /\
as_seq h1 ss == S.crypto_kem_enc_ss a k (as_seq h1 ct)))
let crypto_kem_enc_ct_ss a gen_a seed_se_k mu ct ss pk =
let seed_se = sub seed_se_k 0ul (crypto_bytes a) in
let k = sub seed_se_k (crypto_bytes a) (crypto_bytes a) in
crypto_kem_enc_ct a gen_a mu pk seed_se ct;
crypto_kem_enc_ss a k ct ss
inline_for_extraction noextract
val crypto_kem_enc0:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu:lbytes (bytes_mu a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> seed_se_k:lbytes (2ul *! crypto_bytes a)
-> Stack unit
(requires fun h ->
live h ct /\ live h ss /\ live h pk /\ live h mu /\ live h seed_se_k /\
loc_pairwise_disjoint [loc mu; loc ct; loc ss; loc pk; loc seed_se_k])
(ensures fun h0 _ h1 -> modifies (loc ct |+| loc ss |+| loc seed_se_k) h0 h1 /\
(as_seq h1 ct, as_seq h1 ss) == S.crypto_kem_enc_ a gen_a (as_seq h0 mu) (as_seq h0 pk))
#push-options "--z3rlimit 200"
let crypto_kem_enc0 a gen_a mu ct ss pk seed_se_k =
crypto_kem_enc_seed_se_k a mu pk seed_se_k;
crypto_kem_enc_ct_ss a gen_a seed_se_k mu ct ss pk
#pop-options
inline_for_extraction noextract
val crypto_kem_enc_:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu:lbytes (bytes_mu a)
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> Stack unit
(requires fun h ->
live h ct /\ live h ss /\ live h pk /\ live h mu /\
disjoint ct ss /\ disjoint ct pk /\ disjoint ss pk /\
disjoint mu ss /\ disjoint mu ct /\ disjoint mu pk)
(ensures fun h0 _ h1 -> modifies (loc ct |+| loc ss) h0 h1 /\
(as_seq h1 ct, as_seq h1 ss) == S.crypto_kem_enc_ a gen_a (as_seq h0 mu) (as_seq h0 pk))
let crypto_kem_enc_ a gen_a mu ct ss pk =
push_frame ();
let seed_se_k = create (2ul *! crypto_bytes a) (u8 0) in
crypto_kem_enc0 a gen_a mu ct ss pk seed_se_k;
clear_words_u8 seed_se_k;
pop_frame ()
inline_for_extraction noextract
val crypto_kem_enc:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> ct:lbytes (crypto_ciphertextbytes a)
-> ss:lbytes (crypto_bytes a)
-> pk:lbytes (crypto_publickeybytes a)
-> Stack uint32
(requires fun h ->
disjoint state ct /\ disjoint state ss /\ disjoint state pk /\
live h ct /\ live h ss /\ live h pk /\
disjoint ct ss /\ disjoint ct pk /\ disjoint ss pk)
(ensures fun h0 _ h1 -> modifies (loc state |+| (loc ct |+| loc ss)) h0 h1 /\
(as_seq h1 ct, as_seq h1 ss) == S.crypto_kem_enc a gen_a (as_seq h0 state) (as_seq h0 pk)) | {
"checked_file": "/",
"dependencies": [
"Spec.Matrix.fst.checked",
"Spec.Frodo.Params.fst.checked",
"Spec.Frodo.KEM.Encaps.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Matrix.fst.checked",
"Hacl.Impl.Frodo.Sample.fst.checked",
"Hacl.Impl.Frodo.Params.fst.checked",
"Hacl.Impl.Frodo.Pack.fst.checked",
"Hacl.Impl.Frodo.KEM.KeyGen.fst.checked",
"Hacl.Impl.Frodo.KEM.fst.checked",
"Hacl.Impl.Frodo.Encode.fst.checked",
"Hacl.Frodo.Random.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Frodo.KEM.Encaps.fst"
} | [
{
"abbrev": true,
"full_module": "Hacl.Impl.Frodo.KEM.KeyGen",
"short_module": "KG"
},
{
"abbrev": true,
"full_module": "Spec.Matrix",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.KEM.Encaps",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.Params",
"short_module": "FP"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "LB"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Frodo.Random",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Sample",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Pack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Encode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Params",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Matrix",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Frodo.Params.frodo_alg ->
gen_a: Spec.Frodo.Params.frodo_gen_a{Hacl.Impl.Frodo.Params.is_supported gen_a} ->
ct: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_ciphertextbytes a) ->
ss: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_bytes a) ->
pk: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_publickeybytes a)
-> FStar.HyperStack.ST.Stack Lib.IntTypes.uint32 | FStar.HyperStack.ST.Stack | [] | [] | [
"Spec.Frodo.Params.frodo_alg",
"Spec.Frodo.Params.frodo_gen_a",
"Prims.b2t",
"Hacl.Impl.Frodo.Params.is_supported",
"Hacl.Impl.Matrix.lbytes",
"Hacl.Impl.Frodo.Params.crypto_ciphertextbytes",
"Hacl.Impl.Frodo.Params.crypto_bytes",
"Hacl.Impl.Frodo.Params.crypto_publickeybytes",
"Lib.IntTypes.u32",
"Lib.IntTypes.uint32",
"Prims.unit",
"FStar.HyperStack.ST.pop_frame",
"Hacl.Impl.Frodo.KEM.clear_words_u8",
"Hacl.Impl.Frodo.Params.bytes_mu",
"Hacl.Impl.Frodo.KEM.Encaps.crypto_kem_enc_",
"Hacl.Frodo.Random.randombytes_",
"Lib.Buffer.recall",
"Lib.Buffer.MUT",
"Lib.IntTypes.uint8",
"FStar.UInt32.__uint_to_t",
"Hacl.Frodo.Random.state",
"Lib.Buffer.lbuffer_t",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"Lib.Buffer.create",
"Lib.IntTypes.u8",
"Lib.Buffer.lbuffer",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let crypto_kem_enc a gen_a ct ss pk =
| recall state;
push_frame ();
let coins = create (bytes_mu a) (u8 0) in
recall state;
randombytes_ (bytes_mu a) coins;
crypto_kem_enc_ a gen_a coins ct ss pk;
clear_words_u8 coins;
pop_frame ();
u32 0 | false |
Hacl.Impl.Ed25519.Ladder.fst | Hacl.Impl.Ed25519.Ladder.point_mul_g_double_vartime | val point_mul_g_double_vartime:
out:point
-> scalar1:lbuffer uint8 32ul
-> scalar2:lbuffer uint8 32ul
-> q2:point ->
Stack unit
(requires fun h ->
live h out /\ live h scalar1 /\
live h scalar2 /\ live h q2 /\
disjoint q2 out /\ disjoint scalar1 out /\ disjoint scalar2 out /\
F51.linv (as_seq h q2))
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_double_fw #S.aff_point_c S.mk_ed25519_comm_monoid
(S.to_aff_point g_c) 256 (BSeq.nat_from_bytes_le (as_seq h0 scalar1))
(S.to_aff_point (F51.point_eval h0 q2)) (BSeq.nat_from_bytes_le (as_seq h0 scalar2)) 5) | val point_mul_g_double_vartime:
out:point
-> scalar1:lbuffer uint8 32ul
-> scalar2:lbuffer uint8 32ul
-> q2:point ->
Stack unit
(requires fun h ->
live h out /\ live h scalar1 /\
live h scalar2 /\ live h q2 /\
disjoint q2 out /\ disjoint scalar1 out /\ disjoint scalar2 out /\
F51.linv (as_seq h q2))
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_double_fw #S.aff_point_c S.mk_ed25519_comm_monoid
(S.to_aff_point g_c) 256 (BSeq.nat_from_bytes_le (as_seq h0 scalar1))
(S.to_aff_point (F51.point_eval h0 q2)) (BSeq.nat_from_bytes_le (as_seq h0 scalar2)) 5) | let point_mul_g_double_vartime out scalar1 scalar2 q2 =
push_frame ();
let tmp = create 28ul (u64 0) in
let g = sub tmp 0ul 20ul in
let bscalar1 = sub tmp 20ul 4ul in
let bscalar2 = sub tmp 24ul 4ul in
make_g g;
point_mul_g_double_vartime_aux out scalar1 g scalar2 q2 bscalar1 bscalar2;
pop_frame () | {
"file_name": "code/ed25519/Hacl.Impl.Ed25519.Ladder.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 404,
"start_col": 0,
"start_line": 396
} | module Hacl.Impl.Ed25519.Ladder
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
open Hacl.Bignum25519
module F51 = Hacl.Impl.Ed25519.Field51
module BSeq = Lib.ByteSequence
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BE = Hacl.Impl.Exponentiation
module ME = Hacl.Impl.MultiExponentiation
module PT = Hacl.Impl.PrecompTable
module SPT256 = Hacl.Spec.PrecompBaseTable256
module BD = Hacl.Bignum.Definitions
module SD = Hacl.Spec.Bignum.Definitions
module S = Spec.Ed25519
open Hacl.Impl.Ed25519.PointConstants
include Hacl.Impl.Ed25519.Group
include Hacl.Ed25519.PrecompTable
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
inline_for_extraction noextract
let table_inv_w4 : BE.table_inv_t U64 20ul 16ul =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 4ul in
[@inline_let] let table_len = 16ul in
BE.table_inv_precomp len ctx_len k l table_len
inline_for_extraction noextract
let table_inv_w5 : BE.table_inv_t U64 20ul 32ul =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 5ul in
[@inline_let] let table_len = 32ul in
assert_norm (pow2 (v l) = v table_len);
BE.table_inv_precomp len ctx_len k l table_len
inline_for_extraction noextract
val convert_scalar: scalar:lbuffer uint8 32ul -> bscalar:lbuffer uint64 4ul ->
Stack unit
(requires fun h -> live h scalar /\ live h bscalar /\ disjoint scalar bscalar)
(ensures fun h0 _ h1 -> modifies (loc bscalar) h0 h1 /\
BD.bn_v h1 bscalar == BSeq.nat_from_bytes_le (as_seq h0 scalar))
let convert_scalar scalar bscalar =
let h0 = ST.get () in
Hacl.Spec.Bignum.Convert.bn_from_bytes_le_lemma #U64 32 (as_seq h0 scalar);
Hacl.Bignum.Convert.mk_bn_from_bytes_le true 32ul scalar bscalar
inline_for_extraction noextract
val point_mul_noalloc:
out:point
-> bscalar:lbuffer uint64 4ul
-> q:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h q /\ live h out /\
disjoint q out /\ disjoint q bscalar /\ disjoint out bscalar /\
F51.point_inv_t h q /\ F51.inv_ext_point (as_seq h q) /\
BD.bn_v h bscalar < pow2 256)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.point_inv_t h1 out /\ F51.inv_ext_point (as_seq h1 out) /\
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_fw S.mk_ed25519_comm_monoid
(S.to_aff_point (F51.point_eval h0 q)) 256 (BD.bn_v h0 bscalar) 4)
let point_mul_noalloc out bscalar q =
BE.lexp_fw_consttime 20ul 0ul mk_ed25519_concrete_ops
4ul (null uint64) q 4ul 256ul bscalar out
let point_mul out scalar q =
let h0 = ST.get () in
SE.exp_fw_lemma S.mk_ed25519_concrete_ops
(F51.point_eval h0 q) 256 (BSeq.nat_from_bytes_le (as_seq h0 scalar)) 4;
push_frame ();
let bscalar = create 4ul (u64 0) in
convert_scalar scalar bscalar;
point_mul_noalloc out bscalar q;
pop_frame ()
val precomp_get_consttime: BE.pow_a_to_small_b_st U64 20ul 0ul mk_ed25519_concrete_ops 4ul 16ul
(BE.table_inv_precomp 20ul 0ul mk_ed25519_concrete_ops 4ul 16ul)
[@CInline]
let precomp_get_consttime ctx a table bits_l tmp =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 4ul in
[@inline_let] let table_len = 16ul in
BE.lprecomp_get_consttime len ctx_len k l table_len ctx a table bits_l tmp
inline_for_extraction noextract
val point_mul_g_noalloc: out:point -> bscalar:lbuffer uint64 4ul
-> q1:point -> q2:point
-> q3:point -> q4:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h out /\ live h q1 /\
live h q2 /\ live h q3 /\ live h q4 /\
disjoint out bscalar /\ disjoint out q1 /\ disjoint out q2 /\
disjoint out q3 /\ disjoint out q4 /\
disjoint q1 q2 /\ disjoint q1 q3 /\ disjoint q1 q4 /\
disjoint q2 q3 /\ disjoint q2 q4 /\ disjoint q3 q4 /\
BD.bn_v h bscalar < pow2 256 /\
F51.linv (as_seq h q1) /\ refl (as_seq h q1) == g_aff /\
F51.linv (as_seq h q2) /\ refl (as_seq h q2) == g_pow2_64 /\
F51.linv (as_seq h q3) /\ refl (as_seq h q3) == g_pow2_128 /\
F51.linv (as_seq h q4) /\ refl (as_seq h q4) == g_pow2_192)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
(let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 (BD.bn_v h0 bscalar) in
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_four_fw S.mk_ed25519_comm_monoid
g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4))
let point_mul_g_noalloc out bscalar q1 q2 q3 q4 =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 4ul in
[@inline_let] let table_len = 16ul in
[@inline_let] let bLen = 1ul in
[@inline_let] let bBits = 64ul in
let h0 = ST.get () in
recall_contents precomp_basepoint_table_w4 precomp_basepoint_table_lseq_w4;
let h1 = ST.get () in
precomp_basepoint_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q1) (as_seq h1 precomp_basepoint_table_w4));
recall_contents precomp_g_pow2_64_table_w4 precomp_g_pow2_64_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_64_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q2) (as_seq h1 precomp_g_pow2_64_table_w4));
recall_contents precomp_g_pow2_128_table_w4 precomp_g_pow2_128_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_128_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q3) (as_seq h1 precomp_g_pow2_128_table_w4));
recall_contents precomp_g_pow2_192_table_w4 precomp_g_pow2_192_table_lseq_w4;
let h1 = ST.get () in
precomp_g_pow2_192_table_lemma_w4 ();
assert (table_inv_w4 (as_seq h1 q4) (as_seq h1 precomp_g_pow2_192_table_w4));
let r1 = sub bscalar 0ul 1ul in
let r2 = sub bscalar 1ul 1ul in
let r3 = sub bscalar 2ul 1ul in
let r4 = sub bscalar 3ul 1ul in
SPT256.lemma_decompose_nat256_as_four_u64_lbignum (as_seq h0 bscalar);
ME.mk_lexp_four_fw_tables len ctx_len k l table_len
table_inv_w4 table_inv_w4 table_inv_w4 table_inv_w4
precomp_get_consttime
precomp_get_consttime
precomp_get_consttime
precomp_get_consttime
(null uint64) q1 bLen bBits r1 q2 r2 q3 r3 q4 r4
(to_const precomp_basepoint_table_w4)
(to_const precomp_g_pow2_64_table_w4)
(to_const precomp_g_pow2_128_table_w4)
(to_const precomp_g_pow2_192_table_w4)
out;
LowStar.Ignore.ignore q2; // q2, q3, q4 are unused variables
LowStar.Ignore.ignore q3;
LowStar.Ignore.ignore q4
inline_for_extraction noextract
val point_mul_g_mk_q1234: out:point -> bscalar:lbuffer uint64 4ul -> q1:point ->
Stack unit
(requires fun h ->
live h bscalar /\ live h out /\ live h q1 /\
disjoint out bscalar /\ disjoint out q1 /\
BD.bn_v h bscalar < pow2 256 /\
F51.linv (as_seq h q1) /\ refl (as_seq h q1) == g_aff)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
(let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 (BD.bn_v h0 bscalar) in
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_four_fw S.mk_ed25519_comm_monoid
g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4))
let point_mul_g_mk_q1234 out bscalar q1 =
push_frame ();
let q2 = mk_ext_g_pow2_64 () in
let q3 = mk_ext_g_pow2_128 () in
let q4 = mk_ext_g_pow2_192 () in
ext_g_pow2_64_lseq_lemma ();
ext_g_pow2_128_lseq_lemma ();
ext_g_pow2_192_lseq_lemma ();
point_mul_g_noalloc out bscalar q1 q2 q3 q4;
pop_frame ()
val lemma_exp_four_fw_local: b:BSeq.lbytes 32 ->
Lemma (let bn = BSeq.nat_from_bytes_le b in
let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 bn in
let cm = S.mk_ed25519_comm_monoid in
LE.exp_four_fw cm g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4 ==
S.to_aff_point (S.point_mul_g b))
let lemma_exp_four_fw_local b =
let bn = BSeq.nat_from_bytes_le b in
let (b0, b1, b2, b3) = SPT256.decompose_nat256_as_four_u64 bn in
let cm = S.mk_ed25519_comm_monoid in
let res = LE.exp_four_fw cm g_aff 64 b0 g_pow2_64 b1 g_pow2_128 b2 g_pow2_192 b3 4 in
assert (res == SPT256.exp_as_exp_four_nat256_precomp cm g_aff bn);
SPT256.lemma_point_mul_base_precomp4 cm g_aff bn;
assert (res == LE.pow cm g_aff bn);
SE.exp_fw_lemma S.mk_ed25519_concrete_ops g_c 256 bn 4;
LE.exp_fw_lemma cm g_aff 256 bn 4;
assert (S.to_aff_point (S.point_mul_g b) == LE.pow cm g_aff bn)
[@CInline]
let point_mul_g out scalar =
push_frame ();
let h0 = ST.get () in
let bscalar = create 4ul (u64 0) in
convert_scalar scalar bscalar;
let q1 = create 20ul (u64 0) in
make_g q1;
point_mul_g_mk_q1234 out bscalar q1;
lemma_exp_four_fw_local (as_seq h0 scalar);
pop_frame ()
inline_for_extraction noextract
val point_mul_g_double_vartime_noalloc:
out:point
-> scalar1:lbuffer uint64 4ul -> q1:point
-> scalar2:lbuffer uint64 4ul -> q2:point
-> table2: lbuffer uint64 640ul ->
Stack unit
(requires fun h ->
live h out /\ live h scalar1 /\ live h q1 /\
live h scalar2 /\ live h q2 /\ live h table2 /\
eq_or_disjoint q1 q2 /\ disjoint out q1 /\ disjoint out q2 /\
disjoint out scalar1 /\ disjoint out scalar2 /\ disjoint out table2 /\
BD.bn_v h scalar1 < pow2 256 /\ BD.bn_v h scalar2 < pow2 256 /\
F51.linv (as_seq h q1) /\ F51.linv (as_seq h q2) /\
F51.point_eval h q1 == g_c /\
table_inv_w5 (as_seq h q2) (as_seq h table2))
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_double_fw #S.aff_point_c S.mk_ed25519_comm_monoid
(S.to_aff_point (F51.point_eval h0 q1)) 256 (BD.bn_v h0 scalar1)
(S.to_aff_point (F51.point_eval h0 q2)) (BD.bn_v h0 scalar2) 5)
let point_mul_g_double_vartime_noalloc out scalar1 q1 scalar2 q2 table2 =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let l = 5ul in
[@inline_let] let table_len = 32ul in
[@inline_let] let bLen = 4ul in
[@inline_let] let bBits = 256ul in
assert_norm (pow2 (v l) == v table_len);
let h0 = ST.get () in
recall_contents precomp_basepoint_table_w5 precomp_basepoint_table_lseq_w5;
let h1 = ST.get () in
precomp_basepoint_table_lemma_w5 ();
assert (table_inv_w5 (as_seq h1 q1) (as_seq h1 precomp_basepoint_table_w5));
assert (table_inv_w5 (as_seq h1 q2) (as_seq h1 table2));
ME.mk_lexp_double_fw_tables len ctx_len k l table_len
table_inv_w5 table_inv_w5
(BE.lprecomp_get_vartime len ctx_len k l table_len)
(BE.lprecomp_get_vartime len ctx_len k l table_len)
(null uint64) q1 bLen bBits scalar1 q2 scalar2
(to_const precomp_basepoint_table_w5) (to_const table2) out
inline_for_extraction noextract
val point_mul_g_double_vartime_table:
out:point
-> scalar1:lbuffer uint64 4ul -> q1:point
-> scalar2:lbuffer uint64 4ul -> q2:point ->
Stack unit
(requires fun h ->
live h out /\ live h scalar1 /\ live h q1 /\
live h scalar2 /\ live h q2 /\
eq_or_disjoint q1 q2 /\ disjoint out q1 /\ disjoint out q2 /\
disjoint out scalar1 /\ disjoint out scalar2 /\
BD.bn_v h scalar1 < pow2 256 /\ BD.bn_v h scalar2 < pow2 256 /\
F51.linv (as_seq h q1) /\ F51.linv (as_seq h q2) /\
F51.point_eval h q1 == g_c)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_double_fw #S.aff_point_c S.mk_ed25519_comm_monoid
(S.to_aff_point (F51.point_eval h0 q1)) 256 (BD.bn_v h0 scalar1)
(S.to_aff_point (F51.point_eval h0 q2)) (BD.bn_v h0 scalar2) 5)
let point_mul_g_double_vartime_table out scalar1 q1 scalar2 q2 =
[@inline_let] let len = 20ul in
[@inline_let] let ctx_len = 0ul in
[@inline_let] let k = mk_ed25519_concrete_ops in
[@inline_let] let table_len = 32ul in
assert_norm (pow2 5 == v table_len);
push_frame ();
let table2 = create (table_len *! len) (u64 0) in
PT.lprecomp_table len ctx_len k (null uint64) q2 table_len table2;
point_mul_g_double_vartime_noalloc out scalar1 q1 scalar2 q2 table2;
pop_frame ()
inline_for_extraction noextract
val point_mul_g_double_vartime_aux:
out:point
-> scalar1:lbuffer uint8 32ul -> q1:point
-> scalar2:lbuffer uint8 32ul -> q2:point
-> bscalar1:lbuffer uint64 4ul
-> bscalar2:lbuffer uint64 4ul ->
Stack unit
(requires fun h ->
live h out /\ live h scalar1 /\ live h q1 /\
live h scalar2 /\ live h q2 /\ live h bscalar1 /\ live h bscalar2 /\
disjoint scalar1 bscalar1 /\ disjoint scalar2 bscalar2 /\ disjoint scalar2 bscalar1 /\
disjoint scalar1 bscalar2 /\ disjoint bscalar1 bscalar2 /\ disjoint bscalar1 out /\
disjoint bscalar1 q1 /\ disjoint bscalar1 q2 /\ disjoint bscalar2 out /\
disjoint bscalar2 q1 /\ disjoint bscalar2 q2 /\ eq_or_disjoint q1 q2 /\
disjoint q1 out /\ disjoint q2 out /\ disjoint scalar1 out /\ disjoint scalar2 out /\
F51.linv (as_seq h q1) /\ F51.linv (as_seq h q2) /\
F51.point_eval h q1 == g_c)
(ensures fun h0 _ h1 -> modifies (loc out |+| loc bscalar1 |+| loc bscalar2) h0 h1 /\
F51.linv (as_seq h1 out) /\
BD.bn_v h1 bscalar1 == BSeq.nat_from_bytes_le (as_seq h0 scalar1) /\
BD.bn_v h1 bscalar2 == BSeq.nat_from_bytes_le (as_seq h0 scalar2) /\
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_double_fw #S.aff_point_c S.mk_ed25519_comm_monoid
(S.to_aff_point (F51.point_eval h0 q1)) 256 (BD.bn_v h1 bscalar1)
(S.to_aff_point (F51.point_eval h0 q2)) (BD.bn_v h1 bscalar2) 5)
let point_mul_g_double_vartime_aux out scalar1 q1 scalar2 q2 bscalar1 bscalar2 =
let h0 = ST.get () in
convert_scalar scalar1 bscalar1;
convert_scalar scalar2 bscalar2;
let h1 = ST.get () in
assert (BD.bn_v h1 bscalar1 == BSeq.nat_from_bytes_le (as_seq h0 scalar1));
assert (BD.bn_v h1 bscalar2 == BSeq.nat_from_bytes_le (as_seq h0 scalar2));
point_mul_g_double_vartime_table out bscalar1 q1 bscalar2 q2
val point_mul_g_double_vartime:
out:point
-> scalar1:lbuffer uint8 32ul
-> scalar2:lbuffer uint8 32ul
-> q2:point ->
Stack unit
(requires fun h ->
live h out /\ live h scalar1 /\
live h scalar2 /\ live h q2 /\
disjoint q2 out /\ disjoint scalar1 out /\ disjoint scalar2 out /\
F51.linv (as_seq h q2))
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F51.linv (as_seq h1 out) /\
S.to_aff_point (F51.point_eval h1 out) ==
LE.exp_double_fw #S.aff_point_c S.mk_ed25519_comm_monoid
(S.to_aff_point g_c) 256 (BSeq.nat_from_bytes_le (as_seq h0 scalar1))
(S.to_aff_point (F51.point_eval h0 q2)) (BSeq.nat_from_bytes_le (as_seq h0 scalar2)) 5) | {
"checked_file": "/",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"Spec.Ed25519.Lemmas.fsti.checked",
"Spec.Ed25519.fst.checked",
"prims.fst.checked",
"LowStar.Ignore.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.PrecompBaseTable256.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"Hacl.Spec.Bignum.Convert.fst.checked",
"Hacl.Impl.PrecompTable.fsti.checked",
"Hacl.Impl.MultiExponentiation.fsti.checked",
"Hacl.Impl.Exponentiation.fsti.checked",
"Hacl.Impl.Ed25519.PointNegate.fst.checked",
"Hacl.Impl.Ed25519.PointConstants.fst.checked",
"Hacl.Impl.Ed25519.Group.fst.checked",
"Hacl.Impl.Ed25519.Field51.fst.checked",
"Hacl.Ed25519.PrecompTable.fsti.checked",
"Hacl.Bignum25519.fsti.checked",
"Hacl.Bignum.Definitions.fst.checked",
"Hacl.Bignum.Convert.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.Impl.Ed25519.Ladder.fst"
} | [
{
"abbrev": false,
"full_module": "Hacl.Ed25519.PrecompTable",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.Group",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519.PointConstants",
"short_module": null
},
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "SD"
},
{
"abbrev": true,
"full_module": "Hacl.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.PrecompBaseTable256",
"short_module": "SPT256"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.PrecompTable",
"short_module": "PT"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.MultiExponentiation",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Exponentiation",
"short_module": "BE"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": true,
"full_module": "Spec.Ed25519",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Ed25519.Field51",
"short_module": "F51"
},
{
"abbrev": true,
"full_module": "Lib.ByteSequence",
"short_module": "BSeq"
},
{
"abbrev": false,
"full_module": "Hacl.Bignum25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Ed25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
out: Hacl.Bignum25519.point ->
scalar1: Lib.Buffer.lbuffer Lib.IntTypes.uint8 32ul ->
scalar2: Lib.Buffer.lbuffer Lib.IntTypes.uint8 32ul ->
q2: Hacl.Bignum25519.point
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Hacl.Bignum25519.point",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"FStar.UInt32.__uint_to_t",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Ed25519.Ladder.point_mul_g_double_vartime_aux",
"Hacl.Impl.Ed25519.PointConstants.make_g",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.MUT",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U64",
"Lib.IntTypes.SEC",
"FStar.UInt32.uint_to_t",
"FStar.UInt32.t",
"Lib.Buffer.sub",
"Lib.IntTypes.uint64",
"Lib.Buffer.create",
"Lib.IntTypes.u64",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let point_mul_g_double_vartime out scalar1 scalar2 q2 =
| push_frame ();
let tmp = create 28ul (u64 0) in
let g = sub tmp 0ul 20ul in
let bscalar1 = sub tmp 20ul 4ul in
let bscalar2 = sub tmp 24ul 4ul in
make_g g;
point_mul_g_double_vartime_aux out scalar1 g scalar2 q2 bscalar1 bscalar2;
pop_frame () | false |
BinaryTrees.fst | BinaryTrees.size | val size : tree -> Tot nat | val size : tree -> Tot nat | let rec size t =
match t with
| Leaf -> 0
| Node n t1 t2 -> 1 + size t1 + size t2 | {
"file_name": "examples/data_structures/BinaryTrees.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 41,
"end_line": 26,
"start_col": 0,
"start_line": 23
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module BinaryTrees
type tree =
| Leaf : tree
| Node : root:int -> left:tree -> right:tree -> tree | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "BinaryTrees.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: BinaryTrees.tree -> Prims.nat | Prims.Tot | [
"total"
] | [] | [
"BinaryTrees.tree",
"Prims.int",
"Prims.op_Addition",
"BinaryTrees.size",
"Prims.nat"
] | [
"recursion"
] | false | false | false | true | false | let rec size t =
| match t with
| Leaf -> 0
| Node n t1 t2 -> 1 + size t1 + size t2 | false |
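A quick concrete check of the size record above; the tree value is made up for illustration, and only tree, Leaf, Node, and size come from the record:

(* size counts Node constructors: Leaf contributes 0, each Node contributes 1. *)
let example_tree : tree = Node 1 (Node 2 Leaf Leaf) (Node 3 Leaf (Node 4 Leaf Leaf))
let _ = assert_norm (size example_tree = 4)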
BinaryTrees.fst | BinaryTrees.compose | val compose : f1: (_: _ -> _) -> f2: (_: _ -> _) -> x: _ -> _ | let compose f1 f2 x = f1 (f2 x) | {
"file_name": "examples/data_structures/BinaryTrees.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 31,
"end_line": 59,
"start_col": 0,
"start_line": 59
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module BinaryTrees
type tree =
| Leaf : tree
| Node : root:int -> left:tree -> right:tree -> tree
val size : tree -> Tot nat
let rec size t =
match t with
| Leaf -> 0
| Node n t1 t2 -> 1 + size t1 + size t2
val map : f:(int -> Tot int) -> tree -> Tot tree
let rec map f t =
match t with
| Leaf -> Leaf
| Node n t1 t2 -> Node (f n) (map f t1) (map f t2)
val map_size : f:(int -> Tot int) -> t:tree -> Lemma (size (map f t) = size t)
let rec map_size f t =
match t with
| Leaf -> ()
| Node n t1 t2 -> map_size f t1; map_size f t2
val find : p:(int -> Tot bool) -> tree -> Tot (option int)
let rec find p t =
match t with
| Leaf -> None
| Node n t1 t2 -> if p n then Some n else
if Some? (find p t1) then find p t1
else find p t2
val find_some : p:(int -> Tot bool) -> t:tree ->
Lemma (None? (find p t) \/ p (Some?.v (find p t)))
let rec find_some p t =
match t with
| Leaf -> ()
| Node n t1 t2 -> find_some p t1; find_some p t2
let map_option f o = match o with
| Some n -> Some (f n)
| None -> None | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "BinaryTrees.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f1: (_: _ -> _) -> f2: (_: _ -> _) -> x: _ -> _ | Prims.Tot | [
"total"
] | [] | [] | [] | false | false | false | true | false | let compose f1 f2 x =
| f1 (f2 x) | false |
|
BinaryTrees.fst | BinaryTrees.map_size | val map_size : f:(int -> Tot int) -> t:tree -> Lemma (size (map f t) = size t) | val map_size : f:(int -> Tot int) -> t:tree -> Lemma (size (map f t) = size t) | let rec map_size f t =
match t with
| Leaf -> ()
| Node n t1 t2 -> map_size f t1; map_size f t2 | {
"file_name": "examples/data_structures/BinaryTrees.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 48,
"end_line": 38,
"start_col": 0,
"start_line": 35
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module BinaryTrees
type tree =
| Leaf : tree
| Node : root:int -> left:tree -> right:tree -> tree
val size : tree -> Tot nat
let rec size t =
match t with
| Leaf -> 0
| Node n t1 t2 -> 1 + size t1 + size t2
val map : f:(int -> Tot int) -> tree -> Tot tree
let rec map f t =
match t with
| Leaf -> Leaf
| Node n t1 t2 -> Node (f n) (map f t1) (map f t2) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "BinaryTrees.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: Prims.int -> Prims.int) -> t: BinaryTrees.tree
-> FStar.Pervasives.Lemma (ensures BinaryTrees.size (BinaryTrees.map f t) = BinaryTrees.size t) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.int",
"BinaryTrees.tree",
"BinaryTrees.map_size",
"Prims.unit"
] | [
"recursion"
] | false | false | true | false | false | let rec map_size f t =
| match t with
| Leaf -> ()
| Node n t1 t2 ->
map_size f t1;
map_size f t2 | false |
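The proof above is structural induction in its most direct form: the recursive calls map_size f t1 and map_size f t2 supply the induction hypotheses for the two subtrees, and the Leaf case is discharged automatically. A hedged sketch of a client instantiating the lemma (the wrapper name and the concrete mapping function are illustrative assumptions, not part of the record):

(* Instantiating map_size at a specific mapping function. *)
let use_map_size (t:tree) : Lemma (size (map (fun x -> x + 1) t) = size t) =
  map_size (fun x -> x + 1) t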
BinaryTrees.fst | BinaryTrees.map | val map : f:(int -> Tot int) -> tree -> Tot tree | val map : f:(int -> Tot int) -> tree -> Tot tree | let rec map f t =
match t with
| Leaf -> Leaf
| Node n t1 t2 -> Node (f n) (map f t1) (map f t2) | {
"file_name": "examples/data_structures/BinaryTrees.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 52,
"end_line": 32,
"start_col": 0,
"start_line": 29
} | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module BinaryTrees
type tree =
| Leaf : tree
| Node : root:int -> left:tree -> right:tree -> tree
val size : tree -> Tot nat
let rec size t =
match t with
| Leaf -> 0
| Node n t1 t2 -> 1 + size t1 + size t2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "BinaryTrees.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: Prims.int -> Prims.int) -> t: BinaryTrees.tree -> BinaryTrees.tree | Prims.Tot | [
"total"
] | [] | [
"Prims.int",
"BinaryTrees.tree",
"BinaryTrees.Leaf",
"BinaryTrees.Node",
"BinaryTrees.map"
] | [
"recursion"
] | false | false | false | true | false | let rec map f t =
| match t with
| Leaf -> Leaf
| Node n t1 t2 -> Node (f n) (map f t1) (map f t2) | false |
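As with size, the map record above can be exercised by normalization on a small concrete tree; the input and expected output values are illustrative, not part of the dataset:

(* map preserves the tree shape and rewrites every node label with f. *)
let _ = assert_norm (map (fun x -> x * 2) (Node 1 Leaf (Node 2 Leaf Leaf))
                     = Node 2 Leaf (Node 4 Leaf Leaf))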