file_name (string, 5-52) | name (string, 4-95) | original_source_type (string, 0-23k) | source_type (string, 9-23k) | source_definition (string, 9-57.9k) | source (dict) | source_range (dict) | file_context (string, 0-721k) | dependencies (dict) | opens_and_abbrevs (list, 2-94) | vconfig (dict) | interleaved (bool, 1 class) | verbose_type (string, 1-7.42k) | effect (string, 118 classes) | effect_flags (sequence, 0-2) | mutual_with (sequence, 0-11) | ideal_premises (sequence, 0-236) | proof_features (sequence, 0-1) | is_simple_lemma (bool, 2 classes) | is_div (bool, 2 classes) | is_proof (bool, 2 classes) | is_simply_typed (bool, 2 classes) | is_type (bool, 2 classes) | partial_definition (string, 5-3.99k) | completed_definiton (string, 1-1.63M) | isa_cross_project_example (bool, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Vale.X64.Leakage.fst | Vale.X64.Leakage.monotone_decreases_count | val monotone_decreases_count (ts ts': analysis_taints)
: Lemma (requires taintstate_monotone ts ts' /\ not (eq_leakage_taints ts.lts ts'.lts))
(ensures count_publics ts' < count_publics ts) | val monotone_decreases_count (ts ts': analysis_taints)
: Lemma (requires taintstate_monotone ts ts' /\ not (eq_leakage_taints ts.lts ts'.lts))
(ensures count_publics ts' < count_publics ts) | let monotone_decreases_count (ts ts':analysis_taints) : Lemma
(requires taintstate_monotone ts ts' /\ not (eq_leakage_taints ts.lts ts'.lts))
(ensures count_publics ts' < count_publics ts)
=
let regs1 = ts'.lts.regTaint in
let regs2 = ts.lts.regTaint in
if count_public_registers regs1 n_reg_files >= count_public_registers regs2 n_reg_files then (
lemma_count_public_registers regs1 regs2 n_reg_files;
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
) | {
"file_name": "vale/code/arch/x64/Vale.X64.Leakage.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 3,
"end_line": 182,
"start_col": 0,
"start_line": 173
} | module Vale.X64.Leakage
open FStar.Mul
open Vale.X64.Machine_s
module S = Vale.X64.Machine_Semantics_s
open Vale.X64.Leakage_s
open Vale.X64.Leakage_Helpers
open Vale.X64.Leakage_Ins
unfold let machine_eval_ocmp = S.machine_eval_ocmp
unfold let machine_eval_code = S.machine_eval_code
unfold let machine_eval_codes = S.machine_eval_codes
unfold let machine_eval_while = S.machine_eval_while
#reset-options "--initial_ifuel 0 --max_ifuel 1 --initial_fuel 1 --max_fuel 1"
let normalize_taints (ts:analysis_taints) : analysis_taints =
let AnalysisTaints lts rts = ts in
AnalysisTaints lts (regs_to_map (map_to_regs rts))
let combine_reg_taints (regs1 regs2:reg_taint) : reg_taint =
FunctionalExtensionality.on reg (fun x -> merge_taint (regs1 x) (regs2 x))
let rec eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : bool =
if k = 0 then true
else regs1 (Reg rf (k - 1)) = regs2 (Reg rf (k - 1)) && eq_regs_file regs1 regs2 rf (k - 1)
let rec eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : bool =
if k = 0 then true
else eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1)) && eq_regs regs1 regs2 (k - 1)
let rec lemma_eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(ensures eq_regs_file regs1 regs2 rf k <==>
(forall (i:nat).{:pattern (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i)))
=
if k > 0 then lemma_eq_regs_file regs1 regs2 rf (k - 1)
let rec lemma_eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(ensures
eq_regs regs1 regs2 k <==>
(forall (i j:nat).{:pattern (Reg i j)} i < k /\ j < n_regs i ==>
regs1 (Reg i j) == regs2 (Reg i j)))
=
if k > 0 then (
lemma_eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1));
lemma_eq_regs regs1 regs2 (k - 1)
)
let eq_registers (regs1 regs2:reg_taint) : (b:bool{b <==> regs1 == regs2}) =
lemma_eq_regs regs1 regs2 n_reg_files;
let b = eq_regs regs1 regs2 n_reg_files in
if b then (
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
);
b
let eq_leakage_taints (ts1 ts2:leakage_taints) : (b:bool{b <==> ts1 == ts2}) =
eq_registers ts1.regTaint ts2.regTaint &&
ts1.flagsTaint = ts2.flagsTaint &&
ts1.cfFlagsTaint = ts2.cfFlagsTaint &&
ts1.ofFlagsTaint = ts2.ofFlagsTaint
let taintstate_monotone_regs (ts ts':reg_taint) =
(forall (r:reg).{:pattern (ts' r) \/ (ts r)}
Public? (ts' r) ==> Public? (ts r))
let taintstate_monotone (ts ts':analysis_taints) =
let ts = ts.lts in
let ts' = ts'.lts in
taintstate_monotone_regs ts.regTaint ts'.regTaint /\
(Public? (ts'.flagsTaint) ==> Public? (ts.flagsTaint)) /\
(Public? (ts'.cfFlagsTaint) ==> Public? (ts.cfFlagsTaint)) /\
(Public? (ts'.ofFlagsTaint) ==> Public? (ts.ofFlagsTaint))
let taintstate_monotone_trans (ts1:analysis_taints) (ts2:analysis_taints) (ts3:analysis_taints)
: Lemma (taintstate_monotone ts1 ts2 /\ taintstate_monotone ts2 ts3 ==> taintstate_monotone ts1 ts3) = ()
let isConstant_monotone (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isConstantTimeGivenStates code fuel ts2.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isConstantTimeGivenStates code fuel ts1.lts s1 s2)
= ()
let isExplicit_monotone (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code)
(fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts.lts ts1.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts.lts ts2.lts s1 s2)
= ()
let isExplicit_monotone2 (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints)
(code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts2.lts ts.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts1.lts ts.lts s1 s2)
= ()
let combine_leakage_taints (ts1:leakage_taints) (ts2:leakage_taints) : leakage_taints =
let LeakageTaints rs1 fs1 c1 o1 = ts1 in
let LeakageTaints rs2 fs2 c2 o2 = ts2 in
let rs = combine_reg_taints rs1 rs2 in
LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
let combine_analysis_taints (ts1:analysis_taints) (ts2:analysis_taints)
: (ts:analysis_taints{taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\ ts.lts == combine_leakage_taints ts1.lts ts2.lts})
=
let AnalysisTaints (LeakageTaints rs1_old fs1 c1 o1) rts1 = ts1 in
let AnalysisTaints (LeakageTaints rs2_old fs2 c2 o2) rts2 = ts2 in
let rts1 = ts1.rts in
let rts2 = ts2.rts in
let rs1 = map_to_regs rts1 in // \
let rs2 = map_to_regs rts2 in // - build efficient representations of reg_taint before calling combine_reg_taints
assert (FStar.FunctionalExtensionality.feq rs1 rs1_old);
assert (FStar.FunctionalExtensionality.feq rs2 rs2_old);
let rs = combine_reg_taints rs1 rs2 in
let rts = regs_to_map rs in
let lts = LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
in
AnalysisTaints lts rts
let count_public_register (regs:reg_taint) (r:reg) = if Public? (regs r) then 1 else 0
let rec count_public_registers_file (regs:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : nat =
if k = 0 then 0
else count_public_register regs (Reg rf (k - 1)) + count_public_registers_file regs rf (k - 1)
let rec lemma_count_public_registers_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k
)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i:nat).{:pattern regs1 (Reg rf i) \/ regs2 (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))
)
=
if k > 0 then lemma_count_public_registers_file regs1 regs2 rf (k - 1)
let rec count_public_registers (regs:reg_taint) (k:nat{k <= n_reg_files}) : nat =
if k = 0 then 0
else count_public_registers_file regs (k - 1) (n_regs (k - 1)) + count_public_registers regs (k - 1)
let rec lemma_count_public_registers (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers regs1 k >= count_public_registers regs2 k
)
(ensures
count_public_registers regs1 k == count_public_registers regs2 k /\
(forall (r:reg).{:pattern regs1 r \/ regs2 r} Reg?.rf r < k ==> regs1 r == regs2 r)
)
=
if k > 0 then (
let n = n_regs (k - 1) in
if count_public_registers_file regs1 (k - 1) n >= count_public_registers_file regs2 (k - 1) n then
lemma_count_public_registers_file regs1 regs2 (k - 1) n;
lemma_count_public_registers regs1 regs2 (k - 1)
)
let count_flagTaint (ts:analysis_taints) : nat = if Public? ts.lts.flagsTaint then 1 else 0
let count_cfFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.cfFlagsTaint then 1 else 0
let count_ofFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.ofFlagsTaint then 1 else 0
let count_publics (ts:analysis_taints) : nat =
count_public_registers ts.lts.regTaint n_reg_files +
count_flagTaint ts +
count_cfFlagTaint ts +
count_ofFlagTaint ts | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Leakage_s.fst.checked",
"Vale.X64.Leakage_Ins.fsti.checked",
"Vale.X64.Leakage_Helpers.fst.checked",
"Vale.Lib.MapTree.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Leakage.fst"
} | [
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ts: Vale.X64.Leakage_Helpers.analysis_taints -> ts': Vale.X64.Leakage_Helpers.analysis_taints
-> FStar.Pervasives.Lemma
(requires
Vale.X64.Leakage.taintstate_monotone ts ts' /\
Prims.op_Negation (Vale.X64.Leakage.eq_leakage_taints (AnalysisTaints?.lts ts)
(AnalysisTaints?.lts ts')))
(ensures Vale.X64.Leakage.count_publics ts' < Vale.X64.Leakage.count_publics ts) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Vale.X64.Leakage_Helpers.analysis_taints",
"Prims.op_GreaterThanOrEqual",
"Vale.X64.Leakage.count_public_registers",
"Vale.X64.Machine_s.n_reg_files",
"Prims._assert",
"FStar.FunctionalExtensionality.feq",
"Vale.X64.Machine_s.reg",
"Vale.Arch.HeapTypes_s.taint",
"Prims.unit",
"Vale.X64.Leakage.lemma_count_public_registers",
"Prims.bool",
"Vale.X64.Leakage_s.reg_taint",
"Vale.X64.Leakage_s.__proj__LeakageTaints__item__regTaint",
"Vale.X64.Leakage_Helpers.__proj__AnalysisTaints__item__lts",
"Prims.l_and",
"Vale.X64.Leakage.taintstate_monotone",
"Prims.b2t",
"Prims.op_Negation",
"Vale.X64.Leakage.eq_leakage_taints",
"Prims.squash",
"Prims.op_LessThan",
"Vale.X64.Leakage.count_publics",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | false | false | true | false | false | let monotone_decreases_count (ts ts': analysis_taints)
: Lemma (requires taintstate_monotone ts ts' /\ not (eq_leakage_taints ts.lts ts'.lts))
(ensures count_publics ts' < count_publics ts) =
| let regs1 = ts'.lts.regTaint in
let regs2 = ts.lts.regTaint in
if count_public_registers regs1 n_reg_files >= count_public_registers regs2 n_reg_files
then
(lemma_count_public_registers regs1 regs2 n_reg_files;
assert (FStar.FunctionalExtensionality.feq regs1 regs2)) | false |
Vale.X64.Leakage.fst | Vale.X64.Leakage.lemma_code_leakage_free | val lemma_code_leakage_free: (ts:analysis_taints) -> (code:S.code) -> Lemma
(let b, ts' = check_if_code_consumes_fixed_time code ts in
(b2t b ==> isConstantTime code ts.lts /\ isLeakageFree code ts.lts ts'.lts)) | val lemma_code_leakage_free: (ts:analysis_taints) -> (code:S.code) -> Lemma
(let b, ts' = check_if_code_consumes_fixed_time code ts in
(b2t b ==> isConstantTime code ts.lts /\ isLeakageFree code ts.lts ts'.lts)) | let lemma_code_leakage_free ts code = FStar.Classical.forall_intro_3 (lemma_code_explicit_leakage_free ts code) | {
"file_name": "vale/code/arch/x64/Vale.X64.Leakage.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 111,
"end_line": 437,
"start_col": 0,
"start_line": 437
} | module Vale.X64.Leakage
open FStar.Mul
open Vale.X64.Machine_s
module S = Vale.X64.Machine_Semantics_s
open Vale.X64.Leakage_s
open Vale.X64.Leakage_Helpers
open Vale.X64.Leakage_Ins
unfold let machine_eval_ocmp = S.machine_eval_ocmp
unfold let machine_eval_code = S.machine_eval_code
unfold let machine_eval_codes = S.machine_eval_codes
unfold let machine_eval_while = S.machine_eval_while
#reset-options "--initial_ifuel 0 --max_ifuel 1 --initial_fuel 1 --max_fuel 1"
let normalize_taints (ts:analysis_taints) : analysis_taints =
let AnalysisTaints lts rts = ts in
AnalysisTaints lts (regs_to_map (map_to_regs rts))
let combine_reg_taints (regs1 regs2:reg_taint) : reg_taint =
FunctionalExtensionality.on reg (fun x -> merge_taint (regs1 x) (regs2 x))
let rec eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : bool =
if k = 0 then true
else regs1 (Reg rf (k - 1)) = regs2 (Reg rf (k - 1)) && eq_regs_file regs1 regs2 rf (k - 1)
let rec eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : bool =
if k = 0 then true
else eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1)) && eq_regs regs1 regs2 (k - 1)
let rec lemma_eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(ensures eq_regs_file regs1 regs2 rf k <==>
(forall (i:nat).{:pattern (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i)))
=
if k > 0 then lemma_eq_regs_file regs1 regs2 rf (k - 1)
let rec lemma_eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(ensures
eq_regs regs1 regs2 k <==>
(forall (i j:nat).{:pattern (Reg i j)} i < k /\ j < n_regs i ==>
regs1 (Reg i j) == regs2 (Reg i j)))
=
if k > 0 then (
lemma_eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1));
lemma_eq_regs regs1 regs2 (k - 1)
)
let eq_registers (regs1 regs2:reg_taint) : (b:bool{b <==> regs1 == regs2}) =
lemma_eq_regs regs1 regs2 n_reg_files;
let b = eq_regs regs1 regs2 n_reg_files in
if b then (
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
);
b
let eq_leakage_taints (ts1 ts2:leakage_taints) : (b:bool{b <==> ts1 == ts2}) =
eq_registers ts1.regTaint ts2.regTaint &&
ts1.flagsTaint = ts2.flagsTaint &&
ts1.cfFlagsTaint = ts2.cfFlagsTaint &&
ts1.ofFlagsTaint = ts2.ofFlagsTaint
let taintstate_monotone_regs (ts ts':reg_taint) =
(forall (r:reg).{:pattern (ts' r) \/ (ts r)}
Public? (ts' r) ==> Public? (ts r))
let taintstate_monotone (ts ts':analysis_taints) =
let ts = ts.lts in
let ts' = ts'.lts in
taintstate_monotone_regs ts.regTaint ts'.regTaint /\
(Public? (ts'.flagsTaint) ==> Public? (ts.flagsTaint)) /\
(Public? (ts'.cfFlagsTaint) ==> Public? (ts.cfFlagsTaint)) /\
(Public? (ts'.ofFlagsTaint) ==> Public? (ts.ofFlagsTaint))
let taintstate_monotone_trans (ts1:analysis_taints) (ts2:analysis_taints) (ts3:analysis_taints)
: Lemma (taintstate_monotone ts1 ts2 /\ taintstate_monotone ts2 ts3 ==> taintstate_monotone ts1 ts3) = ()
let isConstant_monotone (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isConstantTimeGivenStates code fuel ts2.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isConstantTimeGivenStates code fuel ts1.lts s1 s2)
= ()
let isExplicit_monotone (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code)
(fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts.lts ts1.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts.lts ts2.lts s1 s2)
= ()
let isExplicit_monotone2 (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints)
(code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts2.lts ts.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts1.lts ts.lts s1 s2)
= ()
let combine_leakage_taints (ts1:leakage_taints) (ts2:leakage_taints) : leakage_taints =
let LeakageTaints rs1 fs1 c1 o1 = ts1 in
let LeakageTaints rs2 fs2 c2 o2 = ts2 in
let rs = combine_reg_taints rs1 rs2 in
LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
let combine_analysis_taints (ts1:analysis_taints) (ts2:analysis_taints)
: (ts:analysis_taints{taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\ ts.lts == combine_leakage_taints ts1.lts ts2.lts})
=
let AnalysisTaints (LeakageTaints rs1_old fs1 c1 o1) rts1 = ts1 in
let AnalysisTaints (LeakageTaints rs2_old fs2 c2 o2) rts2 = ts2 in
let rts1 = ts1.rts in
let rts2 = ts2.rts in
let rs1 = map_to_regs rts1 in // \
let rs2 = map_to_regs rts2 in // - build efficient representations of reg_taint before calling combine_reg_taints
assert (FStar.FunctionalExtensionality.feq rs1 rs1_old);
assert (FStar.FunctionalExtensionality.feq rs2 rs2_old);
let rs = combine_reg_taints rs1 rs2 in
let rts = regs_to_map rs in
let lts = LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
in
AnalysisTaints lts rts
let count_public_register (regs:reg_taint) (r:reg) = if Public? (regs r) then 1 else 0
let rec count_public_registers_file (regs:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : nat =
if k = 0 then 0
else count_public_register regs (Reg rf (k - 1)) + count_public_registers_file regs rf (k - 1)
let rec lemma_count_public_registers_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k
)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i:nat).{:pattern regs1 (Reg rf i) \/ regs2 (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))
)
=
if k > 0 then lemma_count_public_registers_file regs1 regs2 rf (k - 1)
let rec count_public_registers (regs:reg_taint) (k:nat{k <= n_reg_files}) : nat =
if k = 0 then 0
else count_public_registers_file regs (k - 1) (n_regs (k - 1)) + count_public_registers regs (k - 1)
let rec lemma_count_public_registers (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers regs1 k >= count_public_registers regs2 k
)
(ensures
count_public_registers regs1 k == count_public_registers regs2 k /\
(forall (r:reg).{:pattern regs1 r \/ regs2 r} Reg?.rf r < k ==> regs1 r == regs2 r)
)
=
if k > 0 then (
let n = n_regs (k - 1) in
if count_public_registers_file regs1 (k - 1) n >= count_public_registers_file regs2 (k - 1) n then
lemma_count_public_registers_file regs1 regs2 (k - 1) n;
lemma_count_public_registers regs1 regs2 (k - 1)
)
let count_flagTaint (ts:analysis_taints) : nat = if Public? ts.lts.flagsTaint then 1 else 0
let count_cfFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.cfFlagsTaint then 1 else 0
let count_ofFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.ofFlagsTaint then 1 else 0
let count_publics (ts:analysis_taints) : nat =
count_public_registers ts.lts.regTaint n_reg_files +
count_flagTaint ts +
count_cfFlagTaint ts +
count_ofFlagTaint ts
let monotone_decreases_count (ts ts':analysis_taints) : Lemma
(requires taintstate_monotone ts ts' /\ not (eq_leakage_taints ts.lts ts'.lts))
(ensures count_publics ts' < count_publics ts)
=
let regs1 = ts'.lts.regTaint in
let regs2 = ts.lts.regTaint in
if count_public_registers regs1 n_reg_files >= count_public_registers regs2 n_reg_files then (
lemma_count_public_registers regs1 regs2 n_reg_files;
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
)
val check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[block])
val check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 1])
val check_if_loop_consumes_fixed_time (code:S.code{While? code}) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 0])
#set-options "--z3refresh --z3rlimit 600"
let rec check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : bool & analysis_taints =
match block with
| [] -> true, ts
| hd::tl -> let fixedTime, ts_int = check_if_code_consumes_fixed_time hd ts in
if (not fixedTime) then fixedTime, ts_int
else check_if_block_consumes_fixed_time tl ts_int
and check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : bool & analysis_taints =
match code with
| Ins ins -> let b, ts = check_if_ins_consumes_fixed_time ins ts in b, ts
| Block block -> check_if_block_consumes_fixed_time block ts
| IfElse ifCond ifTrue ifFalse ->
let o1 = operand_taint 0 (S.get_fst_ocmp ifCond) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp ifCond) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then (false, ts)
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp ifCond) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp ifCond) ts in
if (not o2Public) then (false, ts)
else
let validIfTrue, tsIfTrue = check_if_code_consumes_fixed_time ifTrue ts in
if (not validIfTrue) then (false, ts)
else
let validIfFalse, tsIfFalse = check_if_code_consumes_fixed_time ifFalse ts in
if (not validIfFalse) then (false, ts)
else
(true, combine_analysis_taints tsIfTrue tsIfFalse)
| While cond body -> check_if_loop_consumes_fixed_time code ts
and check_if_loop_consumes_fixed_time c (ts:analysis_taints) : (bool & analysis_taints) =
let ts = normalize_taints ts in
let While pred body = c in
let o1 = operand_taint 0 (S.get_fst_ocmp pred) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp pred) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then false, ts
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp pred) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp pred) ts in
if (not o2Public) then (false, ts)
else
let fixedTime, next_ts = check_if_code_consumes_fixed_time body ts in
if (not fixedTime) then (false, ts)
else
let combined_ts = combine_analysis_taints ts next_ts in
assert (taintstate_monotone ts combined_ts);
if eq_leakage_taints combined_ts.lts ts.lts then
true, combined_ts
else (
monotone_decreases_count ts combined_ts;
check_if_loop_consumes_fixed_time c combined_ts
)
val monotone_ok_eval: (code:S.code) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_code code fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[code; 0])
val monotone_ok_eval_block: (codes:S.codes) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_codes codes fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[codes;1])
#set-options "--z3rlimit 20 --initial_ifuel 0 --max_ifuel 1 --initial_fuel 2 --max_fuel 2"
let rec monotone_ok_eval code fuel s =
match code with
| Ins ins -> reveal_opaque (`%S.machine_eval_code_ins) S.machine_eval_code_ins
| Block block -> monotone_ok_eval_block block fuel s
| IfElse ifCond ifTrue ifFalse ->
let (st, b) = machine_eval_ocmp s ifCond in
if b then monotone_ok_eval ifTrue fuel st else monotone_ok_eval ifFalse fuel st
| While cond body ->
if fuel = 0 then ()
else
let (st, b) = machine_eval_ocmp s cond in
if not b then () else
monotone_ok_eval body (fuel - 1) st;
()
and monotone_ok_eval_block block fuel s =
match block with
| [] -> ()
| hd :: tl ->
let s' = machine_eval_code hd fuel s in
if None? s' then () else
monotone_ok_eval_block tl fuel (Some?.v s');
monotone_ok_eval hd fuel s
val monotone_ok_eval_while: (code:S.code{While? code}) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (
let While cond body = code in
let (s1, b1) = machine_eval_ocmp s cond in
let r1 = machine_eval_code code fuel s in
Some? r1 /\ (Some?.v r1).S.ms_ok ==> s1.S.ms_ok))
let monotone_ok_eval_while code fuel s =
let While cond body = code in
let (s1, b) = machine_eval_ocmp s cond in
let r1 = machine_eval_while cond body fuel s in
if fuel = 0 then () else
if not b then () else
match machine_eval_code body (fuel - 1) s1 with
| None -> ()
| Some s ->
if not s.S.ms_ok then ()
else (monotone_ok_eval body (fuel - 1) s1; monotone_ok_eval code (fuel - 1) s)
val lemma_loop_taintstate_monotone (ts:analysis_taints) (code:S.code{While? code}) : Lemma
(requires True)
(ensures (let _, ts' = check_if_loop_consumes_fixed_time code ts in
taintstate_monotone ts ts'))
(decreases %[count_publics ts])
#reset-options "--initial_ifuel 1 --max_ifuel 1 --initial_fuel 2 --max_fuel 2 --z3rlimit 40"
let rec lemma_loop_taintstate_monotone ts code =
let ts = normalize_taints ts in
let While pred body = code in
let b, ts' = check_if_code_consumes_fixed_time body ts in
let combined_ts = combine_analysis_taints ts ts' in
if eq_leakage_taints combined_ts.lts ts.lts then ()
else (
monotone_decreases_count ts combined_ts;
let b, ts_fin = check_if_loop_consumes_fixed_time code combined_ts in
lemma_loop_taintstate_monotone combined_ts code;
taintstate_monotone_trans ts combined_ts ts_fin
)
#reset-options "--initial_ifuel 1 --max_ifuel 1 --initial_fuel 2 --max_fuel 2 --z3rlimit 60"
val lemma_code_explicit_leakage_free: (ts:analysis_taints) -> (code:S.code) -> (s1:S.machine_state) -> (s2:S.machine_state) -> (fuel:nat) -> Lemma
(requires True)
(ensures (let b, ts' = check_if_code_consumes_fixed_time code ts in
(b2t b ==> isConstantTimeGivenStates code fuel ts.lts s1 s2 /\ isExplicitLeakageFreeGivenStates code fuel ts.lts ts'.lts s1 s2)))
(decreases %[fuel; code; 1])
val lemma_block_explicit_leakage_free: (ts:analysis_taints) -> (codes:S.codes) -> (s1:S.machine_state) -> (s2:S.machine_state) -> (fuel:nat) -> Lemma
(requires True)
(ensures (let b, ts' = check_if_block_consumes_fixed_time codes ts in
(b2t b ==> isConstantTimeGivenStates (Block codes) fuel ts.lts s1 s2 /\ isExplicitLeakageFreeGivenStates (Block codes) fuel ts.lts ts'.lts s1 s2)))
(decreases %[fuel; codes; 2])
val lemma_loop_explicit_leakage_free: (ts:analysis_taints) -> (code:S.code{While? code}) -> (s1:S.machine_state) -> (s2:S.machine_state) -> (fuel:nat) -> Lemma
(requires True)
(ensures (let b, ts' = check_if_loop_consumes_fixed_time code ts in
(b2t b ==> isConstantTimeGivenStates code fuel ts.lts s1 s2 /\ isExplicitLeakageFreeGivenStates code fuel ts.lts ts'.lts s1 s2)))
(decreases %[fuel; code; 0])
#reset-options "--initial_ifuel 2 --max_ifuel 2 --initial_fuel 1 --max_fuel 2 --z3rlimit 300"
let rec lemma_code_explicit_leakage_free ts code s1 s2 fuel = match code with
| Ins ins -> lemma_ins_leakage_free ts ins
| Block block -> lemma_block_explicit_leakage_free ts block s1 s2 fuel
| IfElse ifCond ifTrue ifFalse ->
reveal_opaque (`%S.valid_ocmp_opaque) S.valid_ocmp_opaque;
reveal_opaque (`%S.eval_ocmp_opaque) S.eval_ocmp_opaque;
let (b_fin, ts_fin) = check_if_code_consumes_fixed_time code ts in
let (st1, b1) = machine_eval_ocmp s1 ifCond in
let (st2, b2) = machine_eval_ocmp s2 ifCond in
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts.lts st1 st2);
monotone_ok_eval ifTrue fuel st1;
monotone_ok_eval ifTrue fuel st2;
lemma_code_explicit_leakage_free ts ifTrue st1 st2 fuel;
monotone_ok_eval ifFalse fuel st1;
monotone_ok_eval ifFalse fuel st2;
lemma_code_explicit_leakage_free ts ifFalse st1 st2 fuel
| While _ _ -> lemma_loop_explicit_leakage_free ts code s1 s2 fuel
and lemma_block_explicit_leakage_free ts block s1 s2 fuel = match block with
| [] -> ()
| hd :: tl ->
let b, ts' = check_if_code_consumes_fixed_time hd ts in
lemma_code_explicit_leakage_free ts hd s1 s2 fuel;
let s'1 = machine_eval_code hd fuel s1 in
let s'2 = machine_eval_code hd fuel s2 in
if None? s'1 || None? s'2 then ()
else
let s'1 = Some?.v s'1 in
let s'2 = Some?.v s'2 in
lemma_block_explicit_leakage_free ts' tl s'1 s'2 fuel;
monotone_ok_eval (Block tl) fuel s'1;
monotone_ok_eval (Block tl) fuel s'2
and lemma_loop_explicit_leakage_free ts code s1 s2 fuel =
reveal_opaque (`%S.valid_ocmp_opaque) S.valid_ocmp_opaque;
reveal_opaque (`%S.eval_ocmp_opaque) S.eval_ocmp_opaque;
let ts = normalize_taints ts in
if fuel = 0 then () else
let (b_fin, ts_fin) = check_if_code_consumes_fixed_time code ts in
let r1 = machine_eval_code code fuel s1 in
let r2 = machine_eval_code code fuel s2 in
let While cond body = code in
let (st1, b1) = machine_eval_ocmp s1 cond in
let (st2, b2) = machine_eval_ocmp s2 cond in
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> b1 = b2);
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts.lts st1 st2);
if not b1 || not b2 then
(
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> not b1 /\ not b2);
assert (not b1 ==> r1 == Some st1);
assert (not b2 ==> r2 == Some st2);
monotone_ok_eval_while code fuel s1;
assert (Some? r1 /\ (Some?.v r1).S.ms_ok ==> st1.S.ms_ok);
monotone_ok_eval_while code fuel s2;
assert (Some? r2 /\ (Some?.v r2).S.ms_ok ==> st2.S.ms_ok);
lemma_loop_taintstate_monotone ts code;
isExplicit_monotone ts ts ts_fin code fuel s1 s2;
()
)
else
(
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts.lts st1 st2);
let (b', ts') = check_if_code_consumes_fixed_time body ts in
lemma_code_explicit_leakage_free ts body st1 st2 (fuel - 1);
monotone_ok_eval body (fuel - 1) st1;
monotone_ok_eval body (fuel - 1) st2;
let st1 = machine_eval_code body (fuel - 1) st1 in
let st2 = machine_eval_code body (fuel - 1) st2 in
assert (None? st1 ==> r1 == st1);
assert (None? st2 ==> r2 == st2);
if (None? st1 || None? st2) then () else
let st1 = Some?.v st1 in
let st2 = Some?.v st2 in
if not st1.S.ms_ok || not st2.S.ms_ok then () else
let combined_ts = combine_analysis_taints ts ts' in
let (b_aux, ts_aux) = check_if_loop_consumes_fixed_time code combined_ts in
lemma_loop_explicit_leakage_free combined_ts code st1 st2 (fuel - 1);
isConstant_monotone ts combined_ts code (fuel - 1) st1 st2;
isExplicit_monotone2 ts_aux ts combined_ts code (fuel - 1) st1 st2;
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts'.lts st1 st2)
)
val lemma_code_leakage_free: (ts:analysis_taints) -> (code:S.code) -> Lemma
(let b, ts' = check_if_code_consumes_fixed_time code ts in
(b2t b ==> isConstantTime code ts.lts /\ isLeakageFree code ts.lts ts'.lts)) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Leakage_s.fst.checked",
"Vale.X64.Leakage_Ins.fsti.checked",
"Vale.X64.Leakage_Helpers.fst.checked",
"Vale.Lib.MapTree.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Leakage.fst"
} | [
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 2,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ts: Vale.X64.Leakage_Helpers.analysis_taints -> code: Vale.X64.Machine_Semantics_s.code
-> FStar.Pervasives.Lemma
(ensures
(let _ = Vale.X64.Leakage.check_if_code_consumes_fixed_time code ts in
(let FStar.Pervasives.Native.Mktuple2 #_ #_ b ts' = _ in
b ==>
Vale.X64.Leakage_s.isConstantTime code (AnalysisTaints?.lts ts) /\
Vale.X64.Leakage_s.isLeakageFree code (AnalysisTaints?.lts ts) (AnalysisTaints?.lts ts'))
<:
Type0)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Vale.X64.Leakage_Helpers.analysis_taints",
"Vale.X64.Machine_Semantics_s.code",
"FStar.Classical.forall_intro_3",
"Vale.X64.Machine_Semantics_s.machine_state",
"Prims.nat",
"Vale.X64.Leakage.check_if_code_consumes_fixed_time",
"Prims.bool",
"Prims.l_imp",
"Prims.b2t",
"Prims.l_and",
"Vale.X64.Leakage_s.isConstantTimeGivenStates",
"Vale.X64.Leakage_Helpers.__proj__AnalysisTaints__item__lts",
"Vale.X64.Leakage_s.isExplicitLeakageFreeGivenStates",
"Vale.X64.Leakage.lemma_code_explicit_leakage_free",
"Prims.unit"
] | [] | false | false | true | false | false | let lemma_code_leakage_free ts code =
| FStar.Classical.forall_intro_3 (lemma_code_explicit_leakage_free ts code) | false |
Vale.X64.Leakage.fst | Vale.X64.Leakage.combine_analysis_taints | val combine_analysis_taints (ts1 ts2: analysis_taints)
: (ts:
analysis_taints
{ taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\
ts.lts == combine_leakage_taints ts1.lts ts2.lts }) | val combine_analysis_taints (ts1 ts2: analysis_taints)
: (ts:
analysis_taints
{ taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\
ts.lts == combine_leakage_taints ts1.lts ts2.lts }) | let combine_analysis_taints (ts1:analysis_taints) (ts2:analysis_taints)
: (ts:analysis_taints{taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\ ts.lts == combine_leakage_taints ts1.lts ts2.lts})
=
let AnalysisTaints (LeakageTaints rs1_old fs1 c1 o1) rts1 = ts1 in
let AnalysisTaints (LeakageTaints rs2_old fs2 c2 o2) rts2 = ts2 in
let rts1 = ts1.rts in
let rts2 = ts2.rts in
let rs1 = map_to_regs rts1 in // \
let rs2 = map_to_regs rts2 in // - build efficient representations of reg_taint before calling combine_reg_taints
assert (FStar.FunctionalExtensionality.feq rs1 rs1_old);
assert (FStar.FunctionalExtensionality.feq rs2 rs2_old);
let rs = combine_reg_taints rs1 rs2 in
let rts = regs_to_map rs in
let lts = LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
in
AnalysisTaints lts rts | {
"file_name": "vale/code/arch/x64/Vale.X64.Leakage.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 24,
"end_line": 120,
"start_col": 0,
"start_line": 101
} | module Vale.X64.Leakage
open FStar.Mul
open Vale.X64.Machine_s
module S = Vale.X64.Machine_Semantics_s
open Vale.X64.Leakage_s
open Vale.X64.Leakage_Helpers
open Vale.X64.Leakage_Ins
unfold let machine_eval_ocmp = S.machine_eval_ocmp
unfold let machine_eval_code = S.machine_eval_code
unfold let machine_eval_codes = S.machine_eval_codes
unfold let machine_eval_while = S.machine_eval_while
#reset-options "--initial_ifuel 0 --max_ifuel 1 --initial_fuel 1 --max_fuel 1"
let normalize_taints (ts:analysis_taints) : analysis_taints =
let AnalysisTaints lts rts = ts in
AnalysisTaints lts (regs_to_map (map_to_regs rts))
let combine_reg_taints (regs1 regs2:reg_taint) : reg_taint =
FunctionalExtensionality.on reg (fun x -> merge_taint (regs1 x) (regs2 x))
let rec eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : bool =
if k = 0 then true
else regs1 (Reg rf (k - 1)) = regs2 (Reg rf (k - 1)) && eq_regs_file regs1 regs2 rf (k - 1)
let rec eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : bool =
if k = 0 then true
else eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1)) && eq_regs regs1 regs2 (k - 1)
let rec lemma_eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(ensures eq_regs_file regs1 regs2 rf k <==>
(forall (i:nat).{:pattern (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i)))
=
if k > 0 then lemma_eq_regs_file regs1 regs2 rf (k - 1)
let rec lemma_eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(ensures
eq_regs regs1 regs2 k <==>
(forall (i j:nat).{:pattern (Reg i j)} i < k /\ j < n_regs i ==>
regs1 (Reg i j) == regs2 (Reg i j)))
=
if k > 0 then (
lemma_eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1));
lemma_eq_regs regs1 regs2 (k - 1)
)
let eq_registers (regs1 regs2:reg_taint) : (b:bool{b <==> regs1 == regs2}) =
lemma_eq_regs regs1 regs2 n_reg_files;
let b = eq_regs regs1 regs2 n_reg_files in
if b then (
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
);
b
let eq_leakage_taints (ts1 ts2:leakage_taints) : (b:bool{b <==> ts1 == ts2}) =
eq_registers ts1.regTaint ts2.regTaint &&
ts1.flagsTaint = ts2.flagsTaint &&
ts1.cfFlagsTaint = ts2.cfFlagsTaint &&
ts1.ofFlagsTaint = ts2.ofFlagsTaint
let taintstate_monotone_regs (ts ts':reg_taint) =
(forall (r:reg).{:pattern (ts' r) \/ (ts r)}
Public? (ts' r) ==> Public? (ts r))
let taintstate_monotone (ts ts':analysis_taints) =
let ts = ts.lts in
let ts' = ts'.lts in
taintstate_monotone_regs ts.regTaint ts'.regTaint /\
(Public? (ts'.flagsTaint) ==> Public? (ts.flagsTaint)) /\
(Public? (ts'.cfFlagsTaint) ==> Public? (ts.cfFlagsTaint)) /\
(Public? (ts'.ofFlagsTaint) ==> Public? (ts.ofFlagsTaint))
let taintstate_monotone_trans (ts1:analysis_taints) (ts2:analysis_taints) (ts3:analysis_taints)
: Lemma (taintstate_monotone ts1 ts2 /\ taintstate_monotone ts2 ts3 ==> taintstate_monotone ts1 ts3) = ()
let isConstant_monotone (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isConstantTimeGivenStates code fuel ts2.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isConstantTimeGivenStates code fuel ts1.lts s1 s2)
= ()
let isExplicit_monotone (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code)
(fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts.lts ts1.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts.lts ts2.lts s1 s2)
= ()
let isExplicit_monotone2 (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints)
(code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts2.lts ts.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts1.lts ts.lts s1 s2)
= ()
let combine_leakage_taints (ts1:leakage_taints) (ts2:leakage_taints) : leakage_taints =
let LeakageTaints rs1 fs1 c1 o1 = ts1 in
let LeakageTaints rs2 fs2 c2 o2 = ts2 in
let rs = combine_reg_taints rs1 rs2 in
LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Leakage_s.fst.checked",
"Vale.X64.Leakage_Ins.fsti.checked",
"Vale.X64.Leakage_Helpers.fst.checked",
"Vale.Lib.MapTree.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Leakage.fst"
} | [
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ts1: Vale.X64.Leakage_Helpers.analysis_taints -> ts2: Vale.X64.Leakage_Helpers.analysis_taints
-> ts:
Vale.X64.Leakage_Helpers.analysis_taints
{ Vale.X64.Leakage.taintstate_monotone ts1 ts /\ Vale.X64.Leakage.taintstate_monotone ts2 ts /\
AnalysisTaints?.lts ts ==
Vale.X64.Leakage.combine_leakage_taints (AnalysisTaints?.lts ts1) (AnalysisTaints?.lts ts2)
} | Prims.Tot | [
"total"
] | [] | [
"Vale.X64.Leakage_Helpers.analysis_taints",
"Vale.X64.Leakage_s.reg_taint",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Leakage_Helpers.regmap",
"Vale.X64.Leakage_Helpers.is_map_of",
"Vale.X64.Leakage_s.__proj__LeakageTaints__item__regTaint",
"Vale.X64.Leakage_s.LeakageTaints",
"Vale.X64.Leakage_Helpers.AnalysisTaints",
"Vale.X64.Leakage_s.leakage_taints",
"Vale.X64.Leakage_Helpers.merge_taint",
"Vale.X64.Leakage_Helpers.regs_to_map",
"Vale.X64.Leakage.combine_reg_taints",
"Prims.unit",
"Prims._assert",
"FStar.FunctionalExtensionality.feq",
"Vale.X64.Machine_s.reg",
"Vale.X64.Leakage_Helpers.map_to_regs",
"Vale.X64.Leakage_Helpers.__proj__AnalysisTaints__item__lts",
"Vale.X64.Leakage_Helpers.__proj__AnalysisTaints__item__rts",
"Prims.l_and",
"Vale.X64.Leakage.taintstate_monotone",
"Prims.eq2",
"Vale.X64.Leakage.combine_leakage_taints"
] | [] | false | false | false | false | false | let combine_analysis_taints (ts1 ts2: analysis_taints)
: (ts:
analysis_taints
{ taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\
ts.lts == combine_leakage_taints ts1.lts ts2.lts }) =
| let AnalysisTaints (LeakageTaints rs1_old fs1 c1 o1) rts1 = ts1 in
let AnalysisTaints (LeakageTaints rs2_old fs2 c2 o2) rts2 = ts2 in
let rts1 = ts1.rts in
let rts2 = ts2.rts in
let rs1 = map_to_regs rts1 in
let rs2 = map_to_regs rts2 in
assert (FStar.FunctionalExtensionality.feq rs1 rs1_old);
assert (FStar.FunctionalExtensionality.feq rs2 rs2_old);
let rs = combine_reg_taints rs1 rs2 in
let rts = regs_to_map rs in
let lts = LeakageTaints rs (merge_taint fs1 fs2) (merge_taint c1 c2) (merge_taint o1 o2) in
AnalysisTaints lts rts | false |
Vale.X64.Leakage.fst | Vale.X64.Leakage.check_if_code_is_leakage_free | val check_if_code_is_leakage_free (code:S.code) (ts:analysis_taints) (public_return:bool) : bool | val check_if_code_is_leakage_free (code:S.code) (ts:analysis_taints) (public_return:bool) : bool | let check_if_code_is_leakage_free code ts public_return =
let b, ts' = check_if_code_consumes_fixed_time code ts in
if public_return then
b && Public? (Vale.Lib.MapTree.sel ts'.rts reg_Rax)
else b | {
"file_name": "vale/code/arch/x64/Vale.X64.Leakage.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 8,
"end_line": 454,
"start_col": 0,
"start_line": 450
} | module Vale.X64.Leakage
open FStar.Mul
open Vale.X64.Machine_s
module S = Vale.X64.Machine_Semantics_s
open Vale.X64.Leakage_s
open Vale.X64.Leakage_Helpers
open Vale.X64.Leakage_Ins
unfold let machine_eval_ocmp = S.machine_eval_ocmp
unfold let machine_eval_code = S.machine_eval_code
unfold let machine_eval_codes = S.machine_eval_codes
unfold let machine_eval_while = S.machine_eval_while
#reset-options "--initial_ifuel 0 --max_ifuel 1 --initial_fuel 1 --max_fuel 1"
let normalize_taints (ts:analysis_taints) : analysis_taints =
let AnalysisTaints lts rts = ts in
AnalysisTaints lts (regs_to_map (map_to_regs rts))
let combine_reg_taints (regs1 regs2:reg_taint) : reg_taint =
FunctionalExtensionality.on reg (fun x -> merge_taint (regs1 x) (regs2 x))
let rec eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : bool =
if k = 0 then true
else regs1 (Reg rf (k - 1)) = regs2 (Reg rf (k - 1)) && eq_regs_file regs1 regs2 rf (k - 1)
let rec eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : bool =
if k = 0 then true
else eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1)) && eq_regs regs1 regs2 (k - 1)
let rec lemma_eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(ensures eq_regs_file regs1 regs2 rf k <==>
(forall (i:nat).{:pattern (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i)))
=
if k > 0 then lemma_eq_regs_file regs1 regs2 rf (k - 1)
let rec lemma_eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(ensures
eq_regs regs1 regs2 k <==>
(forall (i j:nat).{:pattern (Reg i j)} i < k /\ j < n_regs i ==>
regs1 (Reg i j) == regs2 (Reg i j)))
=
if k > 0 then (
lemma_eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1));
lemma_eq_regs regs1 regs2 (k - 1)
)
let eq_registers (regs1 regs2:reg_taint) : (b:bool{b <==> regs1 == regs2}) =
lemma_eq_regs regs1 regs2 n_reg_files;
let b = eq_regs regs1 regs2 n_reg_files in
if b then (
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
);
b
let eq_leakage_taints (ts1 ts2:leakage_taints) : (b:bool{b <==> ts1 == ts2}) =
eq_registers ts1.regTaint ts2.regTaint &&
ts1.flagsTaint = ts2.flagsTaint &&
ts1.cfFlagsTaint = ts2.cfFlagsTaint &&
ts1.ofFlagsTaint = ts2.ofFlagsTaint
let taintstate_monotone_regs (ts ts':reg_taint) =
(forall (r:reg).{:pattern (ts' r) \/ (ts r)}
Public? (ts' r) ==> Public? (ts r))
let taintstate_monotone (ts ts':analysis_taints) =
let ts = ts.lts in
let ts' = ts'.lts in
taintstate_monotone_regs ts.regTaint ts'.regTaint /\
(Public? (ts'.flagsTaint) ==> Public? (ts.flagsTaint)) /\
(Public? (ts'.cfFlagsTaint) ==> Public? (ts.cfFlagsTaint)) /\
(Public? (ts'.ofFlagsTaint) ==> Public? (ts.ofFlagsTaint))
let taintstate_monotone_trans (ts1:analysis_taints) (ts2:analysis_taints) (ts3:analysis_taints)
: Lemma (taintstate_monotone ts1 ts2 /\ taintstate_monotone ts2 ts3 ==> taintstate_monotone ts1 ts3) = ()
let isConstant_monotone (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isConstantTimeGivenStates code fuel ts2.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isConstantTimeGivenStates code fuel ts1.lts s1 s2)
= ()
let isExplicit_monotone (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code)
(fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts.lts ts1.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts.lts ts2.lts s1 s2)
= ()
let isExplicit_monotone2 (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints)
(code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts2.lts ts.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts1.lts ts.lts s1 s2)
= ()
let combine_leakage_taints (ts1:leakage_taints) (ts2:leakage_taints) : leakage_taints =
let LeakageTaints rs1 fs1 c1 o1 = ts1 in
let LeakageTaints rs2 fs2 c2 o2 = ts2 in
let rs = combine_reg_taints rs1 rs2 in
LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
let combine_analysis_taints (ts1:analysis_taints) (ts2:analysis_taints)
: (ts:analysis_taints{taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\ ts.lts == combine_leakage_taints ts1.lts ts2.lts})
=
let AnalysisTaints (LeakageTaints rs1_old fs1 c1 o1) rts1 = ts1 in
let AnalysisTaints (LeakageTaints rs2_old fs2 c2 o2) rts2 = ts2 in
let rts1 = ts1.rts in
let rts2 = ts2.rts in
let rs1 = map_to_regs rts1 in // \
let rs2 = map_to_regs rts2 in // - build efficient representations of reg_taint before calling combine_reg_taints
assert (FStar.FunctionalExtensionality.feq rs1 rs1_old);
assert (FStar.FunctionalExtensionality.feq rs2 rs2_old);
let rs = combine_reg_taints rs1 rs2 in
let rts = regs_to_map rs in
let lts = LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
in
AnalysisTaints lts rts
let count_public_register (regs:reg_taint) (r:reg) = if Public? (regs r) then 1 else 0
let rec count_public_registers_file (regs:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : nat =
if k = 0 then 0
else count_public_register regs (Reg rf (k - 1)) + count_public_registers_file regs rf (k - 1)
let rec lemma_count_public_registers_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k
)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i:nat).{:pattern regs1 (Reg rf i) \/ regs2 (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))
)
=
if k > 0 then lemma_count_public_registers_file regs1 regs2 rf (k - 1)
let rec count_public_registers (regs:reg_taint) (k:nat{k <= n_reg_files}) : nat =
if k = 0 then 0
else count_public_registers_file regs (k - 1) (n_regs (k - 1)) + count_public_registers regs (k - 1)
let rec lemma_count_public_registers (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers regs1 k >= count_public_registers regs2 k
)
(ensures
count_public_registers regs1 k == count_public_registers regs2 k /\
(forall (r:reg).{:pattern regs1 r \/ regs2 r} Reg?.rf r < k ==> regs1 r == regs2 r)
)
=
if k > 0 then (
let n = n_regs (k - 1) in
if count_public_registers_file regs1 (k - 1) n >= count_public_registers_file regs2 (k - 1) n then
lemma_count_public_registers_file regs1 regs2 (k - 1) n;
lemma_count_public_registers regs1 regs2 (k - 1)
)
let count_flagTaint (ts:analysis_taints) : nat = if Public? ts.lts.flagsTaint then 1 else 0
let count_cfFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.cfFlagsTaint then 1 else 0
let count_ofFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.ofFlagsTaint then 1 else 0
let count_publics (ts:analysis_taints) : nat =
count_public_registers ts.lts.regTaint n_reg_files +
count_flagTaint ts +
count_cfFlagTaint ts +
count_ofFlagTaint ts
let monotone_decreases_count (ts ts':analysis_taints) : Lemma
(requires taintstate_monotone ts ts' /\ not (eq_leakage_taints ts.lts ts'.lts))
(ensures count_publics ts' < count_publics ts)
=
let regs1 = ts'.lts.regTaint in
let regs2 = ts.lts.regTaint in
if count_public_registers regs1 n_reg_files >= count_public_registers regs2 n_reg_files then (
lemma_count_public_registers regs1 regs2 n_reg_files;
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
)
val check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[block])
val check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 1])
val check_if_loop_consumes_fixed_time (code:S.code{While? code}) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 0])
#set-options "--z3refresh --z3rlimit 600"
let rec check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : bool & analysis_taints =
match block with
| [] -> true, ts
| hd::tl -> let fixedTime, ts_int = check_if_code_consumes_fixed_time hd ts in
if (not fixedTime) then fixedTime, ts_int
else check_if_block_consumes_fixed_time tl ts_int
and check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : bool & analysis_taints =
match code with
| Ins ins -> let b, ts = check_if_ins_consumes_fixed_time ins ts in b, ts
| Block block -> check_if_block_consumes_fixed_time block ts
| IfElse ifCond ifTrue ifFalse ->
let o1 = operand_taint 0 (S.get_fst_ocmp ifCond) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp ifCond) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then (false, ts)
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp ifCond) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp ifCond) ts in
if (not o2Public) then (false, ts)
else
let validIfTrue, tsIfTrue = check_if_code_consumes_fixed_time ifTrue ts in
if (not validIfTrue) then (false, ts)
else
let validIfFalse, tsIfFalse = check_if_code_consumes_fixed_time ifFalse ts in
if (not validIfFalse) then (false, ts)
else
(true, combine_analysis_taints tsIfTrue tsIfFalse)
| While cond body -> check_if_loop_consumes_fixed_time code ts
and check_if_loop_consumes_fixed_time c (ts:analysis_taints) : (bool & analysis_taints) =
let ts = normalize_taints ts in
let While pred body = c in
let o1 = operand_taint 0 (S.get_fst_ocmp pred) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp pred) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then false, ts
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp pred) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp pred) ts in
if (not o2Public) then (false, ts)
else
let fixedTime, next_ts = check_if_code_consumes_fixed_time body ts in
if (not fixedTime) then (false, ts)
else
let combined_ts = combine_analysis_taints ts next_ts in
assert (taintstate_monotone ts combined_ts);
if eq_leakage_taints combined_ts.lts ts.lts then
true, combined_ts
else (
monotone_decreases_count ts combined_ts;
check_if_loop_consumes_fixed_time c combined_ts
)
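// Errors are sticky in the machine semantics: if evaluating code from s yields
// a state with ms_ok set, then s itself was already ok.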
val monotone_ok_eval: (code:S.code) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_code code fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[code; 0])
val monotone_ok_eval_block: (codes:S.codes) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_codes codes fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[codes;1])
#set-options "--z3rlimit 20 --initial_ifuel 0 --max_ifuel 1 --initial_fuel 2 --max_fuel 2"
let rec monotone_ok_eval code fuel s =
match code with
| Ins ins -> reveal_opaque (`%S.machine_eval_code_ins) S.machine_eval_code_ins
| Block block -> monotone_ok_eval_block block fuel s
| IfElse ifCond ifTrue ifFalse ->
let (st, b) = machine_eval_ocmp s ifCond in
if b then monotone_ok_eval ifTrue fuel st else monotone_ok_eval ifFalse fuel st
| While cond body ->
if fuel = 0 then ()
else
let (st, b) = machine_eval_ocmp s cond in
if not b then () else
monotone_ok_eval body (fuel - 1) st;
()
and monotone_ok_eval_block block fuel s =
match block with
| [] -> ()
| hd :: tl ->
let s' = machine_eval_code hd fuel s in
if None? s' then () else
monotone_ok_eval_block tl fuel (Some?.v s');
monotone_ok_eval hd fuel s
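// Specialization to While: a successful loop evaluation implies the state
// reached just after evaluating the loop condition was ok.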
val monotone_ok_eval_while: (code:S.code{While? code}) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (
let While cond body = code in
let (s1, b1) = machine_eval_ocmp s cond in
let r1 = machine_eval_code code fuel s in
Some? r1 /\ (Some?.v r1).S.ms_ok ==> s1.S.ms_ok))
let monotone_ok_eval_while code fuel s =
let While cond body = code in
let (s1, b) = machine_eval_ocmp s cond in
let r1 = machine_eval_while cond body fuel s in
if fuel = 0 then () else
if not b then () else
match machine_eval_code body (fuel - 1) s1 with
| None -> ()
| Some s ->
if not s.S.ms_ok then ()
else (monotone_ok_eval body (fuel - 1) s1; monotone_ok_eval code (fuel - 1) s)
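// The taint state computed by the loop checker only moves locations from
// Public to Secret relative to its input, never the other way around.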
val lemma_loop_taintstate_monotone (ts:analysis_taints) (code:S.code{While? code}) : Lemma
(requires True)
(ensures (let _, ts' = check_if_loop_consumes_fixed_time code ts in
taintstate_monotone ts ts'))
(decreases %[count_publics ts])
#reset-options "--initial_ifuel 1 --max_ifuel 1 --initial_fuel 2 --max_fuel 2 --z3rlimit 40"
let rec lemma_loop_taintstate_monotone ts code =
let ts = normalize_taints ts in
let While pred body = code in
let b, ts' = check_if_code_consumes_fixed_time body ts in
let combined_ts = combine_analysis_taints ts ts' in
if eq_leakage_taints combined_ts.lts ts.lts then ()
else (
monotone_decreases_count ts combined_ts;
let b, ts_fin = check_if_loop_consumes_fixed_time code combined_ts in
lemma_loop_taintstate_monotone combined_ts code;
taintstate_monotone_trans ts combined_ts ts_fin
)
#reset-options "--initial_ifuel 1 --max_ifuel 1 --initial_fuel 2 --max_fuel 2 --z3rlimit 60"
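// Soundness of the checkers: if a check returns true, then evaluating the code
// on two states that agree on all Public locations yields the same leakage
// trace, and the resulting states again agree on the locations the final taint
// state marks Public.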
val lemma_code_explicit_leakage_free: (ts:analysis_taints) -> (code:S.code) -> (s1:S.machine_state) -> (s2:S.machine_state) -> (fuel:nat) -> Lemma
(requires True)
(ensures (let b, ts' = check_if_code_consumes_fixed_time code ts in
(b2t b ==> isConstantTimeGivenStates code fuel ts.lts s1 s2 /\ isExplicitLeakageFreeGivenStates code fuel ts.lts ts'.lts s1 s2)))
(decreases %[fuel; code; 1])
val lemma_block_explicit_leakage_free: (ts:analysis_taints) -> (codes:S.codes) -> (s1:S.machine_state) -> (s2:S.machine_state) -> (fuel:nat) -> Lemma
(requires True)
(ensures (let b, ts' = check_if_block_consumes_fixed_time codes ts in
(b2t b ==> isConstantTimeGivenStates (Block codes) fuel ts.lts s1 s2 /\ isExplicitLeakageFreeGivenStates (Block codes) fuel ts.lts ts'.lts s1 s2)))
(decreases %[fuel; codes; 2])
val lemma_loop_explicit_leakage_free: (ts:analysis_taints) -> (code:S.code{While? code}) -> (s1:S.machine_state) -> (s2:S.machine_state) -> (fuel:nat) -> Lemma
(requires True)
(ensures (let b, ts' = check_if_loop_consumes_fixed_time code ts in
(b2t b ==> isConstantTimeGivenStates code fuel ts.lts s1 s2 /\ isExplicitLeakageFreeGivenStates code fuel ts.lts ts'.lts s1 s2)))
(decreases %[fuel; code; 0])
#reset-options "--initial_ifuel 2 --max_ifuel 2 --initial_fuel 1 --max_fuel 2 --z3rlimit 300"
let rec lemma_code_explicit_leakage_free ts code s1 s2 fuel = match code with
| Ins ins -> lemma_ins_leakage_free ts ins
| Block block -> lemma_block_explicit_leakage_free ts block s1 s2 fuel
| IfElse ifCond ifTrue ifFalse ->
reveal_opaque (`%S.valid_ocmp_opaque) S.valid_ocmp_opaque;
reveal_opaque (`%S.eval_ocmp_opaque) S.eval_ocmp_opaque;
let (b_fin, ts_fin) = check_if_code_consumes_fixed_time code ts in
let (st1, b1) = machine_eval_ocmp s1 ifCond in
let (st2, b2) = machine_eval_ocmp s2 ifCond in
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts.lts st1 st2);
monotone_ok_eval ifTrue fuel st1;
monotone_ok_eval ifTrue fuel st2;
lemma_code_explicit_leakage_free ts ifTrue st1 st2 fuel;
monotone_ok_eval ifFalse fuel st1;
monotone_ok_eval ifFalse fuel st2;
lemma_code_explicit_leakage_free ts ifFalse st1 st2 fuel
| While _ _ -> lemma_loop_explicit_leakage_free ts code s1 s2 fuel
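  // Blocks: prove the head leakage-free, then recurse on the tail with the
  // intermediate taint state; monotone_ok_eval shows that success of the tail
  // implies the intermediate states were ok.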
and lemma_block_explicit_leakage_free ts block s1 s2 fuel = match block with
| [] -> ()
| hd :: tl ->
let b, ts' = check_if_code_consumes_fixed_time hd ts in
lemma_code_explicit_leakage_free ts hd s1 s2 fuel;
let s'1 = machine_eval_code hd fuel s1 in
let s'2 = machine_eval_code hd fuel s2 in
if None? s'1 || None? s'2 then ()
else
let s'1 = Some?.v s'1 in
let s'2 = Some?.v s'2 in
lemma_block_explicit_leakage_free ts' tl s'1 s'2 fuel;
monotone_ok_eval (Block tl) fuel s'1;
monotone_ok_eval (Block tl) fuel s'2
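  // Loops: if the condition is false in both states the loop exits immediately;
  // otherwise recurse on the body, then re-enter the loop with one unit of fuel
  // less, starting from the combined (more secret) taint state.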
and lemma_loop_explicit_leakage_free ts code s1 s2 fuel =
reveal_opaque (`%S.valid_ocmp_opaque) S.valid_ocmp_opaque;
reveal_opaque (`%S.eval_ocmp_opaque) S.eval_ocmp_opaque;
let ts = normalize_taints ts in
if fuel = 0 then () else
let (b_fin, ts_fin) = check_if_code_consumes_fixed_time code ts in
let r1 = machine_eval_code code fuel s1 in
let r2 = machine_eval_code code fuel s2 in
let While cond body = code in
let (st1, b1) = machine_eval_ocmp s1 cond in
let (st2, b2) = machine_eval_ocmp s2 cond in
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> b1 = b2);
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts.lts st1 st2);
if not b1 || not b2 then
(
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> not b1 /\ not b2);
assert (not b1 ==> r1 == Some st1);
assert (not b2 ==> r2 == Some st2);
monotone_ok_eval_while code fuel s1;
assert (Some? r1 /\ (Some?.v r1).S.ms_ok ==> st1.S.ms_ok);
monotone_ok_eval_while code fuel s2;
assert (Some? r2 /\ (Some?.v r2).S.ms_ok ==> st2.S.ms_ok);
lemma_loop_taintstate_monotone ts code;
isExplicit_monotone ts ts ts_fin code fuel s1 s2;
()
)
else
(
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts.lts st1 st2);
let (b', ts') = check_if_code_consumes_fixed_time body ts in
lemma_code_explicit_leakage_free ts body st1 st2 (fuel - 1);
monotone_ok_eval body (fuel - 1) st1;
monotone_ok_eval body (fuel - 1) st2;
let st1 = machine_eval_code body (fuel - 1) st1 in
let st2 = machine_eval_code body (fuel - 1) st2 in
assert (None? st1 ==> r1 == st1);
assert (None? st2 ==> r2 == st2);
if (None? st1 || None? st2) then () else
let st1 = Some?.v st1 in
let st2 = Some?.v st2 in
if not st1.S.ms_ok || not st2.S.ms_ok then () else
let combined_ts = combine_analysis_taints ts ts' in
let (b_aux, ts_aux) = check_if_loop_consumes_fixed_time code combined_ts in
lemma_loop_explicit_leakage_free combined_ts code st1 st2 (fuel - 1);
isConstant_monotone ts combined_ts code (fuel - 1) st1 st2;
isExplicit_monotone2 ts_aux ts combined_ts code (fuel - 1) st1 st2;
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts'.lts st1 st2)
)
val lemma_code_leakage_free: (ts:analysis_taints) -> (code:S.code) -> Lemma
(let b, ts' = check_if_code_consumes_fixed_time code ts in
(b2t b ==> isConstantTime code ts.lts /\ isLeakageFree code ts.lts ts'.lts))
let lemma_code_leakage_free ts code = FStar.Classical.forall_intro_3 (lemma_code_explicit_leakage_free ts code)
#set-options "--z3rlimit 20"
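// Top-level check: run the analysis and make sure the locations the caller
// expects to be Public are indeed Public in the final taint state.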
val check_if_code_is_leakage_free': (code:S.code) -> (ts:analysis_taints) -> (tsExpected:analysis_taints) -> (b:bool{b ==> isLeakageFree code ts.lts tsExpected.lts
/\ b ==> isConstantTime code ts.lts})
let check_if_code_is_leakage_free' code ts tsExpected =
let b, ts' = check_if_code_consumes_fixed_time code ts in
if b then
publicTaintsAreAsExpected ts' tsExpected
else
b | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Leakage_s.fst.checked",
"Vale.X64.Leakage_Ins.fsti.checked",
"Vale.X64.Leakage_Helpers.fst.checked",
"Vale.Lib.MapTree.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Leakage.fst"
} | [
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 2,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
code: Vale.X64.Machine_Semantics_s.code ->
ts: Vale.X64.Leakage_Helpers.analysis_taints ->
public_return: Prims.bool
-> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"Vale.X64.Machine_Semantics_s.code",
"Vale.X64.Leakage_Helpers.analysis_taints",
"Prims.bool",
"Prims.op_AmpAmp",
"Vale.Arch.HeapTypes_s.uu___is_Public",
"Vale.Lib.MapTree.sel",
"Vale.X64.Machine_s.reg",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Leakage_Helpers.__proj__AnalysisTaints__item__rts",
"Vale.X64.Machine_s.reg_Rax",
"FStar.Pervasives.Native.tuple2",
"Vale.X64.Leakage.check_if_code_consumes_fixed_time"
] | [] | false | false | false | true | false | let check_if_code_is_leakage_free code ts public_return =
| let b, ts' = check_if_code_consumes_fixed_time code ts in
if public_return then b && Public? (Vale.Lib.MapTree.sel ts'.rts reg_Rax) else b | false |
Vale.X64.Leakage.fst | Vale.X64.Leakage.lemma_count_public_registers_file | val lemma_count_public_registers_file
(regs1 regs2: reg_taint)
(rf: reg_file_id)
(k: nat{k <= n_regs rf})
: Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i: nat). {:pattern regs1 (Reg rf i)\/regs2 (Reg rf i)}
i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))) | val lemma_count_public_registers_file
(regs1 regs2: reg_taint)
(rf: reg_file_id)
(k: nat{k <= n_regs rf})
: Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i: nat). {:pattern regs1 (Reg rf i)\/regs2 (Reg rf i)}
i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))) | let rec lemma_count_public_registers_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k
)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i:nat).{:pattern regs1 (Reg rf i) \/ regs2 (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))
)
=
if k > 0 then lemma_count_public_registers_file regs1 regs2 rf (k - 1) | {
"file_name": "vale/code/arch/x64/Vale.X64.Leakage.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 72,
"end_line": 138,
"start_col": 0,
"start_line": 128
} | module Vale.X64.Leakage
open FStar.Mul
open Vale.X64.Machine_s
module S = Vale.X64.Machine_Semantics_s
open Vale.X64.Leakage_s
open Vale.X64.Leakage_Helpers
open Vale.X64.Leakage_Ins
unfold let machine_eval_ocmp = S.machine_eval_ocmp
unfold let machine_eval_code = S.machine_eval_code
unfold let machine_eval_codes = S.machine_eval_codes
unfold let machine_eval_while = S.machine_eval_while
#reset-options "--initial_ifuel 0 --max_ifuel 1 --initial_fuel 1 --max_fuel 1"
let normalize_taints (ts:analysis_taints) : analysis_taints =
let AnalysisTaints lts rts = ts in
AnalysisTaints lts (regs_to_map (map_to_regs rts))
let combine_reg_taints (regs1 regs2:reg_taint) : reg_taint =
FunctionalExtensionality.on reg (fun x -> merge_taint (regs1 x) (regs2 x))
let rec eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : bool =
if k = 0 then true
else regs1 (Reg rf (k - 1)) = regs2 (Reg rf (k - 1)) && eq_regs_file regs1 regs2 rf (k - 1)
let rec eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : bool =
if k = 0 then true
else eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1)) && eq_regs regs1 regs2 (k - 1)
let rec lemma_eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(ensures eq_regs_file regs1 regs2 rf k <==>
(forall (i:nat).{:pattern (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i)))
=
if k > 0 then lemma_eq_regs_file regs1 regs2 rf (k - 1)
let rec lemma_eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(ensures
eq_regs regs1 regs2 k <==>
(forall (i j:nat).{:pattern (Reg i j)} i < k /\ j < n_regs i ==>
regs1 (Reg i j) == regs2 (Reg i j)))
=
if k > 0 then (
lemma_eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1));
lemma_eq_regs regs1 regs2 (k - 1)
)
let eq_registers (regs1 regs2:reg_taint) : (b:bool{b <==> regs1 == regs2}) =
lemma_eq_regs regs1 regs2 n_reg_files;
let b = eq_regs regs1 regs2 n_reg_files in
if b then (
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
);
b
let eq_leakage_taints (ts1 ts2:leakage_taints) : (b:bool{b <==> ts1 == ts2}) =
eq_registers ts1.regTaint ts2.regTaint &&
ts1.flagsTaint = ts2.flagsTaint &&
ts1.cfFlagsTaint = ts2.cfFlagsTaint &&
ts1.ofFlagsTaint = ts2.ofFlagsTaint
let taintstate_monotone_regs (ts ts':reg_taint) =
(forall (r:reg).{:pattern (ts' r) \/ (ts r)}
Public? (ts' r) ==> Public? (ts r))
let taintstate_monotone (ts ts':analysis_taints) =
let ts = ts.lts in
let ts' = ts'.lts in
taintstate_monotone_regs ts.regTaint ts'.regTaint /\
(Public? (ts'.flagsTaint) ==> Public? (ts.flagsTaint)) /\
(Public? (ts'.cfFlagsTaint) ==> Public? (ts.cfFlagsTaint)) /\
(Public? (ts'.ofFlagsTaint) ==> Public? (ts.ofFlagsTaint))
let taintstate_monotone_trans (ts1:analysis_taints) (ts2:analysis_taints) (ts3:analysis_taints)
: Lemma (taintstate_monotone ts1 ts2 /\ taintstate_monotone ts2 ts3 ==> taintstate_monotone ts1 ts3) = ()
let isConstant_monotone (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isConstantTimeGivenStates code fuel ts2.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isConstantTimeGivenStates code fuel ts1.lts s1 s2)
= ()
let isExplicit_monotone (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code)
(fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts.lts ts1.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts.lts ts2.lts s1 s2)
= ()
let isExplicit_monotone2 (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints)
(code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts2.lts ts.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts1.lts ts.lts s1 s2)
= ()
let combine_leakage_taints (ts1:leakage_taints) (ts2:leakage_taints) : leakage_taints =
let LeakageTaints rs1 fs1 c1 o1 = ts1 in
let LeakageTaints rs2 fs2 c2 o2 = ts2 in
let rs = combine_reg_taints rs1 rs2 in
LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
let combine_analysis_taints (ts1:analysis_taints) (ts2:analysis_taints)
: (ts:analysis_taints{taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\ ts.lts == combine_leakage_taints ts1.lts ts2.lts})
=
let AnalysisTaints (LeakageTaints rs1_old fs1 c1 o1) rts1 = ts1 in
let AnalysisTaints (LeakageTaints rs2_old fs2 c2 o2) rts2 = ts2 in
let rts1 = ts1.rts in
let rts2 = ts2.rts in
let rs1 = map_to_regs rts1 in // \
let rs2 = map_to_regs rts2 in // - build efficient representations of reg_taint before calling combine_reg_taints
assert (FStar.FunctionalExtensionality.feq rs1 rs1_old);
assert (FStar.FunctionalExtensionality.feq rs2 rs2_old);
let rs = combine_reg_taints rs1 rs2 in
let rts = regs_to_map rs in
let lts = LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
in
AnalysisTaints lts rts
let count_public_register (regs:reg_taint) (r:reg) = if Public? (regs r) then 1 else 0
let rec count_public_registers_file (regs:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : nat =
if k = 0 then 0
else count_public_register regs (Reg rf (k - 1)) + count_public_registers_file regs rf (k - 1) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Leakage_s.fst.checked",
"Vale.X64.Leakage_Ins.fsti.checked",
"Vale.X64.Leakage_Helpers.fst.checked",
"Vale.Lib.MapTree.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Leakage.fst"
} | [
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
regs1: Vale.X64.Leakage_s.reg_taint ->
regs2: Vale.X64.Leakage_s.reg_taint ->
rf: Vale.X64.Machine_s.reg_file_id ->
k: Prims.nat{k <= Vale.X64.Machine_s.n_regs rf}
-> FStar.Pervasives.Lemma
(requires
Vale.X64.Leakage.taintstate_monotone_regs regs2 regs1 /\
Vale.X64.Leakage.count_public_registers_file regs1 rf k >=
Vale.X64.Leakage.count_public_registers_file regs2 rf k)
(ensures
Vale.X64.Leakage.count_public_registers_file regs1 rf k ==
Vale.X64.Leakage.count_public_registers_file regs2 rf k /\
(forall (i: Prims.nat).
{:pattern regs1 (Vale.X64.Machine_s.Reg rf i)\/regs2 (Vale.X64.Machine_s.Reg rf i)}
i < k ==> regs1 (Vale.X64.Machine_s.Reg rf i) == regs2 (Vale.X64.Machine_s.Reg rf i))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Vale.X64.Leakage_s.reg_taint",
"Vale.X64.Machine_s.reg_file_id",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Vale.X64.Machine_s.n_regs",
"Prims.op_GreaterThan",
"Vale.X64.Leakage.lemma_count_public_registers_file",
"Prims.op_Subtraction",
"Prims.bool",
"Prims.unit",
"Prims.l_and",
"Vale.X64.Leakage.taintstate_monotone_regs",
"Prims.op_GreaterThanOrEqual",
"Vale.X64.Leakage.count_public_registers_file",
"Prims.squash",
"Prims.eq2",
"Prims.l_Forall",
"Prims.l_imp",
"Prims.op_LessThan",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Machine_s.Reg",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [
"recursion"
] | false | false | true | false | false | let rec lemma_count_public_registers_file
(regs1 regs2: reg_taint)
(rf: reg_file_id)
(k: nat{k <= n_regs rf})
: Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i: nat). {:pattern regs1 (Reg rf i)\/regs2 (Reg rf i)}
i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))) =
| if k > 0 then lemma_count_public_registers_file regs1 regs2 rf (k - 1) | false |
Vale.X64.Leakage.fst | Vale.X64.Leakage.mk_analysis_taints | val mk_analysis_taints (win:bool) (nbr_args:nat) : analysis_taints | val mk_analysis_taints (win:bool) (nbr_args:nat) : analysis_taints | let mk_analysis_taints win nbr_args : analysis_taints =
let regTaint r =
if win then
if r = reg_Rsp then Public else
if r = reg_Rcx && nbr_args >= 1 then Public else
if r = reg_Rdx && nbr_args >= 2 then Public else
if r = reg_R8 && nbr_args >= 3 then Public else
if r = reg_R9 && nbr_args >= 4 then Public else
Secret
else
if r = reg_Rsp then Public else
if r = reg_Rdi && nbr_args >= 1 then Public else
if r = reg_Rsi && nbr_args >= 2 then Public else
if r = reg_Rdx && nbr_args >= 3 then Public else
if r = reg_Rcx && nbr_args >= 4 then Public else
if r = reg_R8 && nbr_args >= 5 then Public else
if r = reg_R9 && nbr_args >= 6 then Public else
Secret
in
let rs = FunctionalExtensionality.on reg regTaint in
let rts = regs_to_map rs in
let lts = LeakageTaints (map_to_regs rts) Secret Secret Secret in
AnalysisTaints lts rts | {
"file_name": "vale/code/arch/x64/Vale.X64.Leakage.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 24,
"end_line": 479,
"start_col": 0,
"start_line": 457
} | module Vale.X64.Leakage
open FStar.Mul
open Vale.X64.Machine_s
module S = Vale.X64.Machine_Semantics_s
open Vale.X64.Leakage_s
open Vale.X64.Leakage_Helpers
open Vale.X64.Leakage_Ins
unfold let machine_eval_ocmp = S.machine_eval_ocmp
unfold let machine_eval_code = S.machine_eval_code
unfold let machine_eval_codes = S.machine_eval_codes
unfold let machine_eval_while = S.machine_eval_while
#reset-options "--initial_ifuel 0 --max_ifuel 1 --initial_fuel 1 --max_fuel 1"
let normalize_taints (ts:analysis_taints) : analysis_taints =
let AnalysisTaints lts rts = ts in
AnalysisTaints lts (regs_to_map (map_to_regs rts))
let combine_reg_taints (regs1 regs2:reg_taint) : reg_taint =
FunctionalExtensionality.on reg (fun x -> merge_taint (regs1 x) (regs2 x))
let rec eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : bool =
if k = 0 then true
else regs1 (Reg rf (k - 1)) = regs2 (Reg rf (k - 1)) && eq_regs_file regs1 regs2 rf (k - 1)
let rec eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : bool =
if k = 0 then true
else eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1)) && eq_regs regs1 regs2 (k - 1)
let rec lemma_eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(ensures eq_regs_file regs1 regs2 rf k <==>
(forall (i:nat).{:pattern (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i)))
=
if k > 0 then lemma_eq_regs_file regs1 regs2 rf (k - 1)
let rec lemma_eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(ensures
eq_regs regs1 regs2 k <==>
(forall (i j:nat).{:pattern (Reg i j)} i < k /\ j < n_regs i ==>
regs1 (Reg i j) == regs2 (Reg i j)))
=
if k > 0 then (
lemma_eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1));
lemma_eq_regs regs1 regs2 (k - 1)
)
let eq_registers (regs1 regs2:reg_taint) : (b:bool{b <==> regs1 == regs2}) =
lemma_eq_regs regs1 regs2 n_reg_files;
let b = eq_regs regs1 regs2 n_reg_files in
if b then (
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
);
b
let eq_leakage_taints (ts1 ts2:leakage_taints) : (b:bool{b <==> ts1 == ts2}) =
eq_registers ts1.regTaint ts2.regTaint &&
ts1.flagsTaint = ts2.flagsTaint &&
ts1.cfFlagsTaint = ts2.cfFlagsTaint &&
ts1.ofFlagsTaint = ts2.ofFlagsTaint
let taintstate_monotone_regs (ts ts':reg_taint) =
(forall (r:reg).{:pattern (ts' r) \/ (ts r)}
Public? (ts' r) ==> Public? (ts r))
let taintstate_monotone (ts ts':analysis_taints) =
let ts = ts.lts in
let ts' = ts'.lts in
taintstate_monotone_regs ts.regTaint ts'.regTaint /\
(Public? (ts'.flagsTaint) ==> Public? (ts.flagsTaint)) /\
(Public? (ts'.cfFlagsTaint) ==> Public? (ts.cfFlagsTaint)) /\
(Public? (ts'.ofFlagsTaint) ==> Public? (ts.ofFlagsTaint))
let taintstate_monotone_trans (ts1:analysis_taints) (ts2:analysis_taints) (ts3:analysis_taints)
: Lemma (taintstate_monotone ts1 ts2 /\ taintstate_monotone ts2 ts3 ==> taintstate_monotone ts1 ts3) = ()
let isConstant_monotone (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isConstantTimeGivenStates code fuel ts2.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isConstantTimeGivenStates code fuel ts1.lts s1 s2)
= ()
let isExplicit_monotone (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code)
(fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts.lts ts1.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts.lts ts2.lts s1 s2)
= ()
let isExplicit_monotone2 (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints)
(code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts2.lts ts.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts1.lts ts.lts s1 s2)
= ()
let combine_leakage_taints (ts1:leakage_taints) (ts2:leakage_taints) : leakage_taints =
let LeakageTaints rs1 fs1 c1 o1 = ts1 in
let LeakageTaints rs2 fs2 c2 o2 = ts2 in
let rs = combine_reg_taints rs1 rs2 in
LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
let combine_analysis_taints (ts1:analysis_taints) (ts2:analysis_taints)
: (ts:analysis_taints{taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\ ts.lts == combine_leakage_taints ts1.lts ts2.lts})
=
let AnalysisTaints (LeakageTaints rs1_old fs1 c1 o1) rts1 = ts1 in
let AnalysisTaints (LeakageTaints rs2_old fs2 c2 o2) rts2 = ts2 in
let rts1 = ts1.rts in
let rts2 = ts2.rts in
let rs1 = map_to_regs rts1 in // \
let rs2 = map_to_regs rts2 in // - build efficient representations of reg_taint before calling combine_reg_taints
assert (FStar.FunctionalExtensionality.feq rs1 rs1_old);
assert (FStar.FunctionalExtensionality.feq rs2 rs2_old);
let rs = combine_reg_taints rs1 rs2 in
let rts = regs_to_map rs in
let lts = LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
in
AnalysisTaints lts rts
let count_public_register (regs:reg_taint) (r:reg) = if Public? (regs r) then 1 else 0
let rec count_public_registers_file (regs:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : nat =
if k = 0 then 0
else count_public_register regs (Reg rf (k - 1)) + count_public_registers_file regs rf (k - 1)
let rec lemma_count_public_registers_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k
)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i:nat).{:pattern regs1 (Reg rf i) \/ regs2 (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))
)
=
if k > 0 then lemma_count_public_registers_file regs1 regs2 rf (k - 1)
let rec count_public_registers (regs:reg_taint) (k:nat{k <= n_reg_files}) : nat =
if k = 0 then 0
else count_public_registers_file regs (k - 1) (n_regs (k - 1)) + count_public_registers regs (k - 1)
let rec lemma_count_public_registers (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers regs1 k >= count_public_registers regs2 k
)
(ensures
count_public_registers regs1 k == count_public_registers regs2 k /\
(forall (r:reg).{:pattern regs1 r \/ regs2 r} Reg?.rf r < k ==> regs1 r == regs2 r)
)
=
if k > 0 then (
let n = n_regs (k - 1) in
if count_public_registers_file regs1 (k - 1) n >= count_public_registers_file regs2 (k - 1) n then
lemma_count_public_registers_file regs1 regs2 (k - 1) n;
lemma_count_public_registers regs1 regs2 (k - 1)
)
let count_flagTaint (ts:analysis_taints) : nat = if Public? ts.lts.flagsTaint then 1 else 0
let count_cfFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.cfFlagsTaint then 1 else 0
let count_ofFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.ofFlagsTaint then 1 else 0
let count_publics (ts:analysis_taints) : nat =
count_public_registers ts.lts.regTaint n_reg_files +
count_flagTaint ts +
count_cfFlagTaint ts +
count_ofFlagTaint ts
let monotone_decreases_count (ts ts':analysis_taints) : Lemma
(requires taintstate_monotone ts ts' /\ not (eq_leakage_taints ts.lts ts'.lts))
(ensures count_publics ts' < count_publics ts)
=
let regs1 = ts'.lts.regTaint in
let regs2 = ts.lts.regTaint in
if count_public_registers regs1 n_reg_files >= count_public_registers regs2 n_reg_files then (
lemma_count_public_registers regs1 regs2 n_reg_files;
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
)
val check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[block])
val check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 1])
val check_if_loop_consumes_fixed_time (code:S.code{While? code}) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 0])
#set-options "--z3refresh --z3rlimit 600"
let rec check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : bool & analysis_taints =
match block with
| [] -> true, ts
| hd::tl -> let fixedTime, ts_int = check_if_code_consumes_fixed_time hd ts in
if (not fixedTime) then fixedTime, ts_int
else check_if_block_consumes_fixed_time tl ts_int
and check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : bool & analysis_taints =
match code with
| Ins ins -> let b, ts = check_if_ins_consumes_fixed_time ins ts in b, ts
| Block block -> check_if_block_consumes_fixed_time block ts
| IfElse ifCond ifTrue ifFalse ->
let o1 = operand_taint 0 (S.get_fst_ocmp ifCond) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp ifCond) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then (false, ts)
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp ifCond) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp ifCond) ts in
if (not o2Public) then (false, ts)
else
let validIfTrue, tsIfTrue = check_if_code_consumes_fixed_time ifTrue ts in
if (not validIfTrue) then (false, ts)
else
let validIfFalse, tsIfFalse = check_if_code_consumes_fixed_time ifFalse ts in
if (not validIfFalse) then (false, ts)
else
(true, combine_analysis_taints tsIfTrue tsIfFalse)
| While cond body -> check_if_loop_consumes_fixed_time code ts
and check_if_loop_consumes_fixed_time c (ts:analysis_taints) : (bool & analysis_taints) =
let ts = normalize_taints ts in
let While pred body = c in
let o1 = operand_taint 0 (S.get_fst_ocmp pred) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp pred) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then false, ts
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp pred) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp pred) ts in
if (not o2Public) then (false, ts)
else
let fixedTime, next_ts = check_if_code_consumes_fixed_time body ts in
if (not fixedTime) then (false, ts)
else
let combined_ts = combine_analysis_taints ts next_ts in
assert (taintstate_monotone ts combined_ts);
if eq_leakage_taints combined_ts.lts ts.lts then
true, combined_ts
else (
monotone_decreases_count ts combined_ts;
check_if_loop_consumes_fixed_time c combined_ts
)
val monotone_ok_eval: (code:S.code) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_code code fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[code; 0])
val monotone_ok_eval_block: (codes:S.codes) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_codes codes fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[codes;1])
#set-options "--z3rlimit 20 --initial_ifuel 0 --max_ifuel 1 --initial_fuel 2 --max_fuel 2"
let rec monotone_ok_eval code fuel s =
match code with
| Ins ins -> reveal_opaque (`%S.machine_eval_code_ins) S.machine_eval_code_ins
| Block block -> monotone_ok_eval_block block fuel s
| IfElse ifCond ifTrue ifFalse ->
let (st, b) = machine_eval_ocmp s ifCond in
if b then monotone_ok_eval ifTrue fuel st else monotone_ok_eval ifFalse fuel st
| While cond body ->
if fuel = 0 then ()
else
let (st, b) = machine_eval_ocmp s cond in
if not b then () else
monotone_ok_eval body (fuel - 1) st;
()
and monotone_ok_eval_block block fuel s =
match block with
| [] -> ()
| hd :: tl ->
let s' = machine_eval_code hd fuel s in
if None? s' then () else
monotone_ok_eval_block tl fuel (Some?.v s');
monotone_ok_eval hd fuel s
val monotone_ok_eval_while: (code:S.code{While? code}) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (
let While cond body = code in
let (s1, b1) = machine_eval_ocmp s cond in
let r1 = machine_eval_code code fuel s in
Some? r1 /\ (Some?.v r1).S.ms_ok ==> s1.S.ms_ok))
let monotone_ok_eval_while code fuel s =
let While cond body = code in
let (s1, b) = machine_eval_ocmp s cond in
let r1 = machine_eval_while cond body fuel s in
if fuel = 0 then () else
if not b then () else
match machine_eval_code body (fuel - 1) s1 with
| None -> ()
| Some s ->
if not s.S.ms_ok then ()
else (monotone_ok_eval body (fuel - 1) s1; monotone_ok_eval code (fuel - 1) s)
val lemma_loop_taintstate_monotone (ts:analysis_taints) (code:S.code{While? code}) : Lemma
(requires True)
(ensures (let _, ts' = check_if_loop_consumes_fixed_time code ts in
taintstate_monotone ts ts'))
(decreases %[count_publics ts])
#reset-options "--initial_ifuel 1 --max_ifuel 1 --initial_fuel 2 --max_fuel 2 --z3rlimit 40"
let rec lemma_loop_taintstate_monotone ts code =
let ts = normalize_taints ts in
let While pred body = code in
let b, ts' = check_if_code_consumes_fixed_time body ts in
let combined_ts = combine_analysis_taints ts ts' in
if eq_leakage_taints combined_ts.lts ts.lts then ()
else (
monotone_decreases_count ts combined_ts;
let b, ts_fin = check_if_loop_consumes_fixed_time code combined_ts in
lemma_loop_taintstate_monotone combined_ts code;
taintstate_monotone_trans ts combined_ts ts_fin
)
#reset-options "--initial_ifuel 1 --max_ifuel 1 --initial_fuel 2 --max_fuel 2 --z3rlimit 60"
val lemma_code_explicit_leakage_free: (ts:analysis_taints) -> (code:S.code) -> (s1:S.machine_state) -> (s2:S.machine_state) -> (fuel:nat) -> Lemma
(requires True)
(ensures (let b, ts' = check_if_code_consumes_fixed_time code ts in
(b2t b ==> isConstantTimeGivenStates code fuel ts.lts s1 s2 /\ isExplicitLeakageFreeGivenStates code fuel ts.lts ts'.lts s1 s2)))
(decreases %[fuel; code; 1])
val lemma_block_explicit_leakage_free: (ts:analysis_taints) -> (codes:S.codes) -> (s1:S.machine_state) -> (s2:S.machine_state) -> (fuel:nat) -> Lemma
(requires True)
(ensures (let b, ts' = check_if_block_consumes_fixed_time codes ts in
(b2t b ==> isConstantTimeGivenStates (Block codes) fuel ts.lts s1 s2 /\ isExplicitLeakageFreeGivenStates (Block codes) fuel ts.lts ts'.lts s1 s2)))
(decreases %[fuel; codes; 2])
val lemma_loop_explicit_leakage_free: (ts:analysis_taints) -> (code:S.code{While? code}) -> (s1:S.machine_state) -> (s2:S.machine_state) -> (fuel:nat) -> Lemma
(requires True)
(ensures (let b, ts' = check_if_loop_consumes_fixed_time code ts in
(b2t b ==> isConstantTimeGivenStates code fuel ts.lts s1 s2 /\ isExplicitLeakageFreeGivenStates code fuel ts.lts ts'.lts s1 s2)))
(decreases %[fuel; code; 0])
#reset-options "--initial_ifuel 2 --max_ifuel 2 --initial_fuel 1 --max_fuel 2 --z3rlimit 300"
let rec lemma_code_explicit_leakage_free ts code s1 s2 fuel = match code with
| Ins ins -> lemma_ins_leakage_free ts ins
| Block block -> lemma_block_explicit_leakage_free ts block s1 s2 fuel
| IfElse ifCond ifTrue ifFalse ->
reveal_opaque (`%S.valid_ocmp_opaque) S.valid_ocmp_opaque;
reveal_opaque (`%S.eval_ocmp_opaque) S.eval_ocmp_opaque;
let (b_fin, ts_fin) = check_if_code_consumes_fixed_time code ts in
let (st1, b1) = machine_eval_ocmp s1 ifCond in
let (st2, b2) = machine_eval_ocmp s2 ifCond in
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts.lts st1 st2);
monotone_ok_eval ifTrue fuel st1;
monotone_ok_eval ifTrue fuel st2;
lemma_code_explicit_leakage_free ts ifTrue st1 st2 fuel;
monotone_ok_eval ifFalse fuel st1;
monotone_ok_eval ifFalse fuel st2;
lemma_code_explicit_leakage_free ts ifFalse st1 st2 fuel
| While _ _ -> lemma_loop_explicit_leakage_free ts code s1 s2 fuel
and lemma_block_explicit_leakage_free ts block s1 s2 fuel = match block with
| [] -> ()
| hd :: tl ->
let b, ts' = check_if_code_consumes_fixed_time hd ts in
lemma_code_explicit_leakage_free ts hd s1 s2 fuel;
let s'1 = machine_eval_code hd fuel s1 in
let s'2 = machine_eval_code hd fuel s2 in
if None? s'1 || None? s'2 then ()
else
let s'1 = Some?.v s'1 in
let s'2 = Some?.v s'2 in
lemma_block_explicit_leakage_free ts' tl s'1 s'2 fuel;
monotone_ok_eval (Block tl) fuel s'1;
monotone_ok_eval (Block tl) fuel s'2
and lemma_loop_explicit_leakage_free ts code s1 s2 fuel =
reveal_opaque (`%S.valid_ocmp_opaque) S.valid_ocmp_opaque;
reveal_opaque (`%S.eval_ocmp_opaque) S.eval_ocmp_opaque;
let ts = normalize_taints ts in
if fuel = 0 then () else
let (b_fin, ts_fin) = check_if_code_consumes_fixed_time code ts in
let r1 = machine_eval_code code fuel s1 in
let r2 = machine_eval_code code fuel s2 in
let While cond body = code in
let (st1, b1) = machine_eval_ocmp s1 cond in
let (st2, b2) = machine_eval_ocmp s2 cond in
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> b1 = b2);
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts.lts st1 st2);
if not b1 || not b2 then
(
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> not b1 /\ not b2);
assert (not b1 ==> r1 == Some st1);
assert (not b2 ==> r2 == Some st2);
monotone_ok_eval_while code fuel s1;
assert (Some? r1 /\ (Some?.v r1).S.ms_ok ==> st1.S.ms_ok);
monotone_ok_eval_while code fuel s2;
assert (Some? r2 /\ (Some?.v r2).S.ms_ok ==> st2.S.ms_ok);
lemma_loop_taintstate_monotone ts code;
isExplicit_monotone ts ts ts_fin code fuel s1 s2;
()
)
else
(
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts.lts st1 st2);
let (b', ts') = check_if_code_consumes_fixed_time body ts in
lemma_code_explicit_leakage_free ts body st1 st2 (fuel - 1);
monotone_ok_eval body (fuel - 1) st1;
monotone_ok_eval body (fuel - 1) st2;
let st1 = machine_eval_code body (fuel - 1) st1 in
let st2 = machine_eval_code body (fuel - 1) st2 in
assert (None? st1 ==> r1 == st1);
assert (None? st2 ==> r2 == st2);
if (None? st1 || None? st2) then () else
let st1 = Some?.v st1 in
let st2 = Some?.v st2 in
if not st1.S.ms_ok || not st2.S.ms_ok then () else
let combined_ts = combine_analysis_taints ts ts' in
let (b_aux, ts_aux) = check_if_loop_consumes_fixed_time code combined_ts in
lemma_loop_explicit_leakage_free combined_ts code st1 st2 (fuel - 1);
isConstant_monotone ts combined_ts code (fuel - 1) st1 st2;
isExplicit_monotone2 ts_aux ts combined_ts code (fuel - 1) st1 st2;
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts'.lts st1 st2)
)
val lemma_code_leakage_free: (ts:analysis_taints) -> (code:S.code) -> Lemma
(let b, ts' = check_if_code_consumes_fixed_time code ts in
(b2t b ==> isConstantTime code ts.lts /\ isLeakageFree code ts.lts ts'.lts))
let lemma_code_leakage_free ts code = FStar.Classical.forall_intro_3 (lemma_code_explicit_leakage_free ts code)
#set-options "--z3rlimit 20"
val check_if_code_is_leakage_free': (code:S.code) -> (ts:analysis_taints) -> (tsExpected:analysis_taints) -> (b:bool{b ==> isLeakageFree code ts.lts tsExpected.lts
/\ b ==> isConstantTime code ts.lts})
let check_if_code_is_leakage_free' code ts tsExpected =
let b, ts' = check_if_code_consumes_fixed_time code ts in
if b then
publicTaintsAreAsExpected ts' tsExpected
else
b
let check_if_code_is_leakage_free code ts public_return =
let b, ts' = check_if_code_consumes_fixed_time code ts in
if public_return then
b && Public? (Vale.Lib.MapTree.sel ts'.rts reg_Rax)
else b | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Leakage_s.fst.checked",
"Vale.X64.Leakage_Ins.fsti.checked",
"Vale.X64.Leakage_Helpers.fst.checked",
"Vale.Lib.MapTree.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Leakage.fst"
} | [
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 2,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | win: Prims.bool -> nbr_args: Prims.nat -> Vale.X64.Leakage_Helpers.analysis_taints | Prims.Tot | [
"total"
] | [] | [
"Prims.bool",
"Prims.nat",
"Vale.X64.Leakage_Helpers.AnalysisTaints",
"Vale.X64.Leakage_s.leakage_taints",
"Vale.X64.Leakage_s.LeakageTaints",
"Vale.X64.Leakage_Helpers.map_to_regs",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Leakage_Helpers.regmap",
"Vale.X64.Leakage_Helpers.is_map_of",
"Vale.X64.Leakage_Helpers.regs_to_map",
"FStar.FunctionalExtensionality.restricted_t",
"Vale.X64.Machine_s.reg",
"Vale.Arch.HeapTypes_s.taint",
"FStar.FunctionalExtensionality.on",
"Prims.op_Equality",
"Vale.X64.Machine_s.reg_Rsp",
"Vale.Arch.HeapTypes_s.Public",
"Prims.op_AmpAmp",
"Vale.X64.Machine_s.reg_Rcx",
"Prims.op_GreaterThanOrEqual",
"Vale.X64.Machine_s.reg_Rdx",
"Vale.X64.Machine_s.reg_R8",
"Vale.X64.Machine_s.reg_R9",
"Vale.X64.Machine_s.reg_Rdi",
"Vale.X64.Machine_s.reg_Rsi",
"Vale.X64.Leakage_Helpers.analysis_taints"
] | [] | false | false | false | true | false | let mk_analysis_taints win nbr_args : analysis_taints =
| let regTaint r =
    if win then
      if r = reg_Rsp then Public else
      if r = reg_Rcx && nbr_args >= 1 then Public else
      if r = reg_Rdx && nbr_args >= 2 then Public else
      if r = reg_R8 && nbr_args >= 3 then Public else
      if r = reg_R9 && nbr_args >= 4 then Public else
      Secret
    else
      if r = reg_Rsp then Public else
      if r = reg_Rdi && nbr_args >= 1 then Public else
      if r = reg_Rsi && nbr_args >= 2 then Public else
      if r = reg_Rdx && nbr_args >= 3 then Public else
      if r = reg_Rcx && nbr_args >= 4 then Public else
      if r = reg_R8 && nbr_args >= 5 then Public else
      if r = reg_R9 && nbr_args >= 6 then Public else
      Secret
in
let rs = FunctionalExtensionality.on reg regTaint in
let rts = regs_to_map rs in
let lts = LeakageTaints (map_to_regs rts) Secret Secret Secret in
AnalysisTaints lts rts | false |
Vale.X64.Leakage.fst | Vale.X64.Leakage.check_if_code_is_leakage_free' | val check_if_code_is_leakage_free': (code:S.code) -> (ts:analysis_taints) -> (tsExpected:analysis_taints) -> (b:bool{b ==> isLeakageFree code ts.lts tsExpected.lts
/\ b ==> isConstantTime code ts.lts}) | val check_if_code_is_leakage_free': (code:S.code) -> (ts:analysis_taints) -> (tsExpected:analysis_taints) -> (b:bool{b ==> isLeakageFree code ts.lts tsExpected.lts
/\ b ==> isConstantTime code ts.lts}) | let check_if_code_is_leakage_free' code ts tsExpected =
let b, ts' = check_if_code_consumes_fixed_time code ts in
if b then
publicTaintsAreAsExpected ts' tsExpected
else
b | {
"file_name": "vale/code/arch/x64/Vale.X64.Leakage.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 5,
"end_line": 448,
"start_col": 0,
"start_line": 443
} | module Vale.X64.Leakage
open FStar.Mul
open Vale.X64.Machine_s
module S = Vale.X64.Machine_Semantics_s
open Vale.X64.Leakage_s
open Vale.X64.Leakage_Helpers
open Vale.X64.Leakage_Ins
unfold let machine_eval_ocmp = S.machine_eval_ocmp
unfold let machine_eval_code = S.machine_eval_code
unfold let machine_eval_codes = S.machine_eval_codes
unfold let machine_eval_while = S.machine_eval_while
#reset-options "--initial_ifuel 0 --max_ifuel 1 --initial_fuel 1 --max_fuel 1"
let normalize_taints (ts:analysis_taints) : analysis_taints =
let AnalysisTaints lts rts = ts in
AnalysisTaints lts (regs_to_map (map_to_regs rts))
let combine_reg_taints (regs1 regs2:reg_taint) : reg_taint =
FunctionalExtensionality.on reg (fun x -> merge_taint (regs1 x) (regs2 x))
let rec eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : bool =
if k = 0 then true
else regs1 (Reg rf (k - 1)) = regs2 (Reg rf (k - 1)) && eq_regs_file regs1 regs2 rf (k - 1)
let rec eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : bool =
if k = 0 then true
else eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1)) && eq_regs regs1 regs2 (k - 1)
let rec lemma_eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(ensures eq_regs_file regs1 regs2 rf k <==>
(forall (i:nat).{:pattern (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i)))
=
if k > 0 then lemma_eq_regs_file regs1 regs2 rf (k - 1)
let rec lemma_eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(ensures
eq_regs regs1 regs2 k <==>
(forall (i j:nat).{:pattern (Reg i j)} i < k /\ j < n_regs i ==>
regs1 (Reg i j) == regs2 (Reg i j)))
=
if k > 0 then (
lemma_eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1));
lemma_eq_regs regs1 regs2 (k - 1)
)
let eq_registers (regs1 regs2:reg_taint) : (b:bool{b <==> regs1 == regs2}) =
lemma_eq_regs regs1 regs2 n_reg_files;
let b = eq_regs regs1 regs2 n_reg_files in
if b then (
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
);
b
let eq_leakage_taints (ts1 ts2:leakage_taints) : (b:bool{b <==> ts1 == ts2}) =
eq_registers ts1.regTaint ts2.regTaint &&
ts1.flagsTaint = ts2.flagsTaint &&
ts1.cfFlagsTaint = ts2.cfFlagsTaint &&
ts1.ofFlagsTaint = ts2.ofFlagsTaint
let taintstate_monotone_regs (ts ts':reg_taint) =
(forall (r:reg).{:pattern (ts' r) \/ (ts r)}
Public? (ts' r) ==> Public? (ts r))
let taintstate_monotone (ts ts':analysis_taints) =
let ts = ts.lts in
let ts' = ts'.lts in
taintstate_monotone_regs ts.regTaint ts'.regTaint /\
(Public? (ts'.flagsTaint) ==> Public? (ts.flagsTaint)) /\
(Public? (ts'.cfFlagsTaint) ==> Public? (ts.cfFlagsTaint)) /\
(Public? (ts'.ofFlagsTaint) ==> Public? (ts.ofFlagsTaint))
let taintstate_monotone_trans (ts1:analysis_taints) (ts2:analysis_taints) (ts3:analysis_taints)
: Lemma (taintstate_monotone ts1 ts2 /\ taintstate_monotone ts2 ts3 ==> taintstate_monotone ts1 ts3) = ()
let isConstant_monotone (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isConstantTimeGivenStates code fuel ts2.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isConstantTimeGivenStates code fuel ts1.lts s1 s2)
= ()
let isExplicit_monotone (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code)
(fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts.lts ts1.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts.lts ts2.lts s1 s2)
= ()
let isExplicit_monotone2 (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints)
(code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts2.lts ts.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts1.lts ts.lts s1 s2)
= ()
let combine_leakage_taints (ts1:leakage_taints) (ts2:leakage_taints) : leakage_taints =
let LeakageTaints rs1 fs1 c1 o1 = ts1 in
let LeakageTaints rs2 fs2 c2 o2 = ts2 in
let rs = combine_reg_taints rs1 rs2 in
LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
let combine_analysis_taints (ts1:analysis_taints) (ts2:analysis_taints)
: (ts:analysis_taints{taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\ ts.lts == combine_leakage_taints ts1.lts ts2.lts})
=
let AnalysisTaints (LeakageTaints rs1_old fs1 c1 o1) rts1 = ts1 in
let AnalysisTaints (LeakageTaints rs2_old fs2 c2 o2) rts2 = ts2 in
let rts1 = ts1.rts in
let rts2 = ts2.rts in
let rs1 = map_to_regs rts1 in // \
let rs2 = map_to_regs rts2 in // - build efficient representations of reg_taint before calling combine_reg_taints
assert (FStar.FunctionalExtensionality.feq rs1 rs1_old);
assert (FStar.FunctionalExtensionality.feq rs2 rs2_old);
let rs = combine_reg_taints rs1 rs2 in
let rts = regs_to_map rs in
let lts = LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
in
AnalysisTaints lts rts
let count_public_register (regs:reg_taint) (r:reg) = if Public? (regs r) then 1 else 0
let rec count_public_registers_file (regs:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : nat =
if k = 0 then 0
else count_public_register regs (Reg rf (k - 1)) + count_public_registers_file regs rf (k - 1)
let rec lemma_count_public_registers_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k
)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i:nat).{:pattern regs1 (Reg rf i) \/ regs2 (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))
)
=
if k > 0 then lemma_count_public_registers_file regs1 regs2 rf (k - 1)
let rec count_public_registers (regs:reg_taint) (k:nat{k <= n_reg_files}) : nat =
if k = 0 then 0
else count_public_registers_file regs (k - 1) (n_regs (k - 1)) + count_public_registers regs (k - 1)
let rec lemma_count_public_registers (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers regs1 k >= count_public_registers regs2 k
)
(ensures
count_public_registers regs1 k == count_public_registers regs2 k /\
(forall (r:reg).{:pattern regs1 r \/ regs2 r} Reg?.rf r < k ==> regs1 r == regs2 r)
)
=
if k > 0 then (
let n = n_regs (k - 1) in
if count_public_registers_file regs1 (k - 1) n >= count_public_registers_file regs2 (k - 1) n then
lemma_count_public_registers_file regs1 regs2 (k - 1) n;
lemma_count_public_registers regs1 regs2 (k - 1)
)
let count_flagTaint (ts:analysis_taints) : nat = if Public? ts.lts.flagsTaint then 1 else 0
let count_cfFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.cfFlagsTaint then 1 else 0
let count_ofFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.ofFlagsTaint then 1 else 0
let count_publics (ts:analysis_taints) : nat =
count_public_registers ts.lts.regTaint n_reg_files +
count_flagTaint ts +
count_cfFlagTaint ts +
count_ofFlagTaint ts
let monotone_decreases_count (ts ts':analysis_taints) : Lemma
(requires taintstate_monotone ts ts' /\ not (eq_leakage_taints ts.lts ts'.lts))
(ensures count_publics ts' < count_publics ts)
=
let regs1 = ts'.lts.regTaint in
let regs2 = ts.lts.regTaint in
if count_public_registers regs1 n_reg_files >= count_public_registers regs2 n_reg_files then (
lemma_count_public_registers regs1 regs2 n_reg_files;
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
)
val check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[block])
val check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 1])
val check_if_loop_consumes_fixed_time (code:S.code{While? code}) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 0])
#set-options "--z3refresh --z3rlimit 600"
let rec check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : bool & analysis_taints =
match block with
| [] -> true, ts
| hd::tl -> let fixedTime, ts_int = check_if_code_consumes_fixed_time hd ts in
if (not fixedTime) then fixedTime, ts_int
else check_if_block_consumes_fixed_time tl ts_int
and check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : bool & analysis_taints =
match code with
| Ins ins -> let b, ts = check_if_ins_consumes_fixed_time ins ts in b, ts
| Block block -> check_if_block_consumes_fixed_time block ts
| IfElse ifCond ifTrue ifFalse ->
let o1 = operand_taint 0 (S.get_fst_ocmp ifCond) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp ifCond) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then (false, ts)
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp ifCond) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp ifCond) ts in
if (not o2Public) then (false, ts)
else
let validIfTrue, tsIfTrue = check_if_code_consumes_fixed_time ifTrue ts in
if (not validIfTrue) then (false, ts)
else
let validIfFalse, tsIfFalse = check_if_code_consumes_fixed_time ifFalse ts in
if (not validIfFalse) then (false, ts)
else
(true, combine_analysis_taints tsIfTrue tsIfFalse)
| While cond body -> check_if_loop_consumes_fixed_time code ts
and check_if_loop_consumes_fixed_time c (ts:analysis_taints) : (bool & analysis_taints) =
let ts = normalize_taints ts in
let While pred body = c in
let o1 = operand_taint 0 (S.get_fst_ocmp pred) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp pred) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then false, ts
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp pred) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp pred) ts in
if (not o2Public) then (false, ts)
else
let fixedTime, next_ts = check_if_code_consumes_fixed_time body ts in
if (not fixedTime) then (false, ts)
else
let combined_ts = combine_analysis_taints ts next_ts in
assert (taintstate_monotone ts combined_ts);
if eq_leakage_taints combined_ts.lts ts.lts then
true, combined_ts
else (
monotone_decreases_count ts combined_ts;
check_if_loop_consumes_fixed_time c combined_ts
)
val monotone_ok_eval: (code:S.code) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_code code fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[code; 0])
val monotone_ok_eval_block: (codes:S.codes) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_codes codes fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[codes;1])
#set-options "--z3rlimit 20 --initial_ifuel 0 --max_ifuel 1 --initial_fuel 2 --max_fuel 2"
let rec monotone_ok_eval code fuel s =
match code with
| Ins ins -> reveal_opaque (`%S.machine_eval_code_ins) S.machine_eval_code_ins
| Block block -> monotone_ok_eval_block block fuel s
| IfElse ifCond ifTrue ifFalse ->
let (st, b) = machine_eval_ocmp s ifCond in
if b then monotone_ok_eval ifTrue fuel st else monotone_ok_eval ifFalse fuel st
| While cond body ->
if fuel = 0 then ()
else
let (st, b) = machine_eval_ocmp s cond in
if not b then () else
monotone_ok_eval body (fuel - 1) st;
()
and monotone_ok_eval_block block fuel s =
match block with
| [] -> ()
| hd :: tl ->
let s' = machine_eval_code hd fuel s in
if None? s' then () else
monotone_ok_eval_block tl fuel (Some?.v s');
monotone_ok_eval hd fuel s
val monotone_ok_eval_while: (code:S.code{While? code}) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (
let While cond body = code in
let (s1, b1) = machine_eval_ocmp s cond in
let r1 = machine_eval_code code fuel s in
Some? r1 /\ (Some?.v r1).S.ms_ok ==> s1.S.ms_ok))
let monotone_ok_eval_while code fuel s =
let While cond body = code in
let (s1, b) = machine_eval_ocmp s cond in
let r1 = machine_eval_while cond body fuel s in
if fuel = 0 then () else
if not b then () else
match machine_eval_code body (fuel - 1) s1 with
| None -> ()
| Some s ->
if not s.S.ms_ok then ()
else (monotone_ok_eval body (fuel - 1) s1; monotone_ok_eval code (fuel - 1) s)
val lemma_loop_taintstate_monotone (ts:analysis_taints) (code:S.code{While? code}) : Lemma
(requires True)
(ensures (let _, ts' = check_if_loop_consumes_fixed_time code ts in
taintstate_monotone ts ts'))
(decreases %[count_publics ts])
#reset-options "--initial_ifuel 1 --max_ifuel 1 --initial_fuel 2 --max_fuel 2 --z3rlimit 40"
let rec lemma_loop_taintstate_monotone ts code =
let ts = normalize_taints ts in
let While pred body = code in
let b, ts' = check_if_code_consumes_fixed_time body ts in
let combined_ts = combine_analysis_taints ts ts' in
if eq_leakage_taints combined_ts.lts ts.lts then ()
else (
monotone_decreases_count ts combined_ts;
let b, ts_fin = check_if_loop_consumes_fixed_time code combined_ts in
lemma_loop_taintstate_monotone combined_ts code;
taintstate_monotone_trans ts combined_ts ts_fin
)
#reset-options "--initial_ifuel 1 --max_ifuel 1 --initial_fuel 2 --max_fuel 2 --z3rlimit 60"
val lemma_code_explicit_leakage_free: (ts:analysis_taints) -> (code:S.code) -> (s1:S.machine_state) -> (s2:S.machine_state) -> (fuel:nat) -> Lemma
(requires True)
(ensures (let b, ts' = check_if_code_consumes_fixed_time code ts in
(b2t b ==> isConstantTimeGivenStates code fuel ts.lts s1 s2 /\ isExplicitLeakageFreeGivenStates code fuel ts.lts ts'.lts s1 s2)))
(decreases %[fuel; code; 1])
val lemma_block_explicit_leakage_free: (ts:analysis_taints) -> (codes:S.codes) -> (s1:S.machine_state) -> (s2:S.machine_state) -> (fuel:nat) -> Lemma
(requires True)
(ensures (let b, ts' = check_if_block_consumes_fixed_time codes ts in
(b2t b ==> isConstantTimeGivenStates (Block codes) fuel ts.lts s1 s2 /\ isExplicitLeakageFreeGivenStates (Block codes) fuel ts.lts ts'.lts s1 s2)))
(decreases %[fuel; codes; 2])
val lemma_loop_explicit_leakage_free: (ts:analysis_taints) -> (code:S.code{While? code}) -> (s1:S.machine_state) -> (s2:S.machine_state) -> (fuel:nat) -> Lemma
(requires True)
(ensures (let b, ts' = check_if_loop_consumes_fixed_time code ts in
(b2t b ==> isConstantTimeGivenStates code fuel ts.lts s1 s2 /\ isExplicitLeakageFreeGivenStates code fuel ts.lts ts'.lts s1 s2)))
(decreases %[fuel; code; 0])
#reset-options "--initial_ifuel 2 --max_ifuel 2 --initial_fuel 1 --max_fuel 2 --z3rlimit 300"
let rec lemma_code_explicit_leakage_free ts code s1 s2 fuel = match code with
| Ins ins -> lemma_ins_leakage_free ts ins
| Block block -> lemma_block_explicit_leakage_free ts block s1 s2 fuel
| IfElse ifCond ifTrue ifFalse ->
reveal_opaque (`%S.valid_ocmp_opaque) S.valid_ocmp_opaque;
reveal_opaque (`%S.eval_ocmp_opaque) S.eval_ocmp_opaque;
let (b_fin, ts_fin) = check_if_code_consumes_fixed_time code ts in
let (st1, b1) = machine_eval_ocmp s1 ifCond in
let (st2, b2) = machine_eval_ocmp s2 ifCond in
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts.lts st1 st2);
monotone_ok_eval ifTrue fuel st1;
monotone_ok_eval ifTrue fuel st2;
lemma_code_explicit_leakage_free ts ifTrue st1 st2 fuel;
monotone_ok_eval ifFalse fuel st1;
monotone_ok_eval ifFalse fuel st2;
lemma_code_explicit_leakage_free ts ifFalse st1 st2 fuel
| While _ _ -> lemma_loop_explicit_leakage_free ts code s1 s2 fuel
and lemma_block_explicit_leakage_free ts block s1 s2 fuel = match block with
| [] -> ()
| hd :: tl ->
let b, ts' = check_if_code_consumes_fixed_time hd ts in
lemma_code_explicit_leakage_free ts hd s1 s2 fuel;
let s'1 = machine_eval_code hd fuel s1 in
let s'2 = machine_eval_code hd fuel s2 in
if None? s'1 || None? s'2 then ()
else
let s'1 = Some?.v s'1 in
let s'2 = Some?.v s'2 in
lemma_block_explicit_leakage_free ts' tl s'1 s'2 fuel;
monotone_ok_eval (Block tl) fuel s'1;
monotone_ok_eval (Block tl) fuel s'2
and lemma_loop_explicit_leakage_free ts code s1 s2 fuel =
reveal_opaque (`%S.valid_ocmp_opaque) S.valid_ocmp_opaque;
reveal_opaque (`%S.eval_ocmp_opaque) S.eval_ocmp_opaque;
let ts = normalize_taints ts in
if fuel = 0 then () else
let (b_fin, ts_fin) = check_if_code_consumes_fixed_time code ts in
let r1 = machine_eval_code code fuel s1 in
let r2 = machine_eval_code code fuel s2 in
let While cond body = code in
let (st1, b1) = machine_eval_ocmp s1 cond in
let (st2, b2) = machine_eval_ocmp s2 cond in
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> b1 = b2);
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts.lts st1 st2);
if not b1 || not b2 then
(
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> not b1 /\ not b2);
assert (not b1 ==> r1 == Some st1);
assert (not b2 ==> r2 == Some st2);
monotone_ok_eval_while code fuel s1;
assert (Some? r1 /\ (Some?.v r1).S.ms_ok ==> st1.S.ms_ok);
monotone_ok_eval_while code fuel s2;
assert (Some? r2 /\ (Some?.v r2).S.ms_ok ==> st2.S.ms_ok);
lemma_loop_taintstate_monotone ts code;
isExplicit_monotone ts ts ts_fin code fuel s1 s2;
()
)
else
(
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts.lts st1 st2);
let (b', ts') = check_if_code_consumes_fixed_time body ts in
lemma_code_explicit_leakage_free ts body st1 st2 (fuel - 1);
monotone_ok_eval body (fuel - 1) st1;
monotone_ok_eval body (fuel - 1) st2;
let st1 = machine_eval_code body (fuel - 1) st1 in
let st2 = machine_eval_code body (fuel - 1) st2 in
assert (None? st1 ==> r1 == st1);
assert (None? st2 ==> r2 == st2);
if (None? st1 || None? st2) then () else
let st1 = Some?.v st1 in
let st2 = Some?.v st2 in
if not st1.S.ms_ok || not st2.S.ms_ok then () else
let combined_ts = combine_analysis_taints ts ts' in
let (b_aux, ts_aux) = check_if_loop_consumes_fixed_time code combined_ts in
lemma_loop_explicit_leakage_free combined_ts code st1 st2 (fuel - 1);
isConstant_monotone ts combined_ts code (fuel - 1) st1 st2;
isExplicit_monotone2 ts_aux ts combined_ts code (fuel - 1) st1 st2;
assert (b2t b_fin ==> constTimeInvariant ts.lts s1 s2 /\ st1.S.ms_ok /\ st2.S.ms_ok ==> constTimeInvariant ts'.lts st1 st2)
)
val lemma_code_leakage_free: (ts:analysis_taints) -> (code:S.code) -> Lemma
(let b, ts' = check_if_code_consumes_fixed_time code ts in
(b2t b ==> isConstantTime code ts.lts /\ isLeakageFree code ts.lts ts'.lts))
let lemma_code_leakage_free ts code = FStar.Classical.forall_intro_3 (lemma_code_explicit_leakage_free ts code)
#set-options "--z3rlimit 20"
val check_if_code_is_leakage_free': (code:S.code) -> (ts:analysis_taints) -> (tsExpected:analysis_taints) -> (b:bool{b ==> isLeakageFree code ts.lts tsExpected.lts
/\ b ==> isConstantTime code ts.lts}) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Leakage_s.fst.checked",
"Vale.X64.Leakage_Ins.fsti.checked",
"Vale.X64.Leakage_Helpers.fst.checked",
"Vale.Lib.MapTree.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Leakage.fst"
} | [
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 2,
"max_fuel": 2,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
code: Vale.X64.Machine_Semantics_s.code ->
ts: Vale.X64.Leakage_Helpers.analysis_taints ->
tsExpected: Vale.X64.Leakage_Helpers.analysis_taints
-> b:
Prims.bool
{ b ==>
Vale.X64.Leakage_s.isLeakageFree code
(AnalysisTaints?.lts ts)
(AnalysisTaints?.lts tsExpected) /\ b ==>
Vale.X64.Leakage_s.isConstantTime code (AnalysisTaints?.lts ts) } | Prims.Tot | [
"total"
] | [] | [
"Vale.X64.Machine_Semantics_s.code",
"Vale.X64.Leakage_Helpers.analysis_taints",
"Prims.bool",
"Vale.X64.Leakage_Helpers.publicTaintsAreAsExpected",
"Prims.l_imp",
"Prims.b2t",
"Prims.l_and",
"Vale.X64.Leakage_s.isLeakageFree",
"Vale.X64.Leakage_Helpers.__proj__AnalysisTaints__item__lts",
"Vale.X64.Leakage_s.isConstantTime",
"FStar.Pervasives.Native.tuple2",
"Vale.X64.Leakage.check_if_code_consumes_fixed_time"
] | [] | false | false | false | false | false | let check_if_code_is_leakage_free' code ts tsExpected =
| let b, ts' = check_if_code_consumes_fixed_time code ts in
if b then publicTaintsAreAsExpected ts' tsExpected else b | false |
Vale.X64.Leakage.fst | Vale.X64.Leakage.lemma_loop_taintstate_monotone | val lemma_loop_taintstate_monotone (ts:analysis_taints) (code:S.code{While? code}) : Lemma
(requires True)
(ensures (let _, ts' = check_if_loop_consumes_fixed_time code ts in
taintstate_monotone ts ts'))
(decreases %[count_publics ts]) | val lemma_loop_taintstate_monotone (ts:analysis_taints) (code:S.code{While? code}) : Lemma
(requires True)
(ensures (let _, ts' = check_if_loop_consumes_fixed_time code ts in
taintstate_monotone ts ts'))
(decreases %[count_publics ts]) | let rec lemma_loop_taintstate_monotone ts code =
let ts = normalize_taints ts in
let While pred body = code in
let b, ts' = check_if_code_consumes_fixed_time body ts in
let combined_ts = combine_analysis_taints ts ts' in
if eq_leakage_taints combined_ts.lts ts.lts then ()
else (
monotone_decreases_count ts combined_ts;
let b, ts_fin = check_if_loop_consumes_fixed_time code combined_ts in
lemma_loop_taintstate_monotone combined_ts code;
taintstate_monotone_trans ts combined_ts ts_fin
) | {
"file_name": "vale/code/arch/x64/Vale.X64.Leakage.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 3,
"end_line": 328,
"start_col": 0,
"start_line": 317
} | module Vale.X64.Leakage
open FStar.Mul
open Vale.X64.Machine_s
module S = Vale.X64.Machine_Semantics_s
open Vale.X64.Leakage_s
open Vale.X64.Leakage_Helpers
open Vale.X64.Leakage_Ins
unfold let machine_eval_ocmp = S.machine_eval_ocmp
unfold let machine_eval_code = S.machine_eval_code
unfold let machine_eval_codes = S.machine_eval_codes
unfold let machine_eval_while = S.machine_eval_while
#reset-options "--initial_ifuel 0 --max_ifuel 1 --initial_fuel 1 --max_fuel 1"
let normalize_taints (ts:analysis_taints) : analysis_taints =
let AnalysisTaints lts rts = ts in
AnalysisTaints lts (regs_to_map (map_to_regs rts))
let combine_reg_taints (regs1 regs2:reg_taint) : reg_taint =
FunctionalExtensionality.on reg (fun x -> merge_taint (regs1 x) (regs2 x))
let rec eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : bool =
if k = 0 then true
else regs1 (Reg rf (k - 1)) = regs2 (Reg rf (k - 1)) && eq_regs_file regs1 regs2 rf (k - 1)
let rec eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : bool =
if k = 0 then true
else eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1)) && eq_regs regs1 regs2 (k - 1)
let rec lemma_eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(ensures eq_regs_file regs1 regs2 rf k <==>
(forall (i:nat).{:pattern (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i)))
=
if k > 0 then lemma_eq_regs_file regs1 regs2 rf (k - 1)
let rec lemma_eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(ensures
eq_regs regs1 regs2 k <==>
(forall (i j:nat).{:pattern (Reg i j)} i < k /\ j < n_regs i ==>
regs1 (Reg i j) == regs2 (Reg i j)))
=
if k > 0 then (
lemma_eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1));
lemma_eq_regs regs1 regs2 (k - 1)
)
let eq_registers (regs1 regs2:reg_taint) : (b:bool{b <==> regs1 == regs2}) =
lemma_eq_regs regs1 regs2 n_reg_files;
let b = eq_regs regs1 regs2 n_reg_files in
if b then (
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
);
b
let eq_leakage_taints (ts1 ts2:leakage_taints) : (b:bool{b <==> ts1 == ts2}) =
eq_registers ts1.regTaint ts2.regTaint &&
ts1.flagsTaint = ts2.flagsTaint &&
ts1.cfFlagsTaint = ts2.cfFlagsTaint &&
ts1.ofFlagsTaint = ts2.ofFlagsTaint
let taintstate_monotone_regs (ts ts':reg_taint) =
(forall (r:reg).{:pattern (ts' r) \/ (ts r)}
Public? (ts' r) ==> Public? (ts r))
let taintstate_monotone (ts ts':analysis_taints) =
let ts = ts.lts in
let ts' = ts'.lts in
taintstate_monotone_regs ts.regTaint ts'.regTaint /\
(Public? (ts'.flagsTaint) ==> Public? (ts.flagsTaint)) /\
(Public? (ts'.cfFlagsTaint) ==> Public? (ts.cfFlagsTaint)) /\
(Public? (ts'.ofFlagsTaint) ==> Public? (ts.ofFlagsTaint))
let taintstate_monotone_trans (ts1:analysis_taints) (ts2:analysis_taints) (ts3:analysis_taints)
: Lemma (taintstate_monotone ts1 ts2 /\ taintstate_monotone ts2 ts3 ==> taintstate_monotone ts1 ts3) = ()
let isConstant_monotone (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isConstantTimeGivenStates code fuel ts2.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isConstantTimeGivenStates code fuel ts1.lts s1 s2)
= ()
let isExplicit_monotone (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code)
(fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts.lts ts1.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts.lts ts2.lts s1 s2)
= ()
let isExplicit_monotone2 (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints)
(code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts2.lts ts.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts1.lts ts.lts s1 s2)
= ()
let combine_leakage_taints (ts1:leakage_taints) (ts2:leakage_taints) : leakage_taints =
let LeakageTaints rs1 fs1 c1 o1 = ts1 in
let LeakageTaints rs2 fs2 c2 o2 = ts2 in
let rs = combine_reg_taints rs1 rs2 in
LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
let combine_analysis_taints (ts1:analysis_taints) (ts2:analysis_taints)
: (ts:analysis_taints{taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\ ts.lts == combine_leakage_taints ts1.lts ts2.lts})
=
let AnalysisTaints (LeakageTaints rs1_old fs1 c1 o1) rts1 = ts1 in
let AnalysisTaints (LeakageTaints rs2_old fs2 c2 o2) rts2 = ts2 in
let rts1 = ts1.rts in
let rts2 = ts2.rts in
let rs1 = map_to_regs rts1 in // \
let rs2 = map_to_regs rts2 in // - build efficient representations of reg_taint before calling combine_reg_taints
assert (FStar.FunctionalExtensionality.feq rs1 rs1_old);
assert (FStar.FunctionalExtensionality.feq rs2 rs2_old);
let rs = combine_reg_taints rs1 rs2 in
let rts = regs_to_map rs in
let lts = LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
in
AnalysisTaints lts rts
let count_public_register (regs:reg_taint) (r:reg) = if Public? (regs r) then 1 else 0
let rec count_public_registers_file (regs:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : nat =
if k = 0 then 0
else count_public_register regs (Reg rf (k - 1)) + count_public_registers_file regs rf (k - 1)
let rec lemma_count_public_registers_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k
)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i:nat).{:pattern regs1 (Reg rf i) \/ regs2 (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))
)
=
if k > 0 then lemma_count_public_registers_file regs1 regs2 rf (k - 1)
let rec count_public_registers (regs:reg_taint) (k:nat{k <= n_reg_files}) : nat =
if k = 0 then 0
else count_public_registers_file regs (k - 1) (n_regs (k - 1)) + count_public_registers regs (k - 1)
let rec lemma_count_public_registers (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers regs1 k >= count_public_registers regs2 k
)
(ensures
count_public_registers regs1 k == count_public_registers regs2 k /\
(forall (r:reg).{:pattern regs1 r \/ regs2 r} Reg?.rf r < k ==> regs1 r == regs2 r)
)
=
if k > 0 then (
let n = n_regs (k - 1) in
if count_public_registers_file regs1 (k - 1) n >= count_public_registers_file regs2 (k - 1) n then
lemma_count_public_registers_file regs1 regs2 (k - 1) n;
lemma_count_public_registers regs1 regs2 (k - 1)
)
let count_flagTaint (ts:analysis_taints) : nat = if Public? ts.lts.flagsTaint then 1 else 0
let count_cfFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.cfFlagsTaint then 1 else 0
let count_ofFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.ofFlagsTaint then 1 else 0
let count_publics (ts:analysis_taints) : nat =
count_public_registers ts.lts.regTaint n_reg_files +
count_flagTaint ts +
count_cfFlagTaint ts +
count_ofFlagTaint ts
let monotone_decreases_count (ts ts':analysis_taints) : Lemma
(requires taintstate_monotone ts ts' /\ not (eq_leakage_taints ts.lts ts'.lts))
(ensures count_publics ts' < count_publics ts)
=
let regs1 = ts'.lts.regTaint in
let regs2 = ts.lts.regTaint in
if count_public_registers regs1 n_reg_files >= count_public_registers regs2 n_reg_files then (
lemma_count_public_registers regs1 regs2 n_reg_files;
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
)
val check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[block])
val check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 1])
val check_if_loop_consumes_fixed_time (code:S.code{While? code}) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 0])
#set-options "--z3refresh --z3rlimit 600"
let rec check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : bool & analysis_taints =
match block with
| [] -> true, ts
| hd::tl -> let fixedTime, ts_int = check_if_code_consumes_fixed_time hd ts in
if (not fixedTime) then fixedTime, ts_int
else check_if_block_consumes_fixed_time tl ts_int
and check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : bool & analysis_taints =
match code with
| Ins ins -> let b, ts = check_if_ins_consumes_fixed_time ins ts in b, ts
| Block block -> check_if_block_consumes_fixed_time block ts
| IfElse ifCond ifTrue ifFalse ->
let o1 = operand_taint 0 (S.get_fst_ocmp ifCond) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp ifCond) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then (false, ts)
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp ifCond) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp ifCond) ts in
if (not o2Public) then (false, ts)
else
let validIfTrue, tsIfTrue = check_if_code_consumes_fixed_time ifTrue ts in
if (not validIfTrue) then (false, ts)
else
let validIfFalse, tsIfFalse = check_if_code_consumes_fixed_time ifFalse ts in
if (not validIfFalse) then (false, ts)
else
(true, combine_analysis_taints tsIfTrue tsIfFalse)
| While cond body -> check_if_loop_consumes_fixed_time code ts
and check_if_loop_consumes_fixed_time c (ts:analysis_taints) : (bool & analysis_taints) =
let ts = normalize_taints ts in
let While pred body = c in
let o1 = operand_taint 0 (S.get_fst_ocmp pred) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp pred) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then false, ts
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp pred) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp pred) ts in
if (not o2Public) then (false, ts)
else
let fixedTime, next_ts = check_if_code_consumes_fixed_time body ts in
if (not fixedTime) then (false, ts)
else
let combined_ts = combine_analysis_taints ts next_ts in
assert (taintstate_monotone ts combined_ts);
if eq_leakage_taints combined_ts.lts ts.lts then
true, combined_ts
else (
monotone_decreases_count ts combined_ts;
check_if_loop_consumes_fixed_time c combined_ts
)
val monotone_ok_eval: (code:S.code) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_code code fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[code; 0])
val monotone_ok_eval_block: (codes:S.codes) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_codes codes fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[codes;1])
#set-options "--z3rlimit 20 --initial_ifuel 0 --max_ifuel 1 --initial_fuel 2 --max_fuel 2"
let rec monotone_ok_eval code fuel s =
match code with
| Ins ins -> reveal_opaque (`%S.machine_eval_code_ins) S.machine_eval_code_ins
| Block block -> monotone_ok_eval_block block fuel s
| IfElse ifCond ifTrue ifFalse ->
let (st, b) = machine_eval_ocmp s ifCond in
if b then monotone_ok_eval ifTrue fuel st else monotone_ok_eval ifFalse fuel st
| While cond body ->
if fuel = 0 then ()
else
let (st, b) = machine_eval_ocmp s cond in
if not b then () else
monotone_ok_eval body (fuel - 1) st;
()
and monotone_ok_eval_block block fuel s =
match block with
| [] -> ()
| hd :: tl ->
let s' = machine_eval_code hd fuel s in
if None? s' then () else
monotone_ok_eval_block tl fuel (Some?.v s');
monotone_ok_eval hd fuel s
val monotone_ok_eval_while: (code:S.code{While? code}) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (
let While cond body = code in
let (s1, b1) = machine_eval_ocmp s cond in
let r1 = machine_eval_code code fuel s in
Some? r1 /\ (Some?.v r1).S.ms_ok ==> s1.S.ms_ok))
let monotone_ok_eval_while code fuel s =
let While cond body = code in
let (s1, b) = machine_eval_ocmp s cond in
let r1 = machine_eval_while cond body fuel s in
if fuel = 0 then () else
if not b then () else
match machine_eval_code body (fuel - 1) s1 with
| None -> ()
| Some s ->
if not s.S.ms_ok then ()
else (monotone_ok_eval body (fuel - 1) s1; monotone_ok_eval code (fuel - 1) s)
val lemma_loop_taintstate_monotone (ts:analysis_taints) (code:S.code{While? code}) : Lemma
(requires True)
(ensures (let _, ts' = check_if_loop_consumes_fixed_time code ts in
taintstate_monotone ts ts'))
(decreases %[count_publics ts]) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Leakage_s.fst.checked",
"Vale.X64.Leakage_Ins.fsti.checked",
"Vale.X64.Leakage_Helpers.fst.checked",
"Vale.Lib.MapTree.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Leakage.fst"
} | [
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 40,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ts: Vale.X64.Leakage_Helpers.analysis_taints -> code: Vale.X64.Machine_Semantics_s.code{While? code}
-> FStar.Pervasives.Lemma
(ensures
(let _ = Vale.X64.Leakage.check_if_loop_consumes_fixed_time code ts in
(let FStar.Pervasives.Native.Mktuple2 #_ #_ _ ts' = _ in
Vale.X64.Leakage.taintstate_monotone ts ts')
<:
Type0)) (decreases Vale.X64.Leakage.count_publics ts) | FStar.Pervasives.Lemma | [
"lemma",
""
] | [] | [
"Vale.X64.Leakage_Helpers.analysis_taints",
"Vale.X64.Machine_Semantics_s.code",
"Prims.b2t",
"Vale.X64.Machine_s.uu___is_While",
"Vale.X64.Bytes_Code_s.instruction_t",
"Vale.X64.Machine_Semantics_s.instr_annotation",
"Vale.X64.Bytes_Code_s.ocmp",
"Vale.X64.Machine_s.precode",
"Prims.bool",
"Vale.X64.Leakage.eq_leakage_taints",
"Vale.X64.Leakage_Helpers.__proj__AnalysisTaints__item__lts",
"Vale.X64.Leakage.taintstate_monotone_trans",
"Prims.unit",
"Vale.X64.Leakage.lemma_loop_taintstate_monotone",
"FStar.Pervasives.Native.tuple2",
"Vale.X64.Leakage.check_if_loop_consumes_fixed_time",
"Vale.X64.Leakage.monotone_decreases_count",
"Prims.l_and",
"Vale.X64.Leakage.taintstate_monotone",
"Prims.eq2",
"Vale.X64.Leakage_s.leakage_taints",
"Vale.X64.Leakage.combine_leakage_taints",
"Vale.X64.Leakage.combine_analysis_taints",
"Vale.X64.Leakage.check_if_code_consumes_fixed_time",
"Vale.X64.Leakage.normalize_taints"
] | [
"recursion"
] | false | false | true | false | false | let rec lemma_loop_taintstate_monotone ts code =
| let ts = normalize_taints ts in
let While pred body = code in
let b, ts' = check_if_code_consumes_fixed_time body ts in
let combined_ts = combine_analysis_taints ts ts' in
if eq_leakage_taints combined_ts.lts ts.lts
then ()
else
(monotone_decreases_count ts combined_ts;
let b, ts_fin = check_if_loop_consumes_fixed_time code combined_ts in
lemma_loop_taintstate_monotone combined_ts code;
taintstate_monotone_trans ts combined_ts ts_fin) | false |
Steel.ST.C.Types.Union.fsti | Steel.ST.C.Types.Union.union_t | val union_t
(#tf: Type0)
(n: string)
(#tn: Type0)
(#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn)))
(fields: field_description_t tf)
: Tot Type0 | val union_t
(#tf: Type0)
(n: string)
(#tn: Type0)
(#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn)))
(fields: field_description_t tf)
: Tot Type0 | let union_t (#tf: Type0) (n: string) (#tn: Type0) (# [solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot Type0
= union_t0 tn #tf n fields | {
"file_name": "lib/steel/c/Steel.ST.C.Types.Union.fsti",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 26,
"end_line": 19,
"start_col": 0,
"start_line": 18
} | module Steel.ST.C.Types.Union
open Steel.ST.Util
include Steel.ST.C.Types.Fields
open Steel.C.Typestring
module P = Steel.FractionalPermission
[@@noextract_to "krml"] // primitive
val define_union0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot Type0
inline_for_extraction [@@noextract_to "krml"]
let define_union (n: string) (#tf: Type0) (#tn: Type0) (#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot Type0
= define_union0 tn #tf n fields
// To be extracted as: union t
[@@noextract_to "krml"] // primitive
val union_t0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot Type0 | {
"checked_file": "/",
"dependencies": [
"Steel.ST.Util.fsti.checked",
"Steel.ST.C.Types.Fields.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.C.Typestring.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Steel.ST.C.Types.Union.fsti"
} | [
{
"abbrev": true,
"full_module": "Steel.FractionalPermission",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "Steel.C.Typestring",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types.Fields",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.Util",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | n: Prims.string -> fields: Steel.ST.C.Types.Fields.field_description_t tf -> Type0 | Prims.Tot | [
"total"
] | [] | [
"Prims.string",
"Prims.squash",
"FStar.Pervasives.norm",
"Steel.C.Typestring.norm_typestring",
"Prims.eq2",
"Steel.C.Typestring.mk_string_t",
"Steel.ST.C.Types.Fields.field_description_t",
"Steel.ST.C.Types.Union.union_t0"
] | [] | false | false | false | false | true | let union_t
(#tf: Type0)
(n: string)
(#tn: Type0)
(#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn)))
(fields: field_description_t tf)
: Tot Type0 =
| union_t0 tn #tf n fields | false |
Steel.ST.C.Types.Union.fsti | Steel.ST.C.Types.Union.define_union | val define_union
(n: string)
(#tf #tn: Type0)
(#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn)))
(fields: field_description_t tf)
: Tot Type0 | val define_union
(n: string)
(#tf #tn: Type0)
(#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn)))
(fields: field_description_t tf)
: Tot Type0 | let define_union (n: string) (#tf: Type0) (#tn: Type0) (#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot Type0
= define_union0 tn #tf n fields | {
"file_name": "lib/steel/c/Steel.ST.C.Types.Union.fsti",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 31,
"end_line": 12,
"start_col": 0,
"start_line": 11
} | module Steel.ST.C.Types.Union
open Steel.ST.Util
include Steel.ST.C.Types.Fields
open Steel.C.Typestring
module P = Steel.FractionalPermission
[@@noextract_to "krml"] // primitive
val define_union0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot Type0 | {
"checked_file": "/",
"dependencies": [
"Steel.ST.Util.fsti.checked",
"Steel.ST.C.Types.Fields.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.C.Typestring.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Steel.ST.C.Types.Union.fsti"
} | [
{
"abbrev": true,
"full_module": "Steel.FractionalPermission",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "Steel.C.Typestring",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types.Fields",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.Util",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | n: Prims.string -> fields: Steel.ST.C.Types.Fields.field_description_t tf -> Type0 | Prims.Tot | [
"total"
] | [] | [
"Prims.string",
"Prims.squash",
"FStar.Pervasives.norm",
"Steel.C.Typestring.norm_typestring",
"Prims.eq2",
"Steel.C.Typestring.mk_string_t",
"Steel.ST.C.Types.Fields.field_description_t",
"Steel.ST.C.Types.Union.define_union0"
] | [] | false | false | false | false | true | let define_union
(n: string)
(#tf #tn: Type0)
(#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn)))
(fields: field_description_t tf)
: Tot Type0 =
| define_union0 tn #tf n fields | false |
Steel.ST.C.Types.Union.fsti | Steel.ST.C.Types.Union.union | val union
(#tf: Type0)
(n: string)
(#tn: Type0)
(#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn)))
(fields: field_description_t tf)
: Tot (typedef (union_t0 tn n fields)) | val union
(#tf: Type0)
(n: string)
(#tn: Type0)
(#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn)))
(fields: field_description_t tf)
: Tot (typedef (union_t0 tn n fields)) | let union (#tf: Type0) (n: string) (#tn: Type0) (# [solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot (typedef (union_t0 tn n fields))
= union0 tn #tf n fields | {
"file_name": "lib/steel/c/Steel.ST.C.Types.Union.fsti",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 24,
"end_line": 81,
"start_col": 0,
"start_line": 80
} | module Steel.ST.C.Types.Union
open Steel.ST.Util
include Steel.ST.C.Types.Fields
open Steel.C.Typestring
module P = Steel.FractionalPermission
[@@noextract_to "krml"] // primitive
val define_union0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot Type0
inline_for_extraction [@@noextract_to "krml"]
let define_union (n: string) (#tf: Type0) (#tn: Type0) (#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot Type0
= define_union0 tn #tf n fields
// To be extracted as: union t
[@@noextract_to "krml"] // primitive
val union_t0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot Type0
inline_for_extraction [@@noextract_to "krml"]
let union_t (#tf: Type0) (n: string) (#tn: Type0) (# [solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot Type0
= union_t0 tn #tf n fields
val union_set_field (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) (f: field_t fields) (v: fields.fd_type f) : GTot (union_t0 tn n fields)
val union_get_case
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(u: union_t0 tn n fields)
: GTot (option (field_t fields))
val union_get_field
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(u: union_t0 tn n fields)
(field: field_t fields)
: Ghost (fields.fd_type field)
(requires (union_get_case u == Some field))
(ensures (fun _ -> True))
val union_get_field_same
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma
(requires (~ (v == unknown (fields.fd_typedef field))))
(ensures (
let u = union_set_field tn n fields field v in
union_get_case u == Some field /\
union_get_field u field == v
))
[SMTPatOr [
[SMTPat (union_get_case (union_set_field tn n fields field v))];
[SMTPat (union_get_field (union_set_field tn n fields field v) field)];
]]
val union_set_field_same
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(field: field_t fields)
: Lemma
(requires (union_get_case s == Some field))
(ensures (
union_set_field tn n fields field (union_get_field s field) == s
))
[SMTPat (union_set_field tn n fields (union_get_field s field))]
[@@noextract_to "krml"] // proof-only
val union0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot (typedef (union_t0 tn n fields))
inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"Steel.ST.Util.fsti.checked",
"Steel.ST.C.Types.Fields.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.C.Typestring.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Steel.ST.C.Types.Union.fsti"
} | [
{
"abbrev": true,
"full_module": "Steel.FractionalPermission",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "Steel.C.Typestring",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types.Fields",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.Util",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | n: Prims.string -> fields: Steel.ST.C.Types.Fields.field_description_t tf
-> Steel.ST.C.Types.Base.typedef (Steel.ST.C.Types.Union.union_t0 tn n fields) | Prims.Tot | [
"total"
] | [] | [
"Prims.string",
"Prims.squash",
"FStar.Pervasives.norm",
"Steel.C.Typestring.norm_typestring",
"Prims.eq2",
"Steel.C.Typestring.mk_string_t",
"Steel.ST.C.Types.Fields.field_description_t",
"Steel.ST.C.Types.Union.union0",
"Steel.ST.C.Types.Base.typedef",
"Steel.ST.C.Types.Union.union_t0"
] | [] | false | false | false | false | false | let union
(#tf: Type0)
(n: string)
(#tn: Type0)
(#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn)))
(fields: field_description_t tf)
: Tot (typedef (union_t0 tn n fields)) =
| union0 tn #tf n fields | false |
Vale.X64.Leakage.fst | Vale.X64.Leakage.monotone_ok_eval_while | val monotone_ok_eval_while: (code:S.code{While? code}) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (
let While cond body = code in
let (s1, b1) = machine_eval_ocmp s cond in
let r1 = machine_eval_code code fuel s in
Some? r1 /\ (Some?.v r1).S.ms_ok ==> s1.S.ms_ok)) | val monotone_ok_eval_while: (code:S.code{While? code}) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (
let While cond body = code in
let (s1, b1) = machine_eval_ocmp s cond in
let r1 = machine_eval_code code fuel s in
Some? r1 /\ (Some?.v r1).S.ms_ok ==> s1.S.ms_ok)) | let monotone_ok_eval_while code fuel s =
let While cond body = code in
let (s1, b) = machine_eval_ocmp s cond in
let r1 = machine_eval_while cond body fuel s in
if fuel = 0 then () else
if not b then () else
match machine_eval_code body (fuel - 1) s1 with
| None -> ()
| Some s ->
if not s.S.ms_ok then ()
else (monotone_ok_eval body (fuel - 1) s1; monotone_ok_eval code (fuel - 1) s) | {
"file_name": "vale/code/arch/x64/Vale.X64.Leakage.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 82,
"end_line": 308,
"start_col": 0,
"start_line": 298
} | module Vale.X64.Leakage
open FStar.Mul
open Vale.X64.Machine_s
module S = Vale.X64.Machine_Semantics_s
open Vale.X64.Leakage_s
open Vale.X64.Leakage_Helpers
open Vale.X64.Leakage_Ins
unfold let machine_eval_ocmp = S.machine_eval_ocmp
unfold let machine_eval_code = S.machine_eval_code
unfold let machine_eval_codes = S.machine_eval_codes
unfold let machine_eval_while = S.machine_eval_while
#reset-options "--initial_ifuel 0 --max_ifuel 1 --initial_fuel 1 --max_fuel 1"
let normalize_taints (ts:analysis_taints) : analysis_taints =
let AnalysisTaints lts rts = ts in
AnalysisTaints lts (regs_to_map (map_to_regs rts))
let combine_reg_taints (regs1 regs2:reg_taint) : reg_taint =
FunctionalExtensionality.on reg (fun x -> merge_taint (regs1 x) (regs2 x))
let rec eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : bool =
if k = 0 then true
else regs1 (Reg rf (k - 1)) = regs2 (Reg rf (k - 1)) && eq_regs_file regs1 regs2 rf (k - 1)
let rec eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : bool =
if k = 0 then true
else eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1)) && eq_regs regs1 regs2 (k - 1)
let rec lemma_eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(ensures eq_regs_file regs1 regs2 rf k <==>
(forall (i:nat).{:pattern (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i)))
=
if k > 0 then lemma_eq_regs_file regs1 regs2 rf (k - 1)
let rec lemma_eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(ensures
eq_regs regs1 regs2 k <==>
(forall (i j:nat).{:pattern (Reg i j)} i < k /\ j < n_regs i ==>
regs1 (Reg i j) == regs2 (Reg i j)))
=
if k > 0 then (
lemma_eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1));
lemma_eq_regs regs1 regs2 (k - 1)
)
let eq_registers (regs1 regs2:reg_taint) : (b:bool{b <==> regs1 == regs2}) =
lemma_eq_regs regs1 regs2 n_reg_files;
let b = eq_regs regs1 regs2 n_reg_files in
if b then (
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
);
b
let eq_leakage_taints (ts1 ts2:leakage_taints) : (b:bool{b <==> ts1 == ts2}) =
eq_registers ts1.regTaint ts2.regTaint &&
ts1.flagsTaint = ts2.flagsTaint &&
ts1.cfFlagsTaint = ts2.cfFlagsTaint &&
ts1.ofFlagsTaint = ts2.ofFlagsTaint
let taintstate_monotone_regs (ts ts':reg_taint) =
(forall (r:reg).{:pattern (ts' r) \/ (ts r)}
Public? (ts' r) ==> Public? (ts r))
let taintstate_monotone (ts ts':analysis_taints) =
let ts = ts.lts in
let ts' = ts'.lts in
taintstate_monotone_regs ts.regTaint ts'.regTaint /\
(Public? (ts'.flagsTaint) ==> Public? (ts.flagsTaint)) /\
(Public? (ts'.cfFlagsTaint) ==> Public? (ts.cfFlagsTaint)) /\
(Public? (ts'.ofFlagsTaint) ==> Public? (ts.ofFlagsTaint))
let taintstate_monotone_trans (ts1:analysis_taints) (ts2:analysis_taints) (ts3:analysis_taints)
: Lemma (taintstate_monotone ts1 ts2 /\ taintstate_monotone ts2 ts3 ==> taintstate_monotone ts1 ts3) = ()
let isConstant_monotone (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isConstantTimeGivenStates code fuel ts2.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isConstantTimeGivenStates code fuel ts1.lts s1 s2)
= ()
let isExplicit_monotone (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code)
(fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts.lts ts1.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts.lts ts2.lts s1 s2)
= ()
let isExplicit_monotone2 (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints)
(code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts2.lts ts.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts1.lts ts.lts s1 s2)
= ()
let combine_leakage_taints (ts1:leakage_taints) (ts2:leakage_taints) : leakage_taints =
let LeakageTaints rs1 fs1 c1 o1 = ts1 in
let LeakageTaints rs2 fs2 c2 o2 = ts2 in
let rs = combine_reg_taints rs1 rs2 in
LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
let combine_analysis_taints (ts1:analysis_taints) (ts2:analysis_taints)
: (ts:analysis_taints{taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\ ts.lts == combine_leakage_taints ts1.lts ts2.lts})
=
let AnalysisTaints (LeakageTaints rs1_old fs1 c1 o1) rts1 = ts1 in
let AnalysisTaints (LeakageTaints rs2_old fs2 c2 o2) rts2 = ts2 in
let rts1 = ts1.rts in
let rts2 = ts2.rts in
let rs1 = map_to_regs rts1 in // \
let rs2 = map_to_regs rts2 in // - build efficient representations of reg_taint before calling combine_reg_taints
assert (FStar.FunctionalExtensionality.feq rs1 rs1_old);
assert (FStar.FunctionalExtensionality.feq rs2 rs2_old);
let rs = combine_reg_taints rs1 rs2 in
let rts = regs_to_map rs in
let lts = LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
in
AnalysisTaints lts rts
let count_public_register (regs:reg_taint) (r:reg) = if Public? (regs r) then 1 else 0
let rec count_public_registers_file (regs:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : nat =
if k = 0 then 0
else count_public_register regs (Reg rf (k - 1)) + count_public_registers_file regs rf (k - 1)
let rec lemma_count_public_registers_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k
)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i:nat).{:pattern regs1 (Reg rf i) \/ regs2 (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))
)
=
if k > 0 then lemma_count_public_registers_file regs1 regs2 rf (k - 1)
let rec count_public_registers (regs:reg_taint) (k:nat{k <= n_reg_files}) : nat =
if k = 0 then 0
else count_public_registers_file regs (k - 1) (n_regs (k - 1)) + count_public_registers regs (k - 1)
let rec lemma_count_public_registers (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers regs1 k >= count_public_registers regs2 k
)
(ensures
count_public_registers regs1 k == count_public_registers regs2 k /\
(forall (r:reg).{:pattern regs1 r \/ regs2 r} Reg?.rf r < k ==> regs1 r == regs2 r)
)
=
if k > 0 then (
let n = n_regs (k - 1) in
if count_public_registers_file regs1 (k - 1) n >= count_public_registers_file regs2 (k - 1) n then
lemma_count_public_registers_file regs1 regs2 (k - 1) n;
lemma_count_public_registers regs1 regs2 (k - 1)
)
let count_flagTaint (ts:analysis_taints) : nat = if Public? ts.lts.flagsTaint then 1 else 0
let count_cfFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.cfFlagsTaint then 1 else 0
let count_ofFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.ofFlagsTaint then 1 else 0
let count_publics (ts:analysis_taints) : nat =
count_public_registers ts.lts.regTaint n_reg_files +
count_flagTaint ts +
count_cfFlagTaint ts +
count_ofFlagTaint ts
let monotone_decreases_count (ts ts':analysis_taints) : Lemma
(requires taintstate_monotone ts ts' /\ not (eq_leakage_taints ts.lts ts'.lts))
(ensures count_publics ts' < count_publics ts)
=
let regs1 = ts'.lts.regTaint in
let regs2 = ts.lts.regTaint in
if count_public_registers regs1 n_reg_files >= count_public_registers regs2 n_reg_files then (
lemma_count_public_registers regs1 regs2 n_reg_files;
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
)
val check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[block])
val check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 1])
val check_if_loop_consumes_fixed_time (code:S.code{While? code}) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 0])
#set-options "--z3refresh --z3rlimit 600"
let rec check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : bool & analysis_taints =
match block with
| [] -> true, ts
| hd::tl -> let fixedTime, ts_int = check_if_code_consumes_fixed_time hd ts in
if (not fixedTime) then fixedTime, ts_int
else check_if_block_consumes_fixed_time tl ts_int
and check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : bool & analysis_taints =
match code with
| Ins ins -> let b, ts = check_if_ins_consumes_fixed_time ins ts in b, ts
| Block block -> check_if_block_consumes_fixed_time block ts
| IfElse ifCond ifTrue ifFalse ->
let o1 = operand_taint 0 (S.get_fst_ocmp ifCond) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp ifCond) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then (false, ts)
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp ifCond) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp ifCond) ts in
if (not o2Public) then (false, ts)
else
let validIfTrue, tsIfTrue = check_if_code_consumes_fixed_time ifTrue ts in
if (not validIfTrue) then (false, ts)
else
let validIfFalse, tsIfFalse = check_if_code_consumes_fixed_time ifFalse ts in
if (not validIfFalse) then (false, ts)
else
(true, combine_analysis_taints tsIfTrue tsIfFalse)
| While cond body -> check_if_loop_consumes_fixed_time code ts
and check_if_loop_consumes_fixed_time c (ts:analysis_taints) : (bool & analysis_taints) =
let ts = normalize_taints ts in
let While pred body = c in
let o1 = operand_taint 0 (S.get_fst_ocmp pred) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp pred) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then false, ts
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp pred) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp pred) ts in
if (not o2Public) then (false, ts)
else
let fixedTime, next_ts = check_if_code_consumes_fixed_time body ts in
if (not fixedTime) then (false, ts)
else
let combined_ts = combine_analysis_taints ts next_ts in
assert (taintstate_monotone ts combined_ts);
if eq_leakage_taints combined_ts.lts ts.lts then
true, combined_ts
else (
monotone_decreases_count ts combined_ts;
check_if_loop_consumes_fixed_time c combined_ts
)
val monotone_ok_eval: (code:S.code) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_code code fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[code; 0])
val monotone_ok_eval_block: (codes:S.codes) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_codes codes fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[codes;1])
#set-options "--z3rlimit 20 --initial_ifuel 0 --max_ifuel 1 --initial_fuel 2 --max_fuel 2"
let rec monotone_ok_eval code fuel s =
match code with
| Ins ins -> reveal_opaque (`%S.machine_eval_code_ins) S.machine_eval_code_ins
| Block block -> monotone_ok_eval_block block fuel s
| IfElse ifCond ifTrue ifFalse ->
let (st, b) = machine_eval_ocmp s ifCond in
if b then monotone_ok_eval ifTrue fuel st else monotone_ok_eval ifFalse fuel st
| While cond body ->
if fuel = 0 then ()
else
let (st, b) = machine_eval_ocmp s cond in
if not b then () else
monotone_ok_eval body (fuel - 1) st;
()
and monotone_ok_eval_block block fuel s =
match block with
| [] -> ()
| hd :: tl ->
let s' = machine_eval_code hd fuel s in
if None? s' then () else
monotone_ok_eval_block tl fuel (Some?.v s');
monotone_ok_eval hd fuel s
val monotone_ok_eval_while: (code:S.code{While? code}) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (
let While cond body = code in
let (s1, b1) = machine_eval_ocmp s cond in
let r1 = machine_eval_code code fuel s in
Some? r1 /\ (Some?.v r1).S.ms_ok ==> s1.S.ms_ok)) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Leakage_s.fst.checked",
"Vale.X64.Leakage_Ins.fsti.checked",
"Vale.X64.Leakage_Helpers.fst.checked",
"Vale.Lib.MapTree.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Leakage.fst"
} | [
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 2,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": true,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
code: Vale.X64.Machine_Semantics_s.code{While? code} ->
fuel: Prims.nat ->
s: Vale.X64.Machine_Semantics_s.machine_state
-> FStar.Pervasives.Lemma
(ensures
(let _ = code in
(let Vale.X64.Machine_s.While #_ #_ cond _ = _ in
let _ = Vale.X64.Leakage.machine_eval_ocmp s cond in
(let FStar.Pervasives.Native.Mktuple2 #_ #_ s1 _ = _ in
let r1 = Vale.X64.Leakage.machine_eval_code code fuel s in
Some? r1 /\ Mkmachine_state?.ms_ok (Some?.v r1) ==> Mkmachine_state?.ms_ok s1)
<:
Type0)
<:
Type0)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Vale.X64.Machine_Semantics_s.code",
"Prims.b2t",
"Vale.X64.Machine_s.uu___is_While",
"Vale.X64.Bytes_Code_s.instruction_t",
"Vale.X64.Machine_Semantics_s.instr_annotation",
"Vale.X64.Bytes_Code_s.ocmp",
"Prims.nat",
"Vale.X64.Machine_Semantics_s.machine_state",
"Vale.X64.Machine_s.precode",
"Prims.bool",
"Prims.op_Equality",
"Prims.int",
"Prims.op_Negation",
"Vale.X64.Leakage.machine_eval_code",
"Prims.op_Subtraction",
"Vale.X64.Machine_Semantics_s.__proj__Mkmachine_state__item__ms_ok",
"Vale.X64.Leakage.monotone_ok_eval",
"Prims.unit",
"FStar.Pervasives.Native.option",
"Vale.X64.Leakage.machine_eval_while",
"FStar.Pervasives.Native.tuple2",
"Vale.X64.Leakage.machine_eval_ocmp"
] | [] | false | false | true | false | false | let monotone_ok_eval_while code fuel s =
| let While cond body = code in
let s1, b = machine_eval_ocmp s cond in
let r1 = machine_eval_while cond body fuel s in
if fuel = 0
then ()
else
if not b
then ()
else
match machine_eval_code body (fuel - 1) s1 with
| None -> ()
| Some s ->
if not s.S.ms_ok
then ()
else
(monotone_ok_eval body (fuel - 1) s1;
monotone_ok_eval code (fuel - 1) s) | false |
Steel.ST.C.Types.Union.fsti | Steel.ST.C.Types.Union.full_union_set_field_elim | val full_union_set_field_elim
(#tn #tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma (requires (full (union0 tn n fields) (union_set_field tn n fields field v)))
(ensures (full (fields.fd_typedef field) v)) | val full_union_set_field_elim
(#tn #tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma (requires (full (union0 tn n fields) (union_set_field tn n fields field v)))
(ensures (full (fields.fd_typedef field) v)) | let full_union_set_field_elim
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma
(requires (
full (union0 tn n fields) (union_set_field tn n fields field v)
))
(ensures (
full (fields.fd_typedef field) v
))
= full_union (union_set_field tn n fields field v) field | {
"file_name": "lib/steel/c/Steel.ST.C.Types.Union.fsti",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 56,
"end_line": 209,
"start_col": 0,
"start_line": 195
} | module Steel.ST.C.Types.Union
open Steel.ST.Util
include Steel.ST.C.Types.Fields
open Steel.C.Typestring
module P = Steel.FractionalPermission
[@@noextract_to "krml"] // primitive
val define_union0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot Type0
inline_for_extraction [@@noextract_to "krml"]
let define_union (n: string) (#tf: Type0) (#tn: Type0) (#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot Type0
= define_union0 tn #tf n fields
// To be extracted as: union t
[@@noextract_to "krml"] // primitive
val union_t0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot Type0
inline_for_extraction [@@noextract_to "krml"]
let union_t (#tf: Type0) (n: string) (#tn: Type0) (# [solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot Type0
= union_t0 tn #tf n fields
val union_set_field (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) (f: field_t fields) (v: fields.fd_type f) : GTot (union_t0 tn n fields)
val union_get_case
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(u: union_t0 tn n fields)
: GTot (option (field_t fields))
val union_get_field
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(u: union_t0 tn n fields)
(field: field_t fields)
: Ghost (fields.fd_type field)
(requires (union_get_case u == Some field))
(ensures (fun _ -> True))
val union_get_field_same
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma
(requires (~ (v == unknown (fields.fd_typedef field))))
(ensures (
let u = union_set_field tn n fields field v in
union_get_case u == Some field /\
union_get_field u field == v
))
[SMTPatOr [
[SMTPat (union_get_case (union_set_field tn n fields field v))];
[SMTPat (union_get_field (union_set_field tn n fields field v) field)];
]]
val union_set_field_same
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(field: field_t fields)
: Lemma
(requires (union_get_case s == Some field))
(ensures (
union_set_field tn n fields field (union_get_field s field) == s
))
[SMTPat (union_set_field tn n fields (union_get_field s field))]
[@@noextract_to "krml"] // proof-only
val union0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot (typedef (union_t0 tn n fields))
inline_for_extraction
[@@noextract_to "krml"; norm_field_attr] // proof-only
let union (#tf: Type0) (n: string) (#tn: Type0) (# [solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot (typedef (union_t0 tn n fields))
= union0 tn #tf n fields
val union_get_case_unknown
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
: Lemma
(union_get_case (unknown (union0 tn n fields)) == None)
[SMTPat (unknown (union0 tn n fields))]
val union_set_field_unknown
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
(field: field_t fields)
: Lemma
(union_set_field tn n fields field (unknown (fields.fd_typedef field)) == unknown (union0 tn n fields))
[SMTPat (union_set_field tn n fields field (unknown (fields.fd_typedef field)))]
val union_get_case_uninitialized
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
: Lemma
(union_get_case (uninitialized (union0 tn n fields)) == None)
[SMTPat (uninitialized (union0 tn n fields))]
val mk_fraction_union_get_case
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(p: P.perm)
: Lemma
(requires (fractionable (union0 tn n fields) s))
(ensures (
union_get_case (mk_fraction (union0 tn n fields) s p) == union_get_case s
))
[SMTPat (union_get_case (mk_fraction (union0 tn n fields) s p))]
val fractionable_union_get_field
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(field: field_t fields)
: Lemma
(requires (union_get_case s == Some field))
(ensures (
fractionable (union0 tn n fields) s <==> fractionable (fields.fd_typedef field) (union_get_field s field)
))
[SMTPat (fractionable (union0 tn n fields) s); SMTPat (union_get_field s field)]
val mk_fraction_union_get_field
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(p: P.perm)
(field: field_t fields)
: Lemma
(requires (fractionable (union0 tn n fields) s /\ union_get_case s == Some field))
(ensures (union_get_field (mk_fraction (union0 tn n fields) s p) field == mk_fraction (fields.fd_typedef field) (union_get_field s field) p))
[SMTPat (union_get_field (mk_fraction (union0 tn n fields) s p) field)]
val mk_fraction_union_set_field
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
(p: P.perm)
: Lemma
(requires (fractionable (fields.fd_typedef field) v))
(ensures (
fractionable (union0 tn n fields) (union_set_field tn n fields field v) /\
mk_fraction (union0 tn n fields) (union_set_field tn n fields field v) p == union_set_field tn n fields field (mk_fraction (fields.fd_typedef field) v p)
))
val full_union
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(field: field_t fields)
: Lemma
(requires (union_get_case s == Some field))
(ensures (
full (union0 tn n fields) s <==> full (fields.fd_typedef field) (union_get_field s field)
))
[SMTPat (full (union0 tn n fields) s); SMTPat (union_get_field s field)]
let full_union_set_field_intro
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma
(requires (full (fields.fd_typedef field) v))
(ensures (
full (union0 tn n fields) (union_set_field tn n fields field v)
))
= full_union (union_set_field tn n fields field v) field | {
"checked_file": "/",
"dependencies": [
"Steel.ST.Util.fsti.checked",
"Steel.ST.C.Types.Fields.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.C.Typestring.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Steel.ST.C.Types.Union.fsti"
} | [
{
"abbrev": true,
"full_module": "Steel.FractionalPermission",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "Steel.C.Typestring",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types.Fields",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.Util",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | field: Steel.ST.C.Types.Fields.field_t fields -> v: Mkfield_description_t?.fd_type fields field
-> FStar.Pervasives.Lemma
(requires
Steel.ST.C.Types.Base.full (Steel.ST.C.Types.Union.union0 tn n fields)
(Steel.ST.C.Types.Union.union_set_field tn n fields field v))
(ensures Steel.ST.C.Types.Base.full (Mkfield_description_t?.fd_typedef fields field) v) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.string",
"Steel.ST.C.Types.Fields.field_description_t",
"Steel.ST.C.Types.Fields.field_t",
"Steel.ST.C.Types.Fields.__proj__Mkfield_description_t__item__fd_type",
"Steel.ST.C.Types.Union.full_union",
"Steel.ST.C.Types.Union.union_set_field",
"Prims.unit",
"Steel.ST.C.Types.Base.full",
"Steel.ST.C.Types.Union.union_t0",
"Steel.ST.C.Types.Union.union0",
"Prims.squash",
"Steel.ST.C.Types.Fields.__proj__Mkfield_description_t__item__fd_typedef",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let full_union_set_field_elim
(#tn #tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma (requires (full (union0 tn n fields) (union_set_field tn n fields field v)))
(ensures (full (fields.fd_typedef field) v)) =
| full_union (union_set_field tn n fields field v) field | false |
Steel.ST.C.Types.Union.fsti | Steel.ST.C.Types.Union.full_union_set_field_intro | val full_union_set_field_intro
(#tn #tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma (requires (full (fields.fd_typedef field) v))
(ensures (full (union0 tn n fields) (union_set_field tn n fields field v))) | val full_union_set_field_intro
(#tn #tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma (requires (full (fields.fd_typedef field) v))
(ensures (full (union0 tn n fields) (union_set_field tn n fields field v))) | let full_union_set_field_intro
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma
(requires (full (fields.fd_typedef field) v))
(ensures (
full (union0 tn n fields) (union_set_field tn n fields field v)
))
= full_union (union_set_field tn n fields field v) field | {
"file_name": "lib/steel/c/Steel.ST.C.Types.Union.fsti",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 56,
"end_line": 193,
"start_col": 0,
"start_line": 181
} | module Steel.ST.C.Types.Union
open Steel.ST.Util
include Steel.ST.C.Types.Fields
open Steel.C.Typestring
module P = Steel.FractionalPermission
[@@noextract_to "krml"] // primitive
val define_union0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot Type0
inline_for_extraction [@@noextract_to "krml"]
let define_union (n: string) (#tf: Type0) (#tn: Type0) (#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot Type0
= define_union0 tn #tf n fields
// To be extracted as: union t
[@@noextract_to "krml"] // primitive
val union_t0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot Type0
inline_for_extraction [@@noextract_to "krml"]
let union_t (#tf: Type0) (n: string) (#tn: Type0) (# [solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot Type0
= union_t0 tn #tf n fields
val union_set_field (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) (f: field_t fields) (v: fields.fd_type f) : GTot (union_t0 tn n fields)
val union_get_case
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(u: union_t0 tn n fields)
: GTot (option (field_t fields))
val union_get_field
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(u: union_t0 tn n fields)
(field: field_t fields)
: Ghost (fields.fd_type field)
(requires (union_get_case u == Some field))
(ensures (fun _ -> True))
val union_get_field_same
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma
(requires (~ (v == unknown (fields.fd_typedef field))))
(ensures (
let u = union_set_field tn n fields field v in
union_get_case u == Some field /\
union_get_field u field == v
))
[SMTPatOr [
[SMTPat (union_get_case (union_set_field tn n fields field v))];
[SMTPat (union_get_field (union_set_field tn n fields field v) field)];
]]
val union_set_field_same
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(field: field_t fields)
: Lemma
(requires (union_get_case s == Some field))
(ensures (
union_set_field tn n fields field (union_get_field s field) == s
))
[SMTPat (union_set_field tn n fields (union_get_field s field))]
[@@noextract_to "krml"] // proof-only
val union0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot (typedef (union_t0 tn n fields))
inline_for_extraction
[@@noextract_to "krml"; norm_field_attr] // proof-only
let union (#tf: Type0) (n: string) (#tn: Type0) (# [solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot (typedef (union_t0 tn n fields))
= union0 tn #tf n fields
val union_get_case_unknown
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
: Lemma
(union_get_case (unknown (union0 tn n fields)) == None)
[SMTPat (unknown (union0 tn n fields))]
val union_set_field_unknown
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
(field: field_t fields)
: Lemma
(union_set_field tn n fields field (unknown (fields.fd_typedef field)) == unknown (union0 tn n fields))
[SMTPat (union_set_field tn n fields field (unknown (fields.fd_typedef field)))]
val union_get_case_uninitialized
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
: Lemma
(union_get_case (uninitialized (union0 tn n fields)) == None)
[SMTPat (uninitialized (union0 tn n fields))]
val mk_fraction_union_get_case
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(p: P.perm)
: Lemma
(requires (fractionable (union0 tn n fields) s))
(ensures (
union_get_case (mk_fraction (union0 tn n fields) s p) == union_get_case s
))
[SMTPat (union_get_case (mk_fraction (union0 tn n fields) s p))]
val fractionable_union_get_field
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(field: field_t fields)
: Lemma
(requires (union_get_case s == Some field))
(ensures (
fractionable (union0 tn n fields) s <==> fractionable (fields.fd_typedef field) (union_get_field s field)
))
[SMTPat (fractionable (union0 tn n fields) s); SMTPat (union_get_field s field)]
val mk_fraction_union_get_field
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(p: P.perm)
(field: field_t fields)
: Lemma
(requires (fractionable (union0 tn n fields) s /\ union_get_case s == Some field))
(ensures (union_get_field (mk_fraction (union0 tn n fields) s p) field == mk_fraction (fields.fd_typedef field) (union_get_field s field) p))
[SMTPat (union_get_field (mk_fraction (union0 tn n fields) s p) field)]
val mk_fraction_union_set_field
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
(p: P.perm)
: Lemma
(requires (fractionable (fields.fd_typedef field) v))
(ensures (
fractionable (union0 tn n fields) (union_set_field tn n fields field v) /\
mk_fraction (union0 tn n fields) (union_set_field tn n fields field v) p == union_set_field tn n fields field (mk_fraction (fields.fd_typedef field) v p)
))
val full_union
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(field: field_t fields)
: Lemma
(requires (union_get_case s == Some field))
(ensures (
full (union0 tn n fields) s <==> full (fields.fd_typedef field) (union_get_field s field)
))
[SMTPat (full (union0 tn n fields) s); SMTPat (union_get_field s field)] | {
"checked_file": "/",
"dependencies": [
"Steel.ST.Util.fsti.checked",
"Steel.ST.C.Types.Fields.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.C.Typestring.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Steel.ST.C.Types.Union.fsti"
} | [
{
"abbrev": true,
"full_module": "Steel.FractionalPermission",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "Steel.C.Typestring",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types.Fields",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.Util",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | field: Steel.ST.C.Types.Fields.field_t fields -> v: Mkfield_description_t?.fd_type fields field
-> FStar.Pervasives.Lemma
(requires Steel.ST.C.Types.Base.full (Mkfield_description_t?.fd_typedef fields field) v)
(ensures
Steel.ST.C.Types.Base.full (Steel.ST.C.Types.Union.union0 tn n fields)
(Steel.ST.C.Types.Union.union_set_field tn n fields field v)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.string",
"Steel.ST.C.Types.Fields.field_description_t",
"Steel.ST.C.Types.Fields.field_t",
"Steel.ST.C.Types.Fields.__proj__Mkfield_description_t__item__fd_type",
"Steel.ST.C.Types.Union.full_union",
"Steel.ST.C.Types.Union.union_set_field",
"Prims.unit",
"Steel.ST.C.Types.Base.full",
"Steel.ST.C.Types.Fields.__proj__Mkfield_description_t__item__fd_typedef",
"Prims.squash",
"Steel.ST.C.Types.Union.union_t0",
"Steel.ST.C.Types.Union.union0",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let full_union_set_field_intro
(#tn #tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma (requires (full (fields.fd_typedef field) v))
(ensures (full (union0 tn n fields) (union_set_field tn n fields field v))) =
| full_union (union_set_field tn n fields field v) field | false |
Steel.ST.C.Types.Union.fsti | Steel.ST.C.Types.Union.full_union_set_field | val full_union_set_field
(#tn #tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma (requires True)
(ensures
(full (union0 tn n fields) (union_set_field tn n fields field v) <==>
full (fields.fd_typedef field) v))
[SMTPat (full (union0 tn n fields) (union_set_field tn n fields field v))] | val full_union_set_field
(#tn #tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma (requires True)
(ensures
(full (union0 tn n fields) (union_set_field tn n fields field v) <==>
full (fields.fd_typedef field) v))
[SMTPat (full (union0 tn n fields) (union_set_field tn n fields field v))] | let full_union_set_field
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma
(requires True)
(ensures (
full (union0 tn n fields) (union_set_field tn n fields field v) <==> full (fields.fd_typedef field) v
))
[SMTPat (full (union0 tn n fields) (union_set_field tn n fields field v))]
= Classical.move_requires (full_union_set_field_intro #tn #tf #n #fields field) v;
Classical.move_requires (full_union_set_field_elim #tn #tf #n #fields field) v | {
"file_name": "lib/steel/c/Steel.ST.C.Types.Union.fsti",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 80,
"end_line": 225,
"start_col": 0,
"start_line": 211
} | module Steel.ST.C.Types.Union
open Steel.ST.Util
include Steel.ST.C.Types.Fields
open Steel.C.Typestring
module P = Steel.FractionalPermission
[@@noextract_to "krml"] // primitive
val define_union0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot Type0
inline_for_extraction [@@noextract_to "krml"]
let define_union (n: string) (#tf: Type0) (#tn: Type0) (#[solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot Type0
= define_union0 tn #tf n fields
// To be extracted as: union t
[@@noextract_to "krml"] // primitive
val union_t0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot Type0
inline_for_extraction [@@noextract_to "krml"]
let union_t (#tf: Type0) (n: string) (#tn: Type0) (# [solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot Type0
= union_t0 tn #tf n fields
val union_set_field (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) (f: field_t fields) (v: fields.fd_type f) : GTot (union_t0 tn n fields)
val union_get_case
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(u: union_t0 tn n fields)
: GTot (option (field_t fields))
val union_get_field
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(u: union_t0 tn n fields)
(field: field_t fields)
: Ghost (fields.fd_type field)
(requires (union_get_case u == Some field))
(ensures (fun _ -> True))
val union_get_field_same
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma
(requires (~ (v == unknown (fields.fd_typedef field))))
(ensures (
let u = union_set_field tn n fields field v in
union_get_case u == Some field /\
union_get_field u field == v
))
[SMTPatOr [
[SMTPat (union_get_case (union_set_field tn n fields field v))];
[SMTPat (union_get_field (union_set_field tn n fields field v) field)];
]]
val union_set_field_same
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(field: field_t fields)
: Lemma
(requires (union_get_case s == Some field))
(ensures (
union_set_field tn n fields field (union_get_field s field) == s
))
[SMTPat (union_set_field tn n fields (union_get_field s field))]
[@@noextract_to "krml"] // proof-only
val union0 (tn: Type0) (#tf: Type0) (n: string) (fields: field_description_t tf) : Tot (typedef (union_t0 tn n fields))
inline_for_extraction
[@@noextract_to "krml"; norm_field_attr] // proof-only
let union (#tf: Type0) (n: string) (#tn: Type0) (# [solve_mk_string_t ()] prf: squash (norm norm_typestring (mk_string_t n == tn))) (fields: field_description_t tf) : Tot (typedef (union_t0 tn n fields))
= union0 tn #tf n fields
val union_get_case_unknown
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
: Lemma
(union_get_case (unknown (union0 tn n fields)) == None)
[SMTPat (unknown (union0 tn n fields))]
val union_set_field_unknown
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
(field: field_t fields)
: Lemma
(union_set_field tn n fields field (unknown (fields.fd_typedef field)) == unknown (union0 tn n fields))
[SMTPat (union_set_field tn n fields field (unknown (fields.fd_typedef field)))]
val union_get_case_uninitialized
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
: Lemma
(union_get_case (uninitialized (union0 tn n fields)) == None)
[SMTPat (uninitialized (union0 tn n fields))]
val mk_fraction_union_get_case
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(p: P.perm)
: Lemma
(requires (fractionable (union0 tn n fields) s))
(ensures (
union_get_case (mk_fraction (union0 tn n fields) s p) == union_get_case s
))
[SMTPat (union_get_case (mk_fraction (union0 tn n fields) s p))]
val fractionable_union_get_field
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(field: field_t fields)
: Lemma
(requires (union_get_case s == Some field))
(ensures (
fractionable (union0 tn n fields) s <==> fractionable (fields.fd_typedef field) (union_get_field s field)
))
[SMTPat (fractionable (union0 tn n fields) s); SMTPat (union_get_field s field)]
val mk_fraction_union_get_field
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(p: P.perm)
(field: field_t fields)
: Lemma
(requires (fractionable (union0 tn n fields) s /\ union_get_case s == Some field))
(ensures (union_get_field (mk_fraction (union0 tn n fields) s p) field == mk_fraction (fields.fd_typedef field) (union_get_field s field) p))
[SMTPat (union_get_field (mk_fraction (union0 tn n fields) s p) field)]
val mk_fraction_union_set_field
(tn: Type0)
(#tf: Type0)
(n: string)
(fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
(p: P.perm)
: Lemma
(requires (fractionable (fields.fd_typedef field) v))
(ensures (
fractionable (union0 tn n fields) (union_set_field tn n fields field v) /\
mk_fraction (union0 tn n fields) (union_set_field tn n fields field v) p == union_set_field tn n fields field (mk_fraction (fields.fd_typedef field) v p)
))
val full_union
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(s: union_t0 tn n fields)
(field: field_t fields)
: Lemma
(requires (union_get_case s == Some field))
(ensures (
full (union0 tn n fields) s <==> full (fields.fd_typedef field) (union_get_field s field)
))
[SMTPat (full (union0 tn n fields) s); SMTPat (union_get_field s field)]
let full_union_set_field_intro
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma
(requires (full (fields.fd_typedef field) v))
(ensures (
full (union0 tn n fields) (union_set_field tn n fields field v)
))
= full_union (union_set_field tn n fields field v) field
let full_union_set_field_elim
(#tn: Type0)
(#tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma
(requires (
full (union0 tn n fields) (union_set_field tn n fields field v)
))
(ensures (
full (fields.fd_typedef field) v
))
= full_union (union_set_field tn n fields field v) field | {
"checked_file": "/",
"dependencies": [
"Steel.ST.Util.fsti.checked",
"Steel.ST.C.Types.Fields.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.C.Typestring.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Steel.ST.C.Types.Union.fsti"
} | [
{
"abbrev": true,
"full_module": "Steel.FractionalPermission",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "Steel.C.Typestring",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types.Fields",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.Util",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | field: Steel.ST.C.Types.Fields.field_t fields -> v: Mkfield_description_t?.fd_type fields field
-> FStar.Pervasives.Lemma
(ensures
Steel.ST.C.Types.Base.full (Steel.ST.C.Types.Union.union0 tn n fields)
(Steel.ST.C.Types.Union.union_set_field tn n fields field v) <==>
Steel.ST.C.Types.Base.full (Mkfield_description_t?.fd_typedef fields field) v)
[
SMTPat (Steel.ST.C.Types.Base.full (Steel.ST.C.Types.Union.union0 tn n fields)
(Steel.ST.C.Types.Union.union_set_field tn n fields field v))
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.string",
"Steel.ST.C.Types.Fields.field_description_t",
"Steel.ST.C.Types.Fields.field_t",
"Steel.ST.C.Types.Fields.__proj__Mkfield_description_t__item__fd_type",
"FStar.Classical.move_requires",
"Steel.ST.C.Types.Base.full",
"Steel.ST.C.Types.Union.union_t0",
"Steel.ST.C.Types.Union.union0",
"Steel.ST.C.Types.Union.union_set_field",
"Steel.ST.C.Types.Fields.__proj__Mkfield_description_t__item__fd_typedef",
"Steel.ST.C.Types.Union.full_union_set_field_elim",
"Prims.unit",
"Steel.ST.C.Types.Union.full_union_set_field_intro",
"Prims.l_True",
"Prims.squash",
"Prims.l_iff",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.prop",
"Prims.Nil"
] | [] | false | false | true | false | false | let full_union_set_field
(#tn #tf: Type0)
(#n: string)
(#fields: field_description_t tf)
(field: field_t fields)
(v: fields.fd_type field)
: Lemma (requires True)
(ensures
(full (union0 tn n fields) (union_set_field tn n fields field v) <==>
full (fields.fd_typedef field) v))
[SMTPat (full (union0 tn n fields) (union_set_field tn n fields field v))] =
| Classical.move_requires (full_union_set_field_intro #tn #tf #n #fields field) v;
Classical.move_requires (full_union_set_field_elim #tn #tf #n #fields field) v | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.no_repeats | val no_repeats (l: list var) : Type0 | val no_repeats (l: list var) : Type0 | let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 47,
"end_line": 36,
"start_col": 0,
"start_line": 33
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | l: Prims.list Pulse.Syntax.Base.var -> Type0 | Prims.Tot | [
"total"
] | [] | [
"Prims.list",
"Pulse.Syntax.Base.var",
"Prims.l_True",
"Prims.l_and",
"Prims.l_not",
"FStar.List.Tot.Base.memP",
"Pulse.Checker.Prover.Substs.no_repeats"
] | [
"recursion"
] | false | false | false | true | true | let rec no_repeats (l: list var) : Type0 =
| match l with
| [] -> True
| x :: tl -> (~(L.memP x tl)) /\ no_repeats tl | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.ln_ss_t | val ln_ss_t (s:ss_t) : bool | val ln_ss_t (s:ss_t) : bool | let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 52,
"end_line": 70,
"start_col": 0,
"start_line": 69
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
} | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Pulse.Checker.Prover.Substs.ss_t -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"Pulse.Checker.Prover.Substs.ss_t",
"FStar.List.Tot.Base.for_all",
"Pulse.Syntax.Base.var",
"Pulse.Syntax.Naming.ln",
"FStar.Map.sel",
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"Prims.bool",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l"
] | [] | false | false | false | true | false | let ln_ss_t (s: ss_t) =
| List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.as_map | val as_map (ss:ss_t) : Map.t var term | val as_map (ss:ss_t) : Map.t var term | let as_map (ss:ss_t) = ss.m | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 27,
"end_line": 72,
"start_col": 0,
"start_line": 72
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ss: Pulse.Checker.Prover.Substs.ss_t -> FStar.Map.t Pulse.Syntax.Base.var Pulse.Syntax.Base.term | Prims.Tot | [
"total"
] | [] | [
"Pulse.Checker.Prover.Substs.ss_t",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"FStar.Map.t",
"Pulse.Syntax.Base.var",
"Pulse.Syntax.Base.term"
] | [] | false | false | false | true | false | let as_map (ss: ss_t) =
| ss.m | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.remove_map | val remove_map : m: Pulse.Checker.Prover.Substs.ss_map -> x: Pulse.Syntax.Base.var
-> FStar.Map.t Pulse.Syntax.Base.var Pulse.Syntax.Base.term | let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 74,
"end_line": 45,
"start_col": 0,
"start_line": 44
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
} | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: Pulse.Checker.Prover.Substs.ss_map -> x: Pulse.Syntax.Base.var
-> FStar.Map.t Pulse.Syntax.Base.var Pulse.Syntax.Base.term | Prims.Tot | [
"total"
] | [] | [
"Pulse.Checker.Prover.Substs.ss_map",
"Pulse.Syntax.Base.var",
"FStar.Map.restrict",
"Pulse.Syntax.Base.term",
"FStar.Set.complement",
"FStar.Set.singleton",
"FStar.Map.upd",
"Pulse.Syntax.Base.tm_unknown",
"FStar.Map.t"
] | [] | false | false | false | true | false | let remove_map (m: ss_map) (x: var) =
| Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown) | false |
|
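
The remove_map entry above drops x from the map's domain by restricting to the complement of {x} after overwriting x with tm_unknown. A small sanity property one might state over it, assuming the same module context; whether the trivial proof goes through as written depends on the select/contains lemmas of FStar.Map and FStar.Set, so treat it as a sketch:

let remove_map_not_contains (m:ss_map) (x:var)
  : Lemma (~ (Map.contains (remove_map m x) x))
  = ()
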
Vale.X64.Leakage.fst | Vale.X64.Leakage.monotone_ok_eval_block | val monotone_ok_eval_block: (codes:S.codes) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_codes codes fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[codes;1]) | val monotone_ok_eval_block: (codes:S.codes) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_codes codes fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[codes;1]) | let rec monotone_ok_eval code fuel s =
match code with
| Ins ins -> reveal_opaque (`%S.machine_eval_code_ins) S.machine_eval_code_ins
| Block block -> monotone_ok_eval_block block fuel s
| IfElse ifCond ifTrue ifFalse ->
let (st, b) = machine_eval_ocmp s ifCond in
if b then monotone_ok_eval ifTrue fuel st else monotone_ok_eval ifFalse fuel st
| While cond body ->
if fuel = 0 then ()
else
let (st, b) = machine_eval_ocmp s cond in
if not b then () else
monotone_ok_eval body (fuel - 1) st;
()
and monotone_ok_eval_block block fuel s =
match block with
| [] -> ()
| hd :: tl ->
let s' = machine_eval_code hd fuel s in
if None? s' then () else
monotone_ok_eval_block tl fuel (Some?.v s');
monotone_ok_eval hd fuel s | {
"file_name": "vale/code/arch/x64/Vale.X64.Leakage.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 30,
"end_line": 288,
"start_col": 0,
"start_line": 266
} | module Vale.X64.Leakage
open FStar.Mul
open Vale.X64.Machine_s
module S = Vale.X64.Machine_Semantics_s
open Vale.X64.Leakage_s
open Vale.X64.Leakage_Helpers
open Vale.X64.Leakage_Ins
unfold let machine_eval_ocmp = S.machine_eval_ocmp
unfold let machine_eval_code = S.machine_eval_code
unfold let machine_eval_codes = S.machine_eval_codes
unfold let machine_eval_while = S.machine_eval_while
#reset-options "--initial_ifuel 0 --max_ifuel 1 --initial_fuel 1 --max_fuel 1"
let normalize_taints (ts:analysis_taints) : analysis_taints =
let AnalysisTaints lts rts = ts in
AnalysisTaints lts (regs_to_map (map_to_regs rts))
let combine_reg_taints (regs1 regs2:reg_taint) : reg_taint =
FunctionalExtensionality.on reg (fun x -> merge_taint (regs1 x) (regs2 x))
let rec eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : bool =
if k = 0 then true
else regs1 (Reg rf (k - 1)) = regs2 (Reg rf (k - 1)) && eq_regs_file regs1 regs2 rf (k - 1)
let rec eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : bool =
if k = 0 then true
else eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1)) && eq_regs regs1 regs2 (k - 1)
let rec lemma_eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(ensures eq_regs_file regs1 regs2 rf k <==>
(forall (i:nat).{:pattern (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i)))
=
if k > 0 then lemma_eq_regs_file regs1 regs2 rf (k - 1)
let rec lemma_eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(ensures
eq_regs regs1 regs2 k <==>
(forall (i j:nat).{:pattern (Reg i j)} i < k /\ j < n_regs i ==>
regs1 (Reg i j) == regs2 (Reg i j)))
=
if k > 0 then (
lemma_eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1));
lemma_eq_regs regs1 regs2 (k - 1)
)
let eq_registers (regs1 regs2:reg_taint) : (b:bool{b <==> regs1 == regs2}) =
lemma_eq_regs regs1 regs2 n_reg_files;
let b = eq_regs regs1 regs2 n_reg_files in
if b then (
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
);
b
let eq_leakage_taints (ts1 ts2:leakage_taints) : (b:bool{b <==> ts1 == ts2}) =
eq_registers ts1.regTaint ts2.regTaint &&
ts1.flagsTaint = ts2.flagsTaint &&
ts1.cfFlagsTaint = ts2.cfFlagsTaint &&
ts1.ofFlagsTaint = ts2.ofFlagsTaint
let taintstate_monotone_regs (ts ts':reg_taint) =
(forall (r:reg).{:pattern (ts' r) \/ (ts r)}
Public? (ts' r) ==> Public? (ts r))
let taintstate_monotone (ts ts':analysis_taints) =
let ts = ts.lts in
let ts' = ts'.lts in
taintstate_monotone_regs ts.regTaint ts'.regTaint /\
(Public? (ts'.flagsTaint) ==> Public? (ts.flagsTaint)) /\
(Public? (ts'.cfFlagsTaint) ==> Public? (ts.cfFlagsTaint)) /\
(Public? (ts'.ofFlagsTaint) ==> Public? (ts.ofFlagsTaint))
let taintstate_monotone_trans (ts1:analysis_taints) (ts2:analysis_taints) (ts3:analysis_taints)
: Lemma (taintstate_monotone ts1 ts2 /\ taintstate_monotone ts2 ts3 ==> taintstate_monotone ts1 ts3) = ()
let isConstant_monotone (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isConstantTimeGivenStates code fuel ts2.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isConstantTimeGivenStates code fuel ts1.lts s1 s2)
= ()
let isExplicit_monotone (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code)
(fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts.lts ts1.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts.lts ts2.lts s1 s2)
= ()
let isExplicit_monotone2 (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints)
(code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts2.lts ts.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts1.lts ts.lts s1 s2)
= ()
let combine_leakage_taints (ts1:leakage_taints) (ts2:leakage_taints) : leakage_taints =
let LeakageTaints rs1 fs1 c1 o1 = ts1 in
let LeakageTaints rs2 fs2 c2 o2 = ts2 in
let rs = combine_reg_taints rs1 rs2 in
LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
let combine_analysis_taints (ts1:analysis_taints) (ts2:analysis_taints)
: (ts:analysis_taints{taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\ ts.lts == combine_leakage_taints ts1.lts ts2.lts})
=
let AnalysisTaints (LeakageTaints rs1_old fs1 c1 o1) rts1 = ts1 in
let AnalysisTaints (LeakageTaints rs2_old fs2 c2 o2) rts2 = ts2 in
let rts1 = ts1.rts in
let rts2 = ts2.rts in
let rs1 = map_to_regs rts1 in // \
let rs2 = map_to_regs rts2 in // - build efficient representations of reg_taint before calling combine_reg_taints
assert (FStar.FunctionalExtensionality.feq rs1 rs1_old);
assert (FStar.FunctionalExtensionality.feq rs2 rs2_old);
let rs = combine_reg_taints rs1 rs2 in
let rts = regs_to_map rs in
let lts = LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
in
AnalysisTaints lts rts
let count_public_register (regs:reg_taint) (r:reg) = if Public? (regs r) then 1 else 0
let rec count_public_registers_file (regs:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : nat =
if k = 0 then 0
else count_public_register regs (Reg rf (k - 1)) + count_public_registers_file regs rf (k - 1)
let rec lemma_count_public_registers_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k
)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i:nat).{:pattern regs1 (Reg rf i) \/ regs2 (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))
)
=
if k > 0 then lemma_count_public_registers_file regs1 regs2 rf (k - 1)
let rec count_public_registers (regs:reg_taint) (k:nat{k <= n_reg_files}) : nat =
if k = 0 then 0
else count_public_registers_file regs (k - 1) (n_regs (k - 1)) + count_public_registers regs (k - 1)
let rec lemma_count_public_registers (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers regs1 k >= count_public_registers regs2 k
)
(ensures
count_public_registers regs1 k == count_public_registers regs2 k /\
(forall (r:reg).{:pattern regs1 r \/ regs2 r} Reg?.rf r < k ==> regs1 r == regs2 r)
)
=
if k > 0 then (
let n = n_regs (k - 1) in
if count_public_registers_file regs1 (k - 1) n >= count_public_registers_file regs2 (k - 1) n then
lemma_count_public_registers_file regs1 regs2 (k - 1) n;
lemma_count_public_registers regs1 regs2 (k - 1)
)
let count_flagTaint (ts:analysis_taints) : nat = if Public? ts.lts.flagsTaint then 1 else 0
let count_cfFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.cfFlagsTaint then 1 else 0
let count_ofFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.ofFlagsTaint then 1 else 0
let count_publics (ts:analysis_taints) : nat =
count_public_registers ts.lts.regTaint n_reg_files +
count_flagTaint ts +
count_cfFlagTaint ts +
count_ofFlagTaint ts
let monotone_decreases_count (ts ts':analysis_taints) : Lemma
(requires taintstate_monotone ts ts' /\ not (eq_leakage_taints ts.lts ts'.lts))
(ensures count_publics ts' < count_publics ts)
=
let regs1 = ts'.lts.regTaint in
let regs2 = ts.lts.regTaint in
if count_public_registers regs1 n_reg_files >= count_public_registers regs2 n_reg_files then (
lemma_count_public_registers regs1 regs2 n_reg_files;
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
)
val check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[block])
val check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 1])
val check_if_loop_consumes_fixed_time (code:S.code{While? code}) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 0])
#set-options "--z3refresh --z3rlimit 600"
let rec check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : bool & analysis_taints =
match block with
| [] -> true, ts
| hd::tl -> let fixedTime, ts_int = check_if_code_consumes_fixed_time hd ts in
if (not fixedTime) then fixedTime, ts_int
else check_if_block_consumes_fixed_time tl ts_int
and check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : bool & analysis_taints =
match code with
| Ins ins -> let b, ts = check_if_ins_consumes_fixed_time ins ts in b, ts
| Block block -> check_if_block_consumes_fixed_time block ts
| IfElse ifCond ifTrue ifFalse ->
let o1 = operand_taint 0 (S.get_fst_ocmp ifCond) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp ifCond) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then (false, ts)
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp ifCond) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp ifCond) ts in
if (not o2Public) then (false, ts)
else
let validIfTrue, tsIfTrue = check_if_code_consumes_fixed_time ifTrue ts in
if (not validIfTrue) then (false, ts)
else
let validIfFalse, tsIfFalse = check_if_code_consumes_fixed_time ifFalse ts in
if (not validIfFalse) then (false, ts)
else
(true, combine_analysis_taints tsIfTrue tsIfFalse)
| While cond body -> check_if_loop_consumes_fixed_time code ts
and check_if_loop_consumes_fixed_time c (ts:analysis_taints) : (bool & analysis_taints) =
let ts = normalize_taints ts in
let While pred body = c in
let o1 = operand_taint 0 (S.get_fst_ocmp pred) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp pred) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then false, ts
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp pred) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp pred) ts in
if (not o2Public) then (false, ts)
else
let fixedTime, next_ts = check_if_code_consumes_fixed_time body ts in
if (not fixedTime) then (false, ts)
else
let combined_ts = combine_analysis_taints ts next_ts in
assert (taintstate_monotone ts combined_ts);
if eq_leakage_taints combined_ts.lts ts.lts then
true, combined_ts
else (
monotone_decreases_count ts combined_ts;
check_if_loop_consumes_fixed_time c combined_ts
)
val monotone_ok_eval: (code:S.code) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_code code fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[code; 0])
val monotone_ok_eval_block: (codes:S.codes) -> (fuel:nat) -> (s:S.machine_state) -> Lemma
(requires True)
(ensures (let s' = machine_eval_codes codes fuel s in
Some? s' /\ (Some?.v s').S.ms_ok ==> s.S.ms_ok))
(decreases %[codes;1]) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Leakage_s.fst.checked",
"Vale.X64.Leakage_Ins.fsti.checked",
"Vale.X64.Leakage_Helpers.fst.checked",
"Vale.Lib.MapTree.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Leakage.fst"
} | [
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 2,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": true,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
codes: Vale.X64.Machine_Semantics_s.codes ->
fuel: Prims.nat ->
s: Vale.X64.Machine_Semantics_s.machine_state
-> FStar.Pervasives.Lemma
(ensures
(let s' = Vale.X64.Leakage.machine_eval_codes codes fuel s in
Some? s' /\ Mkmachine_state?.ms_ok (Some?.v s') ==> Mkmachine_state?.ms_ok s))
(decreases %[codes;1]) | FStar.Pervasives.Lemma | [
"lemma",
""
] | [
"monotone_ok_eval",
"monotone_ok_eval_block"
] | [
"Vale.X64.Machine_Semantics_s.codes",
"Prims.nat",
"Vale.X64.Machine_Semantics_s.machine_state",
"Vale.X64.Bytes_Code_s.code_t",
"Vale.X64.Machine_Semantics_s.instr_annotation",
"Prims.list",
"Vale.X64.Leakage.monotone_ok_eval",
"Prims.unit",
"FStar.Pervasives.Native.uu___is_None",
"Prims.bool",
"Vale.X64.Leakage.monotone_ok_eval_block",
"FStar.Pervasives.Native.__proj__Some__item__v",
"FStar.Pervasives.Native.option",
"Vale.X64.Leakage.machine_eval_code"
] | [
"mutual recursion"
] | false | false | true | false | false | let rec monotone_ok_eval_block block fuel s =
| match block with
| [] -> ()
| hd :: tl ->
let s' = machine_eval_code hd fuel s in
if None? s' then () else monotone_ok_eval_block tl fuel (Some?.v s');
monotone_ok_eval hd fuel s | false |
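
The entry above pairs monotone_ok_eval_block with monotone_ok_eval (the two are mutually recursive). As an illustration of how a caller could invoke the block-level lemma, a sketch in the same module context — the name caller_sketch is hypothetical, and the glue proof is expected, not guaranteed, to be discharged automatically:

let caller_sketch (block:S.codes) (fuel:nat) (s:S.machine_state)
  : Lemma
    (requires Some? (machine_eval_codes block fuel s) /\
              (Some?.v (machine_eval_codes block fuel s)).S.ms_ok)
    (ensures s.S.ms_ok)
  = monotone_ok_eval_block block fuel s
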
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.empty | val empty : ss_t | val empty : ss_t | let empty = { l = []; m = Map.const_on Set.empty tm_unknown } | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 61,
"end_line": 74,
"start_col": 0,
"start_line": 74
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Pulse.Checker.Prover.Substs.ss_t | Prims.Tot | [
"total"
] | [] | [
"Pulse.Checker.Prover.Substs.Mkss_t",
"Prims.Nil",
"Pulse.Syntax.Base.var",
"FStar.Map.const_on",
"Pulse.Syntax.Base.term",
"FStar.Set.empty",
"Pulse.Syntax.Base.tm_unknown"
] | [] | false | false | false | true | false | let empty =
| { l = []; m = Map.const_on Set.empty tm_unknown } | false |
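
The empty entry above relies on the refinement of its m field, namely is_dom [] (Map.const_on Set.empty tm_unknown). Restated as a standalone lemma, assuming the same module context (a sketch: this is exactly the fact that type-checking empty already discharges):

let empty_dom_ok () : Lemma (is_dom [] (Map.const_on Set.empty tm_unknown)) = ()
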
Hacl.Impl.Frodo.KEM.Decaps.fst | Hacl.Impl.Frodo.KEM.Decaps.get_bpp_cp_matrices | val get_bpp_cp_matrices:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu_decode:lbytes (bytes_mu a)
-> seed_se:lbytes (crypto_bytes a)
-> sk:lbytes (crypto_secretkeybytes a)
-> bpp_matrix:matrix_t params_nbar (params_n a)
-> cp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h seed_se /\ live h mu_decode /\ live h sk /\
live h bpp_matrix /\ live h cp_matrix /\
loc_pairwise_disjoint [loc mu_decode; loc seed_se; loc sk; loc bpp_matrix; loc cp_matrix])
(ensures fun h0 _ h1 -> modifies (loc bpp_matrix |+| loc cp_matrix) h0 h1 /\
(as_matrix h1 bpp_matrix, as_matrix h1 cp_matrix) ==
S.get_bpp_cp_matrices a gen_a (as_seq h0 mu_decode) (as_seq h0 seed_se) (as_seq h0 sk)) | val get_bpp_cp_matrices:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu_decode:lbytes (bytes_mu a)
-> seed_se:lbytes (crypto_bytes a)
-> sk:lbytes (crypto_secretkeybytes a)
-> bpp_matrix:matrix_t params_nbar (params_n a)
-> cp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h seed_se /\ live h mu_decode /\ live h sk /\
live h bpp_matrix /\ live h cp_matrix /\
loc_pairwise_disjoint [loc mu_decode; loc seed_se; loc sk; loc bpp_matrix; loc cp_matrix])
(ensures fun h0 _ h1 -> modifies (loc bpp_matrix |+| loc cp_matrix) h0 h1 /\
(as_matrix h1 bpp_matrix, as_matrix h1 cp_matrix) ==
S.get_bpp_cp_matrices a gen_a (as_seq h0 mu_decode) (as_seq h0 seed_se) (as_seq h0 sk)) | let get_bpp_cp_matrices a gen_a mu_decode seed_se sk bpp_matrix cp_matrix =
push_frame ();
let sp_matrix = matrix_create params_nbar (params_n a) in
let ep_matrix = matrix_create params_nbar (params_n a) in
let epp_matrix = matrix_create params_nbar params_nbar in
get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix;
get_bpp_cp_matrices_ a gen_a mu_decode sk bpp_matrix cp_matrix sp_matrix ep_matrix epp_matrix;
clear_matrix3 a sp_matrix ep_matrix epp_matrix;
pop_frame () | {
"file_name": "code/frodo/Hacl.Impl.Frodo.KEM.Decaps.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 144,
"start_col": 0,
"start_line": 136
} | module Hacl.Impl.Frodo.KEM.Decaps
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open LowStar.Buffer
open Lib.IntTypes
open Lib.Buffer
open Hacl.Impl.Matrix
open Hacl.Impl.Frodo.Params
open Hacl.Impl.Frodo.KEM
open Hacl.Impl.Frodo.KEM.Encaps
open Hacl.Impl.Frodo.Encode
open Hacl.Impl.Frodo.Pack
open Hacl.Impl.Frodo.Sample
open Hacl.Frodo.Random
module ST = FStar.HyperStack.ST
module LSeq = Lib.Sequence
module FP = Spec.Frodo.Params
module KG = Hacl.Impl.Frodo.KEM.KeyGen
module S = Spec.Frodo.KEM.Decaps
module M = Spec.Matrix
#set-options "--z3rlimit 100 --fuel 0 --ifuel 0"
inline_for_extraction noextract
val get_bp_c_matrices:
a:FP.frodo_alg
-> ct:lbytes (crypto_ciphertextbytes a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> c_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h ct /\ live h bp_matrix /\ live h c_matrix /\
disjoint bp_matrix ct /\ disjoint c_matrix ct /\ disjoint bp_matrix c_matrix)
(ensures fun h0 _ h1 -> modifies (loc bp_matrix |+| loc c_matrix) h0 h1 /\
(as_matrix h1 bp_matrix, as_matrix h1 c_matrix) == S.get_bp_c_matrices a (as_seq h0 ct))
let get_bp_c_matrices a ct bp_matrix c_matrix =
let c1 = sub ct 0ul (ct1bytes_len a) in
let c2 = sub ct (ct1bytes_len a) (ct2bytes_len a) in
frodo_unpack params_nbar (params_n a) (params_logq a) c1 bp_matrix;
frodo_unpack params_nbar params_nbar (params_logq a) c2 c_matrix
inline_for_extraction noextract
val frodo_mu_decode:
a:FP.frodo_alg
-> s_bytes:lbytes (secretmatrixbytes_len a)
-> bp_matrix:matrix_t params_nbar (params_n a)
-> c_matrix:matrix_t params_nbar params_nbar
-> mu_decode:lbytes (bytes_mu a)
-> Stack unit
(requires fun h ->
live h s_bytes /\ live h bp_matrix /\
live h c_matrix /\ live h mu_decode /\
disjoint mu_decode s_bytes /\
as_seq h (mu_decode) == Seq.create (v (bytes_mu a)) (u8 0))
(ensures fun h0 _ h1 -> modifies (loc mu_decode) h0 h1 /\
as_seq h1 mu_decode ==
S.frodo_mu_decode a (as_seq h0 s_bytes) (as_matrix h0 bp_matrix) (as_matrix h0 c_matrix))
let frodo_mu_decode a s_bytes bp_matrix c_matrix mu_decode =
push_frame();
let s_matrix = matrix_create (params_n a) params_nbar in
let m_matrix = matrix_create params_nbar params_nbar in
matrix_from_lbytes s_bytes s_matrix;
matrix_mul_s bp_matrix s_matrix m_matrix;
matrix_sub c_matrix m_matrix;
frodo_key_decode (params_logq a) (params_extracted_bits a) params_nbar m_matrix mu_decode;
clear_matrix s_matrix;
clear_matrix m_matrix;
pop_frame()
inline_for_extraction noextract
val get_bpp_cp_matrices_:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu_decode:lbytes (bytes_mu a)
-> sk:lbytes (crypto_secretkeybytes a)
-> bpp_matrix:matrix_t params_nbar (params_n a)
-> cp_matrix:matrix_t params_nbar params_nbar
-> sp_matrix:matrix_t params_nbar (params_n a)
-> ep_matrix:matrix_t params_nbar (params_n a)
-> epp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h mu_decode /\ live h sk /\
live h bpp_matrix /\ live h cp_matrix /\
live h sp_matrix /\ live h ep_matrix /\ live h epp_matrix /\
loc_pairwise_disjoint [loc mu_decode; loc sk; loc bpp_matrix;
loc cp_matrix; loc sp_matrix; loc ep_matrix; loc epp_matrix])
(ensures fun h0 _ h1 -> modifies (loc bpp_matrix |+| loc cp_matrix) h0 h1 /\
(as_matrix h1 bpp_matrix, as_matrix h1 cp_matrix) ==
S.get_bpp_cp_matrices_ a gen_a (as_seq h0 mu_decode) (as_seq h0 sk)
(as_matrix h0 sp_matrix) (as_matrix h0 ep_matrix) (as_matrix h0 epp_matrix))
let get_bpp_cp_matrices_ a gen_a mu_decode sk bpp_matrix cp_matrix sp_matrix ep_matrix epp_matrix =
FP.expand_crypto_secretkeybytes a;
FP.expand_crypto_publickeybytes a;
let pk = sub sk (crypto_bytes a) (crypto_publickeybytes a) in
let seed_a = sub pk 0ul bytes_seed_a in
let b = sub pk bytes_seed_a (crypto_publickeybytes a -! bytes_seed_a) in
frodo_mul_add_sa_plus_e a gen_a seed_a sp_matrix ep_matrix bpp_matrix;
frodo_mul_add_sb_plus_e_plus_mu a mu_decode b sp_matrix epp_matrix cp_matrix;
mod_pow2 (params_logq a) bpp_matrix;
mod_pow2 (params_logq a) cp_matrix
#push-options "--z3rlimit 150"
inline_for_extraction noextract
val get_bpp_cp_matrices:
a:FP.frodo_alg
-> gen_a:FP.frodo_gen_a{is_supported gen_a}
-> mu_decode:lbytes (bytes_mu a)
-> seed_se:lbytes (crypto_bytes a)
-> sk:lbytes (crypto_secretkeybytes a)
-> bpp_matrix:matrix_t params_nbar (params_n a)
-> cp_matrix:matrix_t params_nbar params_nbar
-> Stack unit
(requires fun h ->
live h seed_se /\ live h mu_decode /\ live h sk /\
live h bpp_matrix /\ live h cp_matrix /\
loc_pairwise_disjoint [loc mu_decode; loc seed_se; loc sk; loc bpp_matrix; loc cp_matrix])
(ensures fun h0 _ h1 -> modifies (loc bpp_matrix |+| loc cp_matrix) h0 h1 /\
(as_matrix h1 bpp_matrix, as_matrix h1 cp_matrix) ==
S.get_bpp_cp_matrices a gen_a (as_seq h0 mu_decode) (as_seq h0 seed_se) (as_seq h0 sk)) | {
"checked_file": "/",
"dependencies": [
"Spec.Matrix.fst.checked",
"Spec.Frodo.Params.fst.checked",
"Spec.Frodo.KEM.Decaps.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Impl.Matrix.fst.checked",
"Hacl.Impl.Frodo.Sample.fst.checked",
"Hacl.Impl.Frodo.Params.fst.checked",
"Hacl.Impl.Frodo.Pack.fst.checked",
"Hacl.Impl.Frodo.KEM.KeyGen.fst.checked",
"Hacl.Impl.Frodo.KEM.Encaps.fst.checked",
"Hacl.Impl.Frodo.KEM.fst.checked",
"Hacl.Impl.Frodo.Encode.fst.checked",
"Hacl.Frodo.Random.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Frodo.KEM.Decaps.fst"
} | [
{
"abbrev": true,
"full_module": "Spec.Matrix",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.KEM.Decaps",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Frodo.KEM.KeyGen",
"short_module": "KG"
},
{
"abbrev": true,
"full_module": "Spec.Frodo.Params",
"short_module": "FP"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Frodo.Random",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Sample",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Pack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Encode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM.Encaps",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.Params",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Matrix",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.Frodo.KEM",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 150,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Frodo.Params.frodo_alg ->
gen_a: Spec.Frodo.Params.frodo_gen_a{Hacl.Impl.Frodo.Params.is_supported gen_a} ->
mu_decode: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.bytes_mu a) ->
seed_se: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_bytes a) ->
sk: Hacl.Impl.Matrix.lbytes (Hacl.Impl.Frodo.Params.crypto_secretkeybytes a) ->
bpp_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
(Hacl.Impl.Frodo.Params.params_n a) ->
cp_matrix:
Hacl.Impl.Matrix.matrix_t Hacl.Impl.Frodo.Params.params_nbar
Hacl.Impl.Frodo.Params.params_nbar
-> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Spec.Frodo.Params.frodo_alg",
"Spec.Frodo.Params.frodo_gen_a",
"Prims.b2t",
"Hacl.Impl.Frodo.Params.is_supported",
"Hacl.Impl.Matrix.lbytes",
"Hacl.Impl.Frodo.Params.bytes_mu",
"Hacl.Impl.Frodo.Params.crypto_bytes",
"Hacl.Impl.Frodo.Params.crypto_secretkeybytes",
"Hacl.Impl.Matrix.matrix_t",
"Hacl.Impl.Frodo.Params.params_nbar",
"Hacl.Impl.Frodo.Params.params_n",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Hacl.Impl.Frodo.KEM.Encaps.clear_matrix3",
"Hacl.Impl.Frodo.KEM.Decaps.get_bpp_cp_matrices_",
"Hacl.Impl.Frodo.KEM.Encaps.get_sp_ep_epp_matrices",
"Lib.Buffer.lbuffer_t",
"Lib.Buffer.MUT",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U16",
"Lib.IntTypes.SEC",
"Lib.IntTypes.mul",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Hacl.Impl.Matrix.matrix_create",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let get_bpp_cp_matrices a gen_a mu_decode seed_se sk bpp_matrix cp_matrix =
| push_frame ();
let sp_matrix = matrix_create params_nbar (params_n a) in
let ep_matrix = matrix_create params_nbar (params_n a) in
let epp_matrix = matrix_create params_nbar params_nbar in
get_sp_ep_epp_matrices a seed_se sp_matrix ep_matrix epp_matrix;
get_bpp_cp_matrices_ a gen_a mu_decode sk bpp_matrix cp_matrix sp_matrix ep_matrix epp_matrix;
clear_matrix3 a sp_matrix ep_matrix epp_matrix;
pop_frame () | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.fv_eq | val fv_eq : fv -> fv -> Tot bool | val fv_eq : fv -> fv -> Tot bool | let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2 | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 9,
"end_line": 23,
"start_col": 0,
"start_line": 20
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | fv1: FStar.Stubs.Reflection.Types.fv -> fv2: FStar.Stubs.Reflection.Types.fv -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"FStar.Stubs.Reflection.Types.fv",
"Prims.op_Equality",
"FStar.Stubs.Reflection.Types.name",
"FStar.Stubs.Reflection.V1.Builtins.inspect_fv",
"Prims.bool"
] | [] | false | false | false | true | false | let fv_eq fv1 fv2 =
| let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2 | false |
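
fv_eq in the entry above compares two resolved top-level names by their inspected name lists. A usage sketch against a fixed constructor, assuming the FStar.Tactics / reflection context of that entry (the name is_nil_fv is illustrative only):

let is_nil_fv (f : fv) : Tot bool = fv_eq f (pack_fv ["Prims"; "Nil"])
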
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.is_dom | val is_dom (l: ss_dom) (m: ss_map) : Type0 | val is_dom (l: ss_dom) (m: ss_map) : Type0 | let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 50,
"end_line": 51,
"start_col": 0,
"start_line": 47
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | l: Pulse.Checker.Prover.Substs.ss_dom -> m: Pulse.Checker.Prover.Substs.ss_map -> Type0 | Prims.Tot | [
"total"
] | [] | [
"Pulse.Checker.Prover.Substs.ss_dom",
"Pulse.Checker.Prover.Substs.ss_map",
"FStar.Set.equal",
"Pulse.Syntax.Base.var",
"FStar.Map.domain",
"Pulse.Syntax.Base.term",
"FStar.Set.empty",
"Prims.list",
"Prims.l_and",
"Prims.b2t",
"FStar.Map.contains",
"Pulse.Checker.Prover.Substs.is_dom",
"Pulse.Checker.Prover.Substs.remove_map"
] | [
"recursion"
] | false | false | false | true | true | let rec is_dom (l: ss_dom) (m: ss_map) : Type0 =
| match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x :: tl -> Map.contains m x /\ is_dom tl (remove_map m x) | false |
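
is_dom in the entry above checks a domain list against a map one binding at a time, removing each listed variable as it goes. A small consequence one might state, assuming the same module context; the trivial proof needs the definition unfolded once, so treat it as a sketch:

let is_dom_head (l:ss_dom{Cons? l}) (m:ss_map)
  : Lemma (requires is_dom l m) (ensures Map.contains m (L.hd l))
  = ()
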
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.tail | val tail (ss: ss_t{Cons? ss.l}) : ss_t | val tail (ss: ss_t{Cons? ss.l}) : ss_t | let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) } | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 53,
"end_line": 92,
"start_col": 0,
"start_line": 91
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t } | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ss: Pulse.Checker.Prover.Substs.ss_t{Cons? (Mkss_t?.l ss)} -> Pulse.Checker.Prover.Substs.ss_t | Prims.Tot | [
"total"
] | [] | [
"Pulse.Checker.Prover.Substs.ss_t",
"Prims.b2t",
"Prims.uu___is_Cons",
"Pulse.Syntax.Base.var",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l",
"Pulse.Checker.Prover.Substs.Mkss_t",
"FStar.List.Tot.Base.tl",
"Pulse.Checker.Prover.Substs.remove_map",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"FStar.List.Tot.Base.hd"
] | [] | false | false | false | false | false | let tail (ss: ss_t{Cons? ss.l}) : ss_t =
| { l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) } | false |
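
tail in the entry above drops the most recently pushed binding, using remove_map to shrink the map in step with the domain list. A companion accessor one might write next to it, assuming the same module context (the name head is illustrative, not taken from the entry):

let head (ss:ss_t { Cons? ss.l }) : var & term =
  L.hd ss.l, Map.sel ss.m (L.hd ss.l)
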
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.coerce_eq | val coerce_eq: #a: Type -> #b: Type -> x: a -> squash (a == b) -> y: b{y == x} | val coerce_eq: #a: Type -> #b: Type -> x: a -> squash (a == b) -> y: b{y == x} | let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 71,
"end_line": 31,
"start_col": 0,
"start_line": 31
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: a -> _: Prims.squash (a == b) -> y: b{y == x} | Prims.Tot | [
"total"
] | [] | [
"Prims.squash",
"Prims.eq2"
] | [] | false | false | false | false | false | let coerce_eq (#a: Type) (#b: Type) (x: a) (_: squash (a == b)) : y: b{y == x} =
| x | false |
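
coerce_eq in the entry above transports a value across a squashed type equality. A trivial wrapper that makes the direction of use explicit (transport is an illustrative name, not part of the module):

let transport (#a #b:Type) (pf:squash (a == b)) (x:a) : b = coerce_eq x pf
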
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.fv_eq_name | val fv_eq_name : fv -> name -> Tot bool | val fv_eq_name : fv -> name -> Tot bool | let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 9,
"end_line": 29,
"start_col": 0,
"start_line": 27
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | fv: FStar.Stubs.Reflection.Types.fv -> n: FStar.Stubs.Reflection.Types.name -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"FStar.Stubs.Reflection.Types.fv",
"FStar.Stubs.Reflection.Types.name",
"Prims.op_Equality",
"FStar.Stubs.Reflection.V1.Builtins.inspect_fv",
"Prims.bool"
] | [] | false | false | false | true | false | let fv_eq_name fv n =
| let fvn = inspect_fv fv in
fvn = n | false |
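A minimal usage sketch for the `fv_eq_name` entry above, not part of the recorded source: it compares a resolved `fv` against a fully qualified name written as a `list string`. The helper name `is_length_fv` and the particular qualified name are illustrative assumptions.

(* Illustrative sketch only: checks whether a free variable resolves to
   FStar.List.Tot.Base.length, using fv_eq_name from the row above. *)
val is_length_fv : fv -> Tot bool
let is_length_fv f =
  fv_eq_name f ["FStar"; "List"; "Tot"; "Base"; "length"]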
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.push | val push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t | val push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t | let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t } | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 26,
"end_line": 89,
"start_col": 0,
"start_line": 85
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
ss: Pulse.Checker.Prover.Substs.ss_t ->
x: Pulse.Syntax.Base.var{~(Pulse.Checker.Prover.Substs.contains ss x)} ->
t: Pulse.Syntax.Base.term
-> Pulse.Checker.Prover.Substs.ss_t | Prims.Tot | [
"total"
] | [] | [
"Pulse.Checker.Prover.Substs.ss_t",
"Pulse.Syntax.Base.var",
"Prims.l_not",
"Prims.b2t",
"Pulse.Checker.Prover.Substs.contains",
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.Mkss_t",
"Prims.Cons",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l",
"FStar.Map.upd",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"Prims.unit",
"Pulse.Checker.Prover.Substs.is_dom_push"
] | [] | false | false | false | false | false | let push (ss: ss_t) (x: var{~(contains ss x)}) (t: term) : ss_t =
| is_dom_push ss.l ss.m x t;
{ l = x :: ss.l; m = Map.upd ss.m x t } | false |
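A minimal call-site sketch for the `push` entry above, not part of the recorded source: `push` extends a substitution with one binding and requires the variable to be fresh. The helper `extend_one` is hypothetical, and the freshness side condition is assumed here rather than proved.

(* Illustrative sketch only: the caller is responsible for freshness of x. *)
let extend_one (ss:ss_t) (x:var) (t:term) : ss_t =
  assume (~ (contains ss x));  (* assumed; real call sites must establish this *)
  push ss x t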
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.push_ss | val push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) }) : ss_t | val push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) }) : ss_t | let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 53,
"end_line": 99,
"start_col": 0,
"start_line": 94
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) } | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
ss1: Pulse.Checker.Prover.Substs.ss_t ->
ss2:
Pulse.Checker.Prover.Substs.ss_t
{ FStar.Set.disjoint (Pulse.Checker.Prover.Substs.dom ss1)
(Pulse.Checker.Prover.Substs.dom ss2) }
-> Prims.Tot Pulse.Checker.Prover.Substs.ss_t | Prims.Tot | [
"",
"total"
] | [] | [
"Pulse.Checker.Prover.Substs.ss_t",
"FStar.Set.disjoint",
"Pulse.Syntax.Base.var",
"Pulse.Checker.Prover.Substs.dom",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l",
"Prims.list",
"Pulse.Checker.Prover.Substs.push_ss",
"Pulse.Checker.Prover.Substs.push",
"FStar.Map.sel",
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"Pulse.Checker.Prover.Substs.tail"
] | [
"recursion"
] | false | false | false | false | false | let rec push_ss (ss1: ss_t) (ss2: ss_t{Set.disjoint (dom ss1) (dom ss2)})
: Tot ss_t (decreases L.length ss2.l) =
| match ss2.l with
| [] -> ss1
| x :: tl -> push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2) | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.is_dom_mem | val is_dom_mem (l: ss_dom) (m: ss_map)
: Lemma (requires is_dom l m)
(ensures
forall (x: var). {:pattern L.memP x l\/Map.contains m x} L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] | val is_dom_mem (l: ss_dom) (m: ss_map)
: Lemma (requires is_dom l m)
(ensures
forall (x: var). {:pattern L.memP x l\/Map.contains m x} L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] | let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 43,
"end_line": 61,
"start_col": 0,
"start_line": 53
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | l: Pulse.Checker.Prover.Substs.ss_dom -> m: Pulse.Checker.Prover.Substs.ss_map
-> FStar.Pervasives.Lemma (requires Pulse.Checker.Prover.Substs.is_dom l m)
(ensures
forall (x: Pulse.Syntax.Base.var).
{:pattern FStar.List.Tot.Base.memP x l\/FStar.Map.contains m x}
FStar.List.Tot.Base.memP x l <==> FStar.Map.contains m x)
[SMTPat (Pulse.Checker.Prover.Substs.is_dom l m)] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Pulse.Checker.Prover.Substs.ss_dom",
"Pulse.Checker.Prover.Substs.ss_map",
"Pulse.Syntax.Base.var",
"Prims.list",
"Pulse.Checker.Prover.Substs.is_dom_mem",
"Pulse.Checker.Prover.Substs.remove_map",
"Prims.unit",
"Pulse.Checker.Prover.Substs.is_dom",
"Prims.squash",
"Prims.l_Forall",
"Prims.l_iff",
"FStar.List.Tot.Base.memP",
"Prims.b2t",
"FStar.Map.contains",
"Pulse.Syntax.Base.term",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [
"recursion"
] | false | false | true | false | false | let rec is_dom_mem (l: ss_dom) (m: ss_map)
: Lemma (requires is_dom l m)
(ensures
forall (x: var). {:pattern L.memP x l\/Map.contains m x} L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
| match l with
| [] -> ()
| y :: tl -> is_dom_mem tl (remove_map m y) | false |
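An illustrative consequence of the `is_dom_mem` entry above, not part of the recorded source: the `is_dom` invariant ties membership in the domain list to `Map.contains` on the map, so the head of a non-empty domain list is in the map. The name `head_in_dom` is hypothetical.

(* Illustrative sketch only: the head of a non-empty domain list is in the map. *)
let head_in_dom (x:var) (tl:ss_dom { no_repeats (x::tl) }) (m:ss_map { is_dom (x::tl) m })
  : Lemma (Map.contains m x)
  = ()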
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.is_dom_push | val is_dom_push (l: ss_dom) (m: ss_map{is_dom l m}) (x: var{~(Map.contains m x)}) (t: term)
: Lemma (is_dom (x :: l) (Map.upd m x t)) | val is_dom_push (l: ss_dom) (m: ss_map{is_dom l m}) (x: var{~(Map.contains m x)}) (t: term)
: Lemma (is_dom (x :: l) (Map.upd m x t)) | let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 53,
"end_line": 83,
"start_col": 0,
"start_line": 76
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown } | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
l: Pulse.Checker.Prover.Substs.ss_dom ->
m: Pulse.Checker.Prover.Substs.ss_map{Pulse.Checker.Prover.Substs.is_dom l m} ->
x: Pulse.Syntax.Base.var{~(FStar.Map.contains m x)} ->
t: Pulse.Syntax.Base.term
-> FStar.Pervasives.Lemma
(ensures Pulse.Checker.Prover.Substs.is_dom (x :: l) (FStar.Map.upd m x t)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Pulse.Checker.Prover.Substs.ss_dom",
"Pulse.Checker.Prover.Substs.ss_map",
"Pulse.Checker.Prover.Substs.is_dom",
"Pulse.Syntax.Base.var",
"Prims.l_not",
"Prims.b2t",
"FStar.Map.contains",
"Pulse.Syntax.Base.term",
"Prims._assert",
"FStar.Map.equal",
"Pulse.Checker.Prover.Substs.remove_map",
"FStar.Map.upd",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.Cons",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | true | false | true | false | false | let is_dom_push (l: ss_dom) (m: ss_map{is_dom l m}) (x: var{~(Map.contains m x)}) (t: term)
: Lemma (is_dom (x :: l) (Map.upd m x t)) =
| assert (Map.equal (remove_map (Map.upd m x t) x) m) | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.diff | val diff (ss1 ss2:ss_t) : ss:ss_t { Set.disjoint (dom ss) (dom ss2) } | val diff (ss1 ss2:ss_t) : ss:ss_t { Set.disjoint (dom ss) (dom ss2) } | let diff ss1 ss2 = diff_aux ss1 ss2 empty | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 41,
"end_line": 120,
"start_col": 0,
"start_line": 120
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ss1: Pulse.Checker.Prover.Substs.ss_t -> ss2: Pulse.Checker.Prover.Substs.ss_t
-> ss:
Pulse.Checker.Prover.Substs.ss_t
{ FStar.Set.disjoint (Pulse.Checker.Prover.Substs.dom ss)
(Pulse.Checker.Prover.Substs.dom ss2) } | Prims.Tot | [
"total"
] | [] | [
"Pulse.Checker.Prover.Substs.ss_t",
"Pulse.Checker.Prover.Substs.diff_aux",
"Pulse.Checker.Prover.Substs.empty",
"FStar.Set.disjoint",
"Pulse.Syntax.Base.var",
"Pulse.Checker.Prover.Substs.dom"
] | [] | false | false | false | false | false | let diff ss1 ss2 =
| diff_aux ss1 ss2 empty | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.list_to_string | val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string | val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string | let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]" | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 74,
"end_line": 58,
"start_col": 0,
"start_line": 57
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: a -> FStar.Tactics.Effect.Tac Prims.string) -> ls: Prims.list a
-> FStar.Tactics.Effect.Tac Prims.string | FStar.Tactics.Effect.Tac | [] | [] | [
"Prims.string",
"Prims.list",
"Prims.op_Hat",
"FStar.Tactics.Util.fold_left"
] | [] | false | true | false | false | false | let list_to_string #a f ls =
| (Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]" | false |
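A minimal usage sketch for the `list_to_string` entry above, not part of the recorded source: it folds a per-element printer over a list, here paired with the tactics primitive `term_to_string`. The helper name `terms_to_string` is an illustrative assumption.

(* Illustrative sketch only: print a list of terms inside the Tac effect. *)
val terms_to_string : list term -> Tac string
let terms_to_string ts =
  list_to_string term_to_string ts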
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.remove_l | val remove_l (l: ss_dom) (x: var{L.memP x l})
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y: var). L.memP y r <==> (L.memP y l /\ y =!= x)) | val remove_l (l: ss_dom) (x: var{L.memP x l})
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y: var). L.memP y r <==> (L.memP y l /\ y =!= x)) | let rec remove_l (l:ss_dom) (x:var { L.memP x l })
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y:var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
match l with
| [] -> assert False; []
| y::tl ->
if y = x then tl
else y::(remove_l tl x) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 27,
"end_line": 151,
"start_col": 0,
"start_line": 143
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty
#push-options "--warn_error -271"
let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
()
#pop-options | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | l: Pulse.Checker.Prover.Substs.ss_dom -> x: Pulse.Syntax.Base.var{FStar.List.Tot.Base.memP x l}
-> Prims.Pure Pulse.Checker.Prover.Substs.ss_dom | Prims.Pure | [] | [] | [
"Pulse.Checker.Prover.Substs.ss_dom",
"Pulse.Syntax.Base.var",
"FStar.List.Tot.Base.memP",
"Prims.Nil",
"Prims.unit",
"Prims._assert",
"Prims.l_False",
"Prims.list",
"Prims.op_Equality",
"Prims.bool",
"Prims.Cons",
"Pulse.Checker.Prover.Substs.remove_l",
"Prims.l_True",
"Prims.l_Forall",
"Prims.l_iff",
"Prims.l_and",
"Prims.l_not",
"Prims.eq2"
] | [
"recursion"
] | false | false | false | false | false | let rec remove_l (l: ss_dom) (x: var{L.memP x l})
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y: var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
| match l with
| [] ->
assert False;
[]
| y :: tl -> if y = x then tl else y :: (remove_l tl x) | false |
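An illustrative client of the `remove_l` entry above, not part of the recorded source: it removes one occurrence of a variable from a duplicate-free list, provided the variable occurs in it. Removing the head element makes the `memP` precondition immediate; the name `remove_head` is hypothetical.

(* Illustrative sketch only: dropping the head variable of a duplicate-free list. *)
let remove_head (x:var) (tl:ss_dom { no_repeats (x::tl) }) : ss_dom =
  remove_l (x::tl) x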
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.opt_mk_app_norm | val opt_mk_app_norm : env -> option term -> list term -> Tac (option term) | val opt_mk_app_norm : env -> option term -> list term -> Tac (option term) | let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 52,
"end_line": 71,
"start_col": 0,
"start_line": 70
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: FStar.Stubs.Reflection.Types.env ->
opt_t: FStar.Pervasives.Native.option FStar.Stubs.Reflection.Types.term ->
params: Prims.list FStar.Stubs.Reflection.Types.term
-> FStar.Tactics.Effect.Tac (FStar.Pervasives.Native.option FStar.Stubs.Reflection.Types.term) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Stubs.Reflection.Types.env",
"FStar.Pervasives.Native.option",
"FStar.Stubs.Reflection.Types.term",
"Prims.list",
"FStar.InteractiveHelpers.Base.opt_tapply",
"FStar.InteractiveHelpers.Base.mk_app_norm"
] | [] | false | true | false | false | false | let opt_mk_app_norm e opt_t params =
| opt_tapply (fun t -> mk_app_norm e t params) opt_t | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.ss_comp | val ss_comp (c:comp) (ss:ss_t) : comp | val ss_comp (c:comp) (ss:ss_t) : comp | let rec ss_comp (c:comp) (ss:ss_t)
: Tot comp (decreases L.length ss.l) =
match ss.l with
| [] -> c
| y::tl ->
let c = subst_comp c [ NT y (Map.sel ss.m y) ] in
ss_comp c (tail ss) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 23,
"end_line": 200,
"start_col": 0,
"start_line": 194
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty
#push-options "--warn_error -271"
let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
()
#pop-options
let rec remove_l (l:ss_dom) (x:var { L.memP x l })
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y:var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
match l with
| [] -> assert False; []
| y::tl ->
if y = x then tl
else y::(remove_l tl x)
let rec is_dom_remove
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { Map.contains m x })
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] =
match l with
| [] -> ()
| y::tl ->
if x = y then ()
else let t_y = Map.sel m y in
let m1 = remove_map m y in
is_dom_remove tl m1 x;
assert (is_dom (remove_l tl x) (remove_map m1 x));
is_dom_push (remove_l tl x) (remove_map m1 x) y t_y;
assert (Map.equal (Map.upd (remove_map m1 x) y t_y)
(remove_map m x))
let rec ss_term (t:term) (ss:ss_t) : Tot term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_term t [ NT y (Map.sel ss.m y) ] in
ss_term t (tail ss)
let rec ss_st_term (t:st_term) (ss:ss_t) : Tot st_term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_st_term t [ NT y (Map.sel ss.m y) ] in
ss_st_term t (tail ss)
let rec ss_st_comp (s:st_comp) (ss:ss_t)
: Tot st_comp (decreases L.length ss.l) =
match ss.l with
| [] -> s
| y::tl ->
let s = subst_st_comp s [ NT y (Map.sel ss.m y) ] in
ss_st_comp s (tail ss) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | c: Pulse.Syntax.Base.comp -> ss: Pulse.Checker.Prover.Substs.ss_t
-> Prims.Tot Pulse.Syntax.Base.comp | Prims.Tot | [
"",
"total"
] | [] | [
"Pulse.Syntax.Base.comp",
"Pulse.Checker.Prover.Substs.ss_t",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l",
"Pulse.Syntax.Base.var",
"Prims.list",
"Pulse.Checker.Prover.Substs.ss_comp",
"Pulse.Checker.Prover.Substs.tail",
"Pulse.Syntax.Naming.subst_comp",
"Prims.Cons",
"Pulse.Syntax.Naming.subst_elt",
"Pulse.Syntax.Naming.NT",
"FStar.Map.sel",
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"Prims.Nil"
] | [
"recursion"
] | false | false | false | true | false | let rec ss_comp (c: comp) (ss: ss_t) : Tot comp (decreases L.length ss.l) =
| match ss.l with
| [] -> c
| y :: tl ->
let c = subst_comp c [NT y (Map.sel ss.m y)] in
ss_comp c (tail ss) | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.mk_app_norm | val mk_app_norm : env -> term -> list term -> Tac term | val mk_app_norm : env -> term -> list term -> Tac term | let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2 | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 4,
"end_line": 67,
"start_col": 0,
"start_line": 64
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: FStar.Stubs.Reflection.Types.env ->
t: FStar.Stubs.Reflection.Types.term ->
params: Prims.list FStar.Stubs.Reflection.Types.term
-> FStar.Tactics.Effect.Tac FStar.Stubs.Reflection.Types.term | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Stubs.Reflection.Types.env",
"FStar.Stubs.Reflection.Types.term",
"Prims.list",
"FStar.Stubs.Tactics.V1.Builtins.norm_term_env",
"Prims.Nil",
"FStar.Pervasives.norm_step",
"FStar.Reflection.V1.Derived.mk_e_app"
] | [] | false | true | false | false | false | let mk_app_norm e t params =
| let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2 | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.ss_binder | val ss_binder (b:binder) (ss:ss_t) : binder | val ss_binder (b:binder) (ss:ss_t) : binder | let rec ss_binder (b:binder) (ss:ss_t)
: Tot binder (decreases L.length ss.l) =
match ss.l with
| [] -> b
| y::tl ->
let b = subst_binder b [ NT y (Map.sel ss.m y) ] in
ss_binder b (tail ss) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 25,
"end_line": 208,
"start_col": 0,
"start_line": 202
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty
#push-options "--warn_error -271"
let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
()
#pop-options
let rec remove_l (l:ss_dom) (x:var { L.memP x l })
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y:var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
match l with
| [] -> assert False; []
| y::tl ->
if y = x then tl
else y::(remove_l tl x)
let rec is_dom_remove
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { Map.contains m x })
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] =
match l with
| [] -> ()
| y::tl ->
if x = y then ()
else let t_y = Map.sel m y in
let m1 = remove_map m y in
is_dom_remove tl m1 x;
assert (is_dom (remove_l tl x) (remove_map m1 x));
is_dom_push (remove_l tl x) (remove_map m1 x) y t_y;
assert (Map.equal (Map.upd (remove_map m1 x) y t_y)
(remove_map m x))
let rec ss_term (t:term) (ss:ss_t) : Tot term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_term t [ NT y (Map.sel ss.m y) ] in
ss_term t (tail ss)
let rec ss_st_term (t:st_term) (ss:ss_t) : Tot st_term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_st_term t [ NT y (Map.sel ss.m y) ] in
ss_st_term t (tail ss)
let rec ss_st_comp (s:st_comp) (ss:ss_t)
: Tot st_comp (decreases L.length ss.l) =
match ss.l with
| [] -> s
| y::tl ->
let s = subst_st_comp s [ NT y (Map.sel ss.m y) ] in
ss_st_comp s (tail ss)
let rec ss_comp (c:comp) (ss:ss_t)
: Tot comp (decreases L.length ss.l) =
match ss.l with
| [] -> c
| y::tl ->
let c = subst_comp c [ NT y (Map.sel ss.m y) ] in
ss_comp c (tail ss) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b: Pulse.Syntax.Base.binder -> ss: Pulse.Checker.Prover.Substs.ss_t
-> Prims.Tot Pulse.Syntax.Base.binder | Prims.Tot | [
"",
"total"
] | [] | [
"Pulse.Syntax.Base.binder",
"Pulse.Checker.Prover.Substs.ss_t",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l",
"Pulse.Syntax.Base.var",
"Prims.list",
"Pulse.Checker.Prover.Substs.ss_binder",
"Pulse.Checker.Prover.Substs.tail",
"Pulse.Syntax.Naming.subst_binder",
"Prims.Cons",
"Pulse.Syntax.Naming.subst_elt",
"Pulse.Syntax.Naming.NT",
"FStar.Map.sel",
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"Prims.Nil"
] | [
"recursion"
] | false | false | false | true | false | let rec ss_binder (b: binder) (ss: ss_t) : Tot binder (decreases L.length ss.l) =
| match ss.l with
| [] -> b
| y :: tl ->
let b = subst_binder b [NT y (Map.sel ss.m y)] in
ss_binder b (tail ss) | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.bv_eq | val bv_eq : bv -> bv -> Tot bool | val bv_eq : bv -> bv -> Tot bool | let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 31,
"end_line": 17,
"start_col": 0,
"start_line": 11
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | bv1: FStar.Stubs.Reflection.Types.bv -> bv2: FStar.Stubs.Reflection.Types.bv -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"FStar.Stubs.Reflection.Types.bv",
"Prims.op_Equality",
"Prims.nat",
"FStar.Stubs.Reflection.V1.Data.__proj__Mkbv_view__item__bv_index",
"FStar.Stubs.Reflection.V1.Data.bv_view",
"Prims.precedes",
"FStar.Stubs.Reflection.V1.Builtins.inspect_bv",
"Prims.bool"
] | [] | false | false | false | true | false | let bv_eq (bv1 bv2: bv) =
| let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
bvv1.bv_index = bvv2.bv_index | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.ss_st_term | val ss_st_term (t:st_term) (ss:ss_t) : st_term | val ss_st_term (t:st_term) (ss:ss_t) : st_term | let rec ss_st_term (t:st_term) (ss:ss_t) : Tot st_term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_st_term t [ NT y (Map.sel ss.m y) ] in
ss_st_term t (tail ss) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 26,
"end_line": 184,
"start_col": 0,
"start_line": 179
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty
#push-options "--warn_error -271"
let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
()
#pop-options
let rec remove_l (l:ss_dom) (x:var { L.memP x l })
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y:var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
match l with
| [] -> assert False; []
| y::tl ->
if y = x then tl
else y::(remove_l tl x)
let rec is_dom_remove
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { Map.contains m x })
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] =
match l with
| [] -> ()
| y::tl ->
if x = y then ()
else let t_y = Map.sel m y in
let m1 = remove_map m y in
is_dom_remove tl m1 x;
assert (is_dom (remove_l tl x) (remove_map m1 x));
is_dom_push (remove_l tl x) (remove_map m1 x) y t_y;
assert (Map.equal (Map.upd (remove_map m1 x) y t_y)
(remove_map m x))
let rec ss_term (t:term) (ss:ss_t) : Tot term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_term t [ NT y (Map.sel ss.m y) ] in
ss_term t (tail ss) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: Pulse.Syntax.Base.st_term -> ss: Pulse.Checker.Prover.Substs.ss_t
-> Prims.Tot Pulse.Syntax.Base.st_term | Prims.Tot | [
"",
"total"
] | [] | [
"Pulse.Syntax.Base.st_term",
"Pulse.Checker.Prover.Substs.ss_t",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l",
"Pulse.Syntax.Base.var",
"Prims.list",
"Pulse.Checker.Prover.Substs.ss_st_term",
"Pulse.Checker.Prover.Substs.tail",
"Pulse.Syntax.Naming.subst_st_term",
"Prims.Cons",
"Pulse.Syntax.Naming.subst_elt",
"Pulse.Syntax.Naming.NT",
"FStar.Map.sel",
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"Prims.Nil"
] | [
"recursion"
] | false | false | false | true | false | let rec ss_st_term (t: st_term) (ss: ss_t) : Tot st_term (decreases L.length ss.l) =
| match ss.l with
| [] -> t
| y :: tl ->
let t = subst_st_term t [NT y (Map.sel ss.m y)] in
ss_st_term t (tail ss) | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.print_binders_info | val print_binders_info (full: bool) (e: env) : Tac unit | val print_binders_info (full: bool) (e: env) : Tac unit | let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e) | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 50,
"end_line": 108,
"start_col": 0,
"start_line": 107
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | full: Prims.bool -> e: FStar.Stubs.Reflection.Types.env -> FStar.Tactics.Effect.Tac Prims.unit | FStar.Tactics.Effect.Tac | [] | [] | [
"Prims.bool",
"FStar.Stubs.Reflection.Types.env",
"FStar.Tactics.Util.iter",
"FStar.Stubs.Reflection.Types.binder",
"FStar.InteractiveHelpers.Base.print_binder_info",
"FStar.Stubs.Reflection.V1.Builtins.binders_of_env",
"Prims.unit"
] | [] | false | true | false | false | false | let print_binders_info (full: bool) (e: env) : Tac unit =
| iter (print_binder_info full) (binders_of_env e) | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.ss_st_comp | val ss_st_comp (s:st_comp) (ss:ss_t) : st_comp | val ss_st_comp (s:st_comp) (ss:ss_t) : st_comp | let rec ss_st_comp (s:st_comp) (ss:ss_t)
: Tot st_comp (decreases L.length ss.l) =
match ss.l with
| [] -> s
| y::tl ->
let s = subst_st_comp s [ NT y (Map.sel ss.m y) ] in
ss_st_comp s (tail ss) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 26,
"end_line": 192,
"start_col": 0,
"start_line": 186
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty
#push-options "--warn_error -271"
let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
()
#pop-options
let rec remove_l (l:ss_dom) (x:var { L.memP x l })
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y:var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
match l with
| [] -> assert False; []
| y::tl ->
if y = x then tl
else y::(remove_l tl x)
let rec is_dom_remove
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { Map.contains m x })
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] =
match l with
| [] -> ()
| y::tl ->
if x = y then ()
else let t_y = Map.sel m y in
let m1 = remove_map m y in
is_dom_remove tl m1 x;
assert (is_dom (remove_l tl x) (remove_map m1 x));
is_dom_push (remove_l tl x) (remove_map m1 x) y t_y;
assert (Map.equal (Map.upd (remove_map m1 x) y t_y)
(remove_map m x))
let rec ss_term (t:term) (ss:ss_t) : Tot term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_term t [ NT y (Map.sel ss.m y) ] in
ss_term t (tail ss)
let rec ss_st_term (t:st_term) (ss:ss_t) : Tot st_term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_st_term t [ NT y (Map.sel ss.m y) ] in
ss_st_term t (tail ss) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Pulse.Syntax.Base.st_comp -> ss: Pulse.Checker.Prover.Substs.ss_t
-> Prims.Tot Pulse.Syntax.Base.st_comp | Prims.Tot | [
"",
"total"
] | [] | [
"Pulse.Syntax.Base.st_comp",
"Pulse.Checker.Prover.Substs.ss_t",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l",
"Pulse.Syntax.Base.var",
"Prims.list",
"Pulse.Checker.Prover.Substs.ss_st_comp",
"Pulse.Checker.Prover.Substs.tail",
"Pulse.Syntax.Naming.subst_st_comp",
"Prims.Cons",
"Pulse.Syntax.Naming.subst_elt",
"Pulse.Syntax.Naming.NT",
"FStar.Map.sel",
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"Prims.Nil"
] | [
"recursion"
] | false | false | false | true | false | let rec ss_st_comp (s: st_comp) (ss: ss_t) : Tot st_comp (decreases L.length ss.l) =
| match ss.l with
| [] -> s
| y :: tl ->
let s = subst_st_comp s [NT y (Map.sel ss.m y)] in
ss_st_comp s (tail ss) | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.mfail | val mfail : str: Prims.string -> FStar.Tactics.Effect.Tac _ | let mfail str =
raise (MetaAnalysis str) | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 26,
"end_line": 128,
"start_col": 0,
"start_line": 127
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | str: Prims.string -> FStar.Tactics.Effect.Tac _ | FStar.Tactics.Effect.Tac | [] | [] | [
"Prims.string",
"FStar.Tactics.Effect.raise",
"FStar.InteractiveHelpers.Base.MetaAnalysis"
] | [] | false | true | false | false | false | let mfail str =
| raise (MetaAnalysis str) | false |
|
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.print_dbg | val print_dbg : bool -> string -> Tac unit | val print_dbg : bool -> string -> Tac unit | let print_dbg debug s =
if debug then print s | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 23,
"end_line": 134,
"start_col": 0,
"start_line": 133
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | debug: Prims.bool -> s: Prims.string -> FStar.Tactics.Effect.Tac Prims.unit | FStar.Tactics.Effect.Tac | [] | [] | [
"Prims.bool",
"Prims.string",
"FStar.Stubs.Tactics.V1.Builtins.print",
"Prims.unit"
] | [] | false | true | false | false | false | let print_dbg debug s =
| if debug then print s | false |
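A minimal usage sketch for print_dbg (illustration only, not drawn from the source file quoted above), assuming open FStar.Tactics and open FStar.InteractiveHelpers.Base are in scope; the message is emitted only when the debug flag is true.

let _print_dbg_example (dbg : bool) : Tac unit =
  print_dbg dbg "entering _print_dbg_example";   (* printed only when dbg = true *)
  print_dbg false "this message is never printed"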
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.ss_term | val ss_term (t:term) (ss:ss_t) : term | val ss_term (t:term) (ss:ss_t) : term | let rec ss_term (t:term) (ss:ss_t) : Tot term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_term t [ NT y (Map.sel ss.m y) ] in
ss_term t (tail ss) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 23,
"end_line": 177,
"start_col": 0,
"start_line": 172
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty
#push-options "--warn_error -271"
let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
()
#pop-options
let rec remove_l (l:ss_dom) (x:var { L.memP x l })
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y:var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
match l with
| [] -> assert False; []
| y::tl ->
if y = x then tl
else y::(remove_l tl x)
let rec is_dom_remove
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { Map.contains m x })
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] =
match l with
| [] -> ()
| y::tl ->
if x = y then ()
else let t_y = Map.sel m y in
let m1 = remove_map m y in
is_dom_remove tl m1 x;
assert (is_dom (remove_l tl x) (remove_map m1 x));
is_dom_push (remove_l tl x) (remove_map m1 x) y t_y;
assert (Map.equal (Map.upd (remove_map m1 x) y t_y)
(remove_map m x)) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: Pulse.Syntax.Base.term -> ss: Pulse.Checker.Prover.Substs.ss_t
-> Prims.Tot Pulse.Syntax.Base.term | Prims.Tot | [
"",
"total"
] | [] | [
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.ss_t",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l",
"Pulse.Syntax.Base.var",
"Prims.list",
"Pulse.Checker.Prover.Substs.ss_term",
"Pulse.Checker.Prover.Substs.tail",
"Pulse.Syntax.Naming.subst_term",
"Prims.Cons",
"Pulse.Syntax.Naming.subst_elt",
"Pulse.Syntax.Naming.NT",
"FStar.Map.sel",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"Prims.Nil"
] | [
"recursion"
] | false | false | false | true | false | let rec ss_term (t: term) (ss: ss_t) : Tot term (decreases L.length ss.l) =
| match ss.l with
| [] -> t
| y :: tl ->
let t = subst_term t [NT y (Map.sel ss.m y)] in
ss_term t (tail ss) | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.opt_apply | val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b) | val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b) | let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x') | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 26,
"end_line": 36,
"start_col": 0,
"start_line": 33
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: a -> b) -> x: FStar.Pervasives.Native.option a -> FStar.Pervasives.Native.option b | Prims.Tot | [
"total"
] | [] | [
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.Some"
] | [] | false | false | false | true | false | let opt_apply #a #b f x =
| match x with
| None -> None
| Some x' -> Some (f x') | false |
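A minimal usage sketch for opt_apply (illustration only; the binding names are hypothetical), assuming open FStar.InteractiveHelpers.Base: the total function is applied under Some, and None is left untouched.

let _opt_apply_some : option int = opt_apply (fun (x:int) -> x + 1) (Some 2)  (* Some 3 *)
let _opt_apply_none : option int = opt_apply (fun (x:int) -> x + 1) None      (* None *)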
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.opt_tapply | val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b) | val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b) | let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x') | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 26,
"end_line": 42,
"start_col": 0,
"start_line": 39
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x') | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: a -> FStar.Tactics.Effect.Tac b) -> x: FStar.Pervasives.Native.option a
-> FStar.Tactics.Effect.Tac (FStar.Pervasives.Native.option b) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.Some"
] | [] | false | true | false | false | false | let opt_tapply #a #b f x =
| match x with
| None -> None
| Some x' -> Some (f x') | false |
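A similar sketch for opt_tapply (illustration only), assuming open FStar.Tactics and open FStar.InteractiveHelpers.Base: it is the Tac-effect analogue of opt_apply, so a printer or any other Tac-compatible function can be mapped under the option.

let _opt_tapply_example () : Tac (option string) =
  opt_tapply (fun (s:string) -> "<" ^ s ^ ">") (Some "bv0")  (* evaluates to Some "<bv0>" *)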
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.term_construct | val term_construct (t : term) : Tac string | val term_construct (t : term) : Tac string | let term_construct (t : term) : Tac string =
term_view_construct (inspect t) | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 33,
"end_line": 160,
"start_col": 0,
"start_line": 159
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: FStar.Stubs.Reflection.Types.term -> FStar.Tactics.Effect.Tac Prims.string | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Stubs.Reflection.Types.term",
"FStar.InteractiveHelpers.Base.term_view_construct",
"Prims.string",
"FStar.Stubs.Reflection.V1.Data.term_view",
"FStar.Stubs.Tactics.V1.Builtins.inspect"
] | [] | false | true | false | false | false | let term_construct (t: term) : Tac string =
| term_view_construct (inspect t) | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.term_view_construct | val term_view_construct (t : term_view) : Tac string | val term_view_construct (t : term_view) : Tac string | let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown" | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 21,
"end_line": 155,
"start_col": 0,
"start_line": 139
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | t: FStar.Stubs.Reflection.V1.Data.term_view -> FStar.Tactics.Effect.Tac Prims.string | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Stubs.Reflection.V1.Data.term_view",
"FStar.Stubs.Reflection.Types.bv",
"FStar.Stubs.Reflection.Types.fv",
"FStar.Stubs.Reflection.Types.term",
"FStar.Stubs.Reflection.V1.Data.argv",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Stubs.Reflection.Types.comp",
"FStar.Stubs.Reflection.Types.universe",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Stubs.Reflection.V1.Data.vconst",
"Prims.nat",
"FStar.Stubs.Reflection.Types.ctx_uvar_and_subst",
"Prims.bool",
"Prims.list",
"FStar.Pervasives.Native.option",
"FStar.Stubs.Reflection.Types.match_returns_ascription",
"FStar.Stubs.Reflection.V1.Data.branch",
"Prims.string"
] | [] | false | true | false | false | false | let term_view_construct (t: term_view) : Tac string =
| match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown" | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.filter_ascriptions | val filter_ascriptions : bool -> term -> Tac term | val filter_ascriptions : bool -> term -> Tac term | let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 15,
"end_line": 178,
"start_col": 0,
"start_line": 172
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | dbg: Prims.bool -> t: FStar.Stubs.Reflection.Types.term
-> FStar.Tactics.Effect.Tac FStar.Stubs.Reflection.Types.term | FStar.Tactics.Effect.Tac | [] | [] | [
"Prims.bool",
"FStar.Stubs.Reflection.Types.term",
"FStar.Tactics.Visit.visit_tm",
"FStar.Pervasives.Native.option",
"FStar.Stubs.Reflection.Types.comp",
"FStar.Stubs.Reflection.V1.Data.term_view",
"FStar.Stubs.Tactics.V1.Builtins.inspect",
"Prims.unit",
"FStar.InteractiveHelpers.Base.print_dbg",
"Prims.string",
"Prims.op_Hat",
"FStar.Stubs.Tactics.V1.Builtins.term_to_string",
"FStar.InteractiveHelpers.Base.term_view_construct"
] | [] | false | true | false | false | false | let filter_ascriptions dbg t =
| print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t);
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _ | Tv_AscribedC e _ _ _ -> e
| _ -> t)
t | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.option_to_string | val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string | val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string | let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")" | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 36,
"end_line": 48,
"start_col": 0,
"start_line": 45
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x') | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | f: (_: a -> FStar.Tactics.Effect.Tac Prims.string) -> x: FStar.Pervasives.Native.option a
-> FStar.Tactics.Effect.Tac Prims.string | FStar.Tactics.Effect.Tac | [] | [] | [
"Prims.string",
"FStar.Pervasives.Native.option",
"Prims.op_Hat"
] | [] | false | true | false | false | false | let option_to_string #a f x =
| match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")" | false |
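A minimal sketch for option_to_string (illustration only), assuming open FStar.Tactics and open FStar.InteractiveHelpers.Base; the identity function serves as the element printer here.

let _option_to_string_example () : Tac unit =
  print (option_to_string (fun (s:string) -> s) (Some "x"));    (* prints: Some (x) *)
  print (option_to_string (fun (s:string) -> s) (None #string)) (* prints: None *)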
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.opt_cons | val opt_cons (#a: Type) (opt_x: option a) (ls: list a) : Tot (list a) | val opt_cons (#a: Type) (opt_x: option a) (ls: list a) : Tot (list a) | let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 14,
"end_line": 54,
"start_col": 0,
"start_line": 51
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")" | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | opt_x: FStar.Pervasives.Native.option a -> ls: Prims.list a -> Prims.list a | Prims.Tot | [
"total"
] | [] | [
"FStar.Pervasives.Native.option",
"Prims.list",
"Prims.Cons"
] | [] | false | false | false | true | false | let opt_cons (#a: Type) (opt_x: option a) (ls: list a) : Tot (list a) =
| match opt_x with
| Some x -> x :: ls
| None -> ls | false |
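A minimal sketch for opt_cons (illustration only), assuming open FStar.InteractiveHelpers.Base; assert_norm lets the normalizer check both branches directly.

let _opt_cons_some = assert_norm (opt_cons (Some 1) [2; 3] == [1; 2; 3])
let _opt_cons_none = assert_norm (opt_cons (None #int) [2; 3] == [2; 3])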
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.prettify_term | val prettify_term : bool -> term -> Tac term | val prettify_term : bool -> term -> Tac term | let prettify_term dbg t = filter_ascriptions dbg t | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 50,
"end_line": 186,
"start_col": 0,
"start_line": 186
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away. | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | dbg: Prims.bool -> t: FStar.Stubs.Reflection.Types.term
-> FStar.Tactics.Effect.Tac FStar.Stubs.Reflection.Types.term | FStar.Tactics.Effect.Tac | [] | [] | [
"Prims.bool",
"FStar.Stubs.Reflection.Types.term",
"FStar.InteractiveHelpers.Base.filter_ascriptions"
] | [] | false | true | false | false | false | let prettify_term dbg t =
| filter_ascriptions dbg t | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.bind_map_push | val bind_map_push : m: FStar.InteractiveHelpers.Base.bind_map a -> b: FStar.Stubs.Reflection.Types.bv -> x: a
-> Prims.list (FStar.Stubs.Reflection.Types.bv * a) | let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 66,
"end_line": 196,
"start_col": 0,
"start_line": 196
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: FStar.InteractiveHelpers.Base.bind_map a -> b: FStar.Stubs.Reflection.Types.bv -> x: a
-> Prims.list (FStar.Stubs.Reflection.Types.bv * a) | Prims.Tot | [
"total"
] | [] | [
"FStar.InteractiveHelpers.Base.bind_map",
"FStar.Stubs.Reflection.Types.bv",
"Prims.Cons",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.Mktuple2",
"Prims.list"
] | [] | false | false | false | true | false | let bind_map_push (#a: Type) (m: bind_map a) (b: bv) (x: a) =
| (b, x) :: m | false |
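Illustrative sketch (hypothetical, not part of the row above): how a bind_map is typically threaded — push a binding for a bv, then look it up again. The name example_bind_map and its arguments are made up.

let example_bind_map (m : bind_map typ) (b : bv) (ty : typ) : option typ =
  (* record that b maps to ty, then retrieve it; this returns Some ty *)
  let m' = bind_map_push m b ty in
  bind_map_get m' b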
|
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.ss_st_comp_commutes | val ss_st_comp_commutes (s:st_comp) (ss:ss_t)
: Lemma (ensures
ss_st_comp s ss ==
{ s with res = ss_term s.res ss;
pre = ss_term s.pre ss;
post = ss_term s.post ss; }) // no shifting required
[SMTPat (ss_st_comp s ss)] | val ss_st_comp_commutes (s:st_comp) (ss:ss_t)
: Lemma (ensures
ss_st_comp s ss ==
{ s with res = ss_term s.res ss;
pre = ss_term s.pre ss;
post = ss_term s.post ss; }) // no shifting required
[SMTPat (ss_st_comp s ss)] | let rec ss_st_comp_commutes (s:st_comp) (ss:ss_t)
: Lemma (ensures
ss_st_comp s ss ==
{ s with res = ss_term s.res ss;
pre = ss_term s.pre ss;
post = ss_term s.post ss; }) // no shifting required
(decreases L.length ss.l)
[SMTPat (ss_st_comp s ss)] =
match ss.l with
| [] -> ()
| y::tl -> ss_st_comp_commutes (subst_st_comp s [ NT y (Map.sel ss.m y) ]) (tail ss) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 86,
"end_line": 229,
"start_col": 0,
"start_line": 219
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty
#push-options "--warn_error -271"
let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
()
#pop-options
let rec remove_l (l:ss_dom) (x:var { L.memP x l })
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y:var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
match l with
| [] -> assert False; []
| y::tl ->
if y = x then tl
else y::(remove_l tl x)
let rec is_dom_remove
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { Map.contains m x })
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] =
match l with
| [] -> ()
| y::tl ->
if x = y then ()
else let t_y = Map.sel m y in
let m1 = remove_map m y in
is_dom_remove tl m1 x;
assert (is_dom (remove_l tl x) (remove_map m1 x));
is_dom_push (remove_l tl x) (remove_map m1 x) y t_y;
assert (Map.equal (Map.upd (remove_map m1 x) y t_y)
(remove_map m x))
let rec ss_term (t:term) (ss:ss_t) : Tot term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_term t [ NT y (Map.sel ss.m y) ] in
ss_term t (tail ss)
let rec ss_st_term (t:st_term) (ss:ss_t) : Tot st_term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_st_term t [ NT y (Map.sel ss.m y) ] in
ss_st_term t (tail ss)
let rec ss_st_comp (s:st_comp) (ss:ss_t)
: Tot st_comp (decreases L.length ss.l) =
match ss.l with
| [] -> s
| y::tl ->
let s = subst_st_comp s [ NT y (Map.sel ss.m y) ] in
ss_st_comp s (tail ss)
let rec ss_comp (c:comp) (ss:ss_t)
: Tot comp (decreases L.length ss.l) =
match ss.l with
| [] -> c
| y::tl ->
let c = subst_comp c [ NT y (Map.sel ss.m y) ] in
ss_comp c (tail ss)
let rec ss_binder (b:binder) (ss:ss_t)
: Tot binder (decreases L.length ss.l) =
match ss.l with
| [] -> b
| y::tl ->
let b = subst_binder b [ NT y (Map.sel ss.m y) ] in
ss_binder b (tail ss)
let rec ss_env (g:env) (ss:ss_t)
: Tot (g':env { fstar_env g' == fstar_env g /\
Env.dom g' == Env.dom g })
(decreases L.length ss.l) =
admit ();
match ss.l with
| [] -> g
| y::tl -> ss_env (subst_env g [ NT y (Map.sel ss.m y) ]) (tail ss) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Pulse.Syntax.Base.st_comp -> ss: Pulse.Checker.Prover.Substs.ss_t
-> FStar.Pervasives.Lemma
(ensures
Pulse.Checker.Prover.Substs.ss_st_comp s ss ==
Pulse.Syntax.Base.Mkst_comp (Mkst_comp?.u s)
(Pulse.Checker.Prover.Substs.ss_term (Mkst_comp?.res s) ss)
(Pulse.Checker.Prover.Substs.ss_term (Mkst_comp?.pre s) ss)
(Pulse.Checker.Prover.Substs.ss_term (Mkst_comp?.post s) ss))
(decreases FStar.List.Tot.Base.length (Mkss_t?.l ss))
[SMTPat (Pulse.Checker.Prover.Substs.ss_st_comp s ss)] | FStar.Pervasives.Lemma | [
"",
"lemma"
] | [] | [
"Pulse.Syntax.Base.st_comp",
"Pulse.Checker.Prover.Substs.ss_t",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l",
"Pulse.Syntax.Base.var",
"Prims.list",
"Pulse.Checker.Prover.Substs.ss_st_comp_commutes",
"Pulse.Syntax.Naming.subst_st_comp",
"Prims.Cons",
"Pulse.Syntax.Naming.subst_elt",
"Pulse.Syntax.Naming.NT",
"FStar.Map.sel",
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"Prims.Nil",
"Pulse.Checker.Prover.Substs.tail",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"Pulse.Checker.Prover.Substs.ss_st_comp",
"Pulse.Syntax.Base.Mkst_comp",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__u",
"Pulse.Checker.Prover.Substs.ss_term",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__res",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__pre",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__post",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat"
] | [
"recursion"
] | false | false | true | false | false | let rec ss_st_comp_commutes (s: st_comp) (ss: ss_t)
: Lemma
(ensures
ss_st_comp s ss ==
{ s with res = ss_term s.res ss; pre = ss_term s.pre ss; post = ss_term s.post ss })
(decreases L.length ss.l)
[SMTPat (ss_st_comp s ss)] =
| match ss.l with
| [] -> ()
| y :: tl -> ss_st_comp_commutes (subst_st_comp s [NT y (Map.sel ss.m y)]) (tail ss) | false |
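Illustrative note (hypothetical client, not part of the row above): because ss_st_comp_commutes carries [SMTPat (ss_st_comp s ss)], a proof that mentions ss_st_comp s ss usually gets the field-wise equation for free, e.g.

  assert (ss_st_comp s ss ==
          { s with res  = ss_term s.res  ss;
                   pre  = ss_term s.pre  ss;
                   post = ss_term s.post ss });

without calling the lemma explicitly.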
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.get_bind_map | val get_bind_map (e: genv) : bind_map (typ & bool & term) | val get_bind_map (e: genv) : bind_map (typ & bool & term) | let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 65,
"end_line": 239,
"start_col": 0,
"start_line": 239
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
} | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | e: FStar.InteractiveHelpers.Base.genv
-> FStar.InteractiveHelpers.Base.bind_map ((FStar.Stubs.Reflection.Types.typ * Prims.bool) *
FStar.Stubs.Reflection.Types.term) | Prims.Tot | [
"total"
] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__bmap",
"FStar.InteractiveHelpers.Base.bind_map",
"FStar.Pervasives.Native.tuple3",
"FStar.Stubs.Reflection.Types.typ",
"Prims.bool",
"FStar.Stubs.Reflection.Types.term"
] | [] | false | false | false | true | false | let get_bind_map (e: genv) : bind_map (typ & bool & term) =
| e.bmap | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.get_env | val get_env (e: genv) : env | val get_env (e: genv) : env | let get_env (e:genv) : env = e.env | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 34,
"end_line": 238,
"start_col": 0,
"start_line": 238
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
} | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | e: FStar.InteractiveHelpers.Base.genv -> FStar.Stubs.Reflection.Types.env | Prims.Tot | [
"total"
] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__env",
"FStar.Stubs.Reflection.Types.env"
] | [] | false | false | false | true | false | let get_env (e: genv) : env =
| e.env | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.push_as_map | val push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures as_map (push_ss ss1 ss2) == Map.concat (as_map ss1) (as_map ss2))
[SMTPat (as_map (push_ss ss1 ss2))] | val push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures as_map (push_ss ss1 ss2) == Map.concat (as_map ss1) (as_map ss2))
[SMTPat (as_map (push_ss ss1 ss2))] | let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
() | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 4,
"end_line": 140,
"start_col": 0,
"start_line": 123
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ss1: Pulse.Checker.Prover.Substs.ss_t -> ss2: Pulse.Checker.Prover.Substs.ss_t
-> FStar.Pervasives.Lemma
(requires
FStar.Set.disjoint (Pulse.Checker.Prover.Substs.dom ss1)
(Pulse.Checker.Prover.Substs.dom ss2))
(ensures
Pulse.Checker.Prover.Substs.as_map (Pulse.Checker.Prover.Substs.push_ss ss1 ss2) ==
FStar.Map.concat (Pulse.Checker.Prover.Substs.as_map ss1)
(Pulse.Checker.Prover.Substs.as_map ss2))
[SMTPat (Pulse.Checker.Prover.Substs.as_map (Pulse.Checker.Prover.Substs.push_ss ss1 ss2))] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Pulse.Checker.Prover.Substs.ss_t",
"Prims.unit",
"FStar.List.Tot.Base.length",
"Pulse.Syntax.Base.var",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l",
"FStar.Set.disjoint",
"Pulse.Checker.Prover.Substs.dom",
"Prims.squash",
"FStar.Map.equal",
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.as_map",
"Pulse.Checker.Prover.Substs.push_ss",
"FStar.Map.concat",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil",
"Prims.list",
"Pulse.Checker.Prover.Substs.push",
"FStar.Map.sel",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"Pulse.Checker.Prover.Substs.tail",
"FStar.Map.t"
] | [] | false | false | true | false | false | let push_as_map (ss1 ss2: ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2)) (Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
| let rec aux (ss1 ss2: ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2)) (Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x :: tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
() | false |
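Illustrative note (hypothetical client, not part of the row above): push_as_map characterizes push_ss extensionally; with its SMTPat in scope, a proof about disjoint ss1 and ss2 can typically discharge

  assert (Map.equal (as_map (push_ss ss1 ss2))
                    (Map.concat (as_map ss1) (as_map ss2)));

directly, which is the usual way the lemma is consumed.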
MerkleTree.Low.fst | MerkleTree.Low.mt_verify | val mt_verify:
#hsz:Ghost.erased hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Ghost.reveal (MT?.hash_spec mtv0) == hash_spec /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt) /\
mt_verify_pre_nst (B.get h0 mt 0) k j (B.get h0 p 0) rt))
(ensures (fun h0 b h1 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
// memory safety:
// `rt` is not modified in this function, but we use a trick
// to allocate an auxiliary buffer in the extended region of `rt`.
modifies (B.loc_all_regions_from false (B.frameOf rt)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
S.equal (Rgl?.r_repr (hreg hsz) h0 rt) (Rgl?.r_repr (hreg hsz) h1 rt) /\
(let mtv = B.get h0 mt 0 in
let k = split_offset (MT?.offset mtv) k in
let j = split_offset (MT?.offset mtv) j in
b <==> MTH.mt_verify #(U32.v hsz) #hash_spec (U32.v k) (U32.v j)
(lift_path h0 mtr p) (Rgl?.r_repr (hreg hsz) h0 rt)))) | val mt_verify:
#hsz:Ghost.erased hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Ghost.reveal (MT?.hash_spec mtv0) == hash_spec /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt) /\
mt_verify_pre_nst (B.get h0 mt 0) k j (B.get h0 p 0) rt))
(ensures (fun h0 b h1 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
// memory safety:
// `rt` is not modified in this function, but we use a trick
// to allocate an auxiliary buffer in the extended region of `rt`.
modifies (B.loc_all_regions_from false (B.frameOf rt)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
S.equal (Rgl?.r_repr (hreg hsz) h0 rt) (Rgl?.r_repr (hreg hsz) h1 rt) /\
(let mtv = B.get h0 mt 0 in
let k = split_offset (MT?.offset mtv) k in
let j = split_offset (MT?.offset mtv) j in
b <==> MTH.mt_verify #(U32.v hsz) #hash_spec (U32.v k) (U32.v j)
(lift_path h0 mtr p) (Rgl?.r_repr (hreg hsz) h0 rt)))) | let mt_verify #_ #hash_spec mt k j mtr p rt =
let ncmt = CB.cast mt in
let ncp = CB.cast p in
let mtv = !*ncmt in
let hash_size = MT?.hash_size mtv in
let hrg = hreg hash_size in
let k = split_offset (MT?.offset mtv) k in
let j = split_offset (MT?.offset mtv) j in
let hh0 = HST.get () in
let nrid = HST.new_region (B.frameOf rt) in
let ih = rg_alloc hrg nrid in
let pth = !*ncp in
assert (MT?.hash_size mtv = hash_size);
assert (Path?.hash_size pth = hash_size);
let first = V.index (Path?.hashes pth) 0ul in
Cpy?.copy (hcpy hash_size) hash_size first ih;
let hh1 = HST.get () in
path_safe_preserved
mtr ncp (B.loc_all_regions_from false (B.frameOf rt)) hh0 hh1;
path_preserved mtr ncp (B.loc_all_regions_from false (B.frameOf rt)) hh0 hh1;
lift_path_index hh0 mtr ncp 0ul;
assert (Rgl?.r_repr hrg hh1 ih == S.index (lift_path #hash_size hh0 mtr ncp) 0);
mt_verify_ #hash_size #hash_spec k j mtr p 1ul ih false (MT?.hash_fun mtv);
let hh2 = HST.get () in
assert (Rgl?.r_repr hrg hh2 ih ==
MTH.mt_verify_ #(U32.v hash_size) #hash_spec (U32.v k) (U32.v j) (lift_path hh1 mtr ncp)
1 (Rgl?.r_repr hrg hh1 ih) false);
let r = Lib.ByteBuffer.lbytes_eq #hash_size ih rt in
rg_free hrg ih;
r | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 3,
"end_line": 3005,
"start_col": 0,
"start_line": 2976
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
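(* Illustrative note (hypothetical example, not in the original MerkleTree.Low.fst):
   with tree = 100UL and index = 103UL, offsets_connect 100UL 103UL holds
   (103 >= 100 and 103 - 100 <= offset_range_limit), so
   split_offset 100UL 103UL = 3ul, and conversely join_offset 100UL 3ul = 103UL.
   In general join_offset tree (split_offset tree index) = index whenever
   offsets_connect tree index holds. *)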
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
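// For instance, offset_of 6ul = 6ul and offset_of 7ul = 6ul: an index is
// rounded down to the nearest even number, i.e. to the start of its sibling
// pair at the current level.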
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
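// Illustrative unfolding: for a tree with i = 0ul and j = 5ul,
// `mt_safe_elts h 0ul hs 0ul 5ul` requires
//   V.size_of (V.get h hs 0ul) == 5ul (the leaves),
//   V.size_of (V.get h hs 1ul) == 2ul (5ul / 2ul),
//   V.size_of (V.get h hs 2ul) == 1ul (2ul / 2ul),
// and V.size_of (V.get h hs lv) == 0ul for every remaining level lv.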
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions
// reachable from a tree pointer as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying
// and pushing its content to `hs[lv]`. For the detailed insertion procedure,
// see `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash covering `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
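// Informal trace of the picture above: inserting `h3` (lv = 0ul, i = 0ul,
// j = 3ul, acc = h3)
//  1) pushes h3 onto hs[0] (j = 3 is odd),
//  2) sets acc := hash h2 h3 = h23 and recurses with j = 1,
//  3) pushes h23 onto hs[1], sets acc := hash h01 h23 = h03 and recurses
//     with j = 0,
//  4) pushes h03 onto hs[2] and stops (j = 0 is even).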
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
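// Informal reading: the precondition fails either when the tree already holds
// `uint32_32_max` elements, or when incrementing `j` would make `offset + j`
// overflow the 64-bit offset counter.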
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// modifies the content of `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
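// Illustrative note: `lift_path_` preserves order; for hs = <a, b, c> with all
// three hashes live in `h`, lift_path_ h hs 0 3 is <repr a, repr b, repr c>
// where repr = Rgl?.r_repr (hreg hsz) h, i.e. the k-th low-level hash lifts to
// the k-th element of the high-level path.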
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
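// Illustrative run: for a 5-element tree (hs[0] = [h0 .. h4],
// hs[1] = [h01; h23], hs[2] = [h03]), `construct_rhs` copies h4 into the
// accumulator at level 0 (j = 5 is odd, actd = false), recurses through the
// even level 1, stores the accumulator into rhs[2] at level 2 (actd is now
// true), and finishes with acc = hash h03 h4, which becomes the Merkle root.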
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build the rightmost hashes and to calculate the Merkle
// root as well.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements in the tree `j`, and a
// boolean flag (indicating whether rightmost hashes exist), we can calculate
// the required Merkle path length.
//
// `mt_path_length` appears in the postcondition of `mt_get_path` and in the
// precondition of `mt_verify`. For a detailed description, see `mt_get_path`
// and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
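// For example, with j = 5ul:
//   mt_path_length_step 3ul 5ul false = 1ul (odd index: the left sibling is needed)
//   mt_path_length_step 2ul 5ul false = 1ul (even index with a right sibling)
//   mt_path_length_step 4ul 5ul false = 0ul (rightmost index, no rightmost hash yet)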
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
private
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
private inline_for_extraction
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul)
val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root
val mt_get_path_loc_union_helper:
l1:loc -> l2:loc ->
Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx` from the hashes `mt.hs`
// and the rightmost hashes `mt.rhs`. Note that this operation copies
// "pointers" to hashes inside the Merkle tree into the output path.
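//
// A hand-worked illustration (added commentary, not machine-checked): for a
// tree with i = 0, j = 5 and idx = 2, the resulting path holds four hashes,
// namely hs[0][2] (the hash being proven), hs[0][3] (its sibling leaf),
// hs[1][0] (the internal node over leaves 0-1) and rhs[2] (the rightmost
// hash covering leaf 4), matching 1ul + mt_path_length 0ul 2ul 5ul false.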
#push-options "--z3rlimit 60"
val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
let ncmt = CB.cast mt in
let mtframe = B.frameOf ncmt in
let hh0 = HST.get () in
mt_get_root mt root;
let mtv = !*ncmt in
let hsz = MT?.hash_size mtv in
let hh1 = HST.get () in
path_safe_init_preserved mtframe p
(B.loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
hh0 hh1;
assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
let idx = split_offset (MT?.offset mtv) idx in
let i = MT?.i mtv in
let ofs = offset_of (MT?.i mtv) in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
assert (mt_safe_elts hh1 0ul hs i j);
assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
assert (idx < j);
hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
let ih = V.index (V.index hs 0ul) (idx - ofs) in
mt_path_insert #hsz mtframe p ih;
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtframe p)
(MTH.path_insert
(lift_path hh1 mtframe p)
(S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
mt_safe_preserved ncmt (path_loc p) hh1 hh2;
mt_preserved ncmt (path_loc p) hh1 hh2;
assert (V.size_of (phashes hh2 p) == 1ul);
mt_get_path_ 0ul mtframe hs rhs i j idx p false;
let hh3 = HST.get () in
// memory safety
mt_get_path_loc_union_helper
(loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p);
Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
mt_safe_preserved ncmt (path_loc p) hh2 hh3;
mt_preserved ncmt (path_loc p) hh2 hh3;
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
assert (S.length (lift_path #hsz hh3 mtframe p) ==
S.length (lift_path #hsz hh2 mtframe p) +
MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
assert (modifies (loc_union
(loc_union
(mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
hh0 hh3);
assert (mt_safe hh3 ncmt);
assert (path_safe hh3 mtframe p);
assert (Rgl?.r_inv (hreg hsz) hh3 root);
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
// correctness
mt_safe_elts_spec hh2 0ul hs i j;
assert (S.equal (lift_path hh3 mtframe p)
(MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
(U32.v i) (U32.v j) (U32.v idx)
(lift_path hh2 mtframe p) false));
assert (MTH.mt_get_path
(mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(U32.v (MT?.j (B.get hh3 ncmt 0)),
lift_path hh3 mtframe p,
Rgl?.r_repr (hreg hsz) hh3 root));
j
#pop-options
/// Flushing
private val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always retains at least one base
// hash element. If there are `MT?.j` elements in the tree, then because of
// the precondition `MT?.i <= idx < MT?.j` we still have the `idx`-th element
// after flushing.
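//
// A hand-worked illustration (added commentary, not machine-checked): with
// MT?.i = 0, MT?.j = 5 and idx = 3, level 0 keeps the leaves from
// offset_of 3 = 2 up to 4 (its vector shrinks from 5 to 3 elements), the
// recursive call at level 1 flushes from 0 to 3 / 2 = 1 and removes nothing
// there since offset_of 1 = 0, and the tree's `i` field becomes 3.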
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
offsets_connect (MT?.offset mtv) idx &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv &&
idx < MT?.j mtv)
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let j = MT?.j mtv in
let hsz = MT?.hash_size mtv in
let idx = split_offset offset idx in
let hs = MT?.hs mtv in
mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv) idx (MT?.j mtv)
hs
(MT?.rhs_ok mtv) (MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2
#pop-options
private inline_for_extraction
val mt_flush_pre_nst: mt:merkle_tree -> Tot bool
let mt_flush_pre_nst mt = MT?.j mt > MT?.i mt
val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True))
let mt_flush_pre mt = mt_flush_pre_nst !*(CB.cast mt)
val mt_flush:
mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
(ensures (fun h0 _ h1 ->
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt))
#push-options "--z3rlimit 200 --initial_fuel 1 --max_fuel 1"
let mt_flush mt =
let mtv = !*mt in
let off = MT?.offset mtv in
let j = MT?.j mtv in
let j1 = j - 1ul in
assert (j1 < uint32_32_max);
assert (off < uint64_max);
assert (UInt.fits (U64.v off + U32.v j1) 64);
let jo = join_offset off j1 in
mt_flush_to mt jo
#pop-options
/// Retraction
private
val mt_retract_to_:
#hsz:hash_size_t ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
lv:uint32_t{lv < V.size_of hs} ->
i:index_t ->
s:index_t ->
j:index_t{i <= s && s <= j && v j < pow2 (U32.v (V.size_of hs) - v lv)}
-> HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
(modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1) /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i s /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
S.equal (RV.as_seq h1 hs)
(MTH.mt_retract_to_
(RV.as_seq h0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
))
(decreases (U32.v merkle_tree_size_lg - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
private
let rec mt_retract_to_ #hsz hs lv i s j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs i j;
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
if lv >= V.size_of hs then ()
else begin
// 1) Retract hashes at level `lv`.
let hvec = V.index hs lv in
let old_len = j - offset_of i in
let new_len = s - offset_of i in
let retracted = RV.shrink hvec new_len in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) retracted);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of retracted == new_len);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 retracted)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) 0 (U32.v new_len)));
RV.assign hs lv retracted;
let hh2 = HST.get() in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == s - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector retracted) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector retracted) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 retracted)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 retracted);
if lv + 1ul < V.size_of hs then
begin
assert (mt_safe_elts hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul));
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_retract_to_ hs (lv + 1ul) (i / 2ul) (s / 2ul) (j / 2ul);
// 3-0) Memory safety brought from the postcondition of the recursion
let hh3 = HST.get () in
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) == s - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i s;
assert (mt_safe_elts hh3 lv hs i s);
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (U32.v lv + 1 < S.length (RV.as_seq hh3 hs) ==>
S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh2 hs) (U32.v lv + 1)
(U32.v i / 2) (U32.v s / 2) (U32.v j / 2)));
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts hh0 lv hs i j);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
else begin
let hh3 = HST.get() in
assert ((modifies (loc_union
(RV.rv_loc_elems hh0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
hh0 hh3));
assert (RV.rv_inv hh3 hs /\ mt_safe_elts hh3 lv hs i s);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_
(RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
end
#pop-options
private inline_for_extraction
val mt_retract_to_pre_nst: mtv:merkle_tree -> r:offset_t -> Tot bool
let mt_retract_to_pre_nst mtv r =
offsets_connect (MT?.offset mtv) r &&
([@inline_let] let r = split_offset (MT?.offset mtv) r in
MT?.i mtv <= r && r < MT?.j mtv)
val mt_retract_to_pre: mt:const_mt_p -> r:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_retract_to_pre mt r =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_retract_to_pre_nst mtv r
#push-options "--z3rlimit 100"
val mt_retract_to:
mt:mt_p ->
r:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_retract_to_pre_nst (B.get h0 mt 0) r))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let r = split_offset off r in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_retract_to (mt_lift h0 mt) (U32.v r) == mt_lift h1 mt)))
let mt_retract_to mt r =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let r = split_offset offset r in
let hs = MT?.hs mtv in
mt_retract_to_ hs 0ul (MT?.i mtv) (r + 1ul) (MT?.j mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv) (MT?.offset mtv) (MT?.i mtv) (r+1ul) hs false (MT?.rhs mtv) (MT?.mroot mtv) (MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs (MT?.i mtv) (r+1ul) (B.loc_buffer mt) hh1 hh2
#pop-options
/// Client-side verification
private
val mt_verify_:
#hsz:hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
k:index_t ->
j:index_t{k <= j} ->
mtr:HH.rid ->
p:const_path_p ->
ppos:uint32_t ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
let p = CB.cast p in
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 acc /\
Path?.hash_size (B.get h0 p 0) = hsz /\
HH.disjoint (B.frameOf p) (B.frameOf acc) /\
HH.disjoint mtr (B.frameOf acc) /\
    // Below is a very relaxed condition, but it is sufficient to ensure that
    // the (+) on uint32_t below does not overflow, since `mt_path_length`
    // is at most 32ul.
ppos <= 64ul - mt_path_length 0ul k j actd /\
ppos + mt_path_length 0ul k j actd <= V.size_of (phashes h0 p)))
(ensures (fun h0 _ h1 ->
let p = CB.cast p in
// memory safety
modifies (B.loc_all_regions_from false (B.frameOf acc)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
Rgl?.r_repr (hreg hsz) h1 acc ==
MTH.mt_verify_ #(U32.v hsz) #hash_spec (U32.v k) (U32.v j) (lift_path h0 mtr p)
(U32.v ppos) (Rgl?.r_repr (hreg hsz) h0 acc) actd))
#push-options "--z3rlimit 200 --initial_fuel 1 --max_fuel 1"
let rec mt_verify_ #hsz #hash_spec k j mtr p ppos acc actd hash_fun =
let ncp:path_p = CB.cast p in
let hh0 = HST.get () in
if j = 0ul then ()
else (let nactd = actd || (j % 2ul = 1ul) in
if k % 2ul = 0ul then begin
if j = k || (j = k + 1ul && not actd) then
mt_verify_ (k / 2ul) (j / 2ul) mtr p ppos acc nactd hash_fun
else begin
let ncpd = !*ncp in
let phash = V.index (Path?.hashes ncpd) ppos in
hash_fun acc phash acc;
let hh1 = HST.get () in
path_preserved mtr ncp
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
lift_path_index hh0 mtr ncp ppos;
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
hash_spec (Rgl?.r_repr (hreg hsz) hh0 acc)
(S.index (lift_path #hsz hh0 mtr ncp) (U32.v ppos)));
mt_verify_ (k / 2ul) (j / 2ul) mtr p (ppos + 1ul) acc nactd hash_fun
end
end
else begin
let ncpd = !*ncp in
let phash = V.index (Path?.hashes ncpd) ppos in
hash_fun phash acc acc;
let hh1 = HST.get () in
path_preserved mtr ncp
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
lift_path_index hh0 mtr ncp ppos;
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
hash_spec (S.index (lift_path #hsz hh0 mtr ncp) (U32.v ppos))
(Rgl?.r_repr (hreg hsz) hh0 acc));
mt_verify_ (k / 2ul) (j / 2ul) mtr p (ppos + 1ul) acc nactd hash_fun
end)
#pop-options
private inline_for_extraction
val mt_verify_pre_nst: mt:merkle_tree -> k:offset_t -> j:offset_t -> p:path -> rt:(hash #(MT?.hash_size mt)) -> Tot bool
let mt_verify_pre_nst mt k j p rt =
k < j &&
offsets_connect (MT?.offset mt) k &&
offsets_connect (MT?.offset mt) j &&
MT?.hash_size mt = Path?.hash_size p &&
([@inline_let] let k = split_offset (MT?.offset mt) k in
[@inline_let] let j = split_offset (MT?.offset mt) j in
// We need to add one since the first element is the hash to verify.
V.size_of (Path?.hashes p) = 1ul + mt_path_length 0ul k j false)
val mt_verify_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_verify_pre #hsz mt k j mtr p rt =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_verify_pre_nst mtv k j !*p rt
// `mt_verify` verifies a Merkle path `p` against a given target index `k` and
// the number of elements `j`. It recursively iterates over the path with an
// accumulator `acc` (a compressed hash).
//
// Note that the expected path length (`mt_path_length`) is given as a
// precondition of this operation. The same length is a postcondition of
// `mt_get_path`, so we can call `mt_verify` with every path generated by
// `mt_get_path`.
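//
// A hand-worked trace (added commentary, not machine-checked): for k = 2 and
// j = 5 the path carries 4 hashes; `acc` starts as a copy of p[0] (the hash
// being verified), level 0 sets acc := hash(acc, p[1]) since k is even,
// level 1 sets acc := hash(p[2], acc) since k / 2 = 1 is odd, level 2 sets
// acc := hash(acc, p[3]), and the final accumulator is compared against the
// expected root `rt`.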
#push-options "--z3rlimit 20"
val mt_verify:
#hsz:Ghost.erased hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Ghost.reveal (MT?.hash_spec mtv0) == hash_spec /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt) /\
mt_verify_pre_nst (B.get h0 mt 0) k j (B.get h0 p 0) rt))
(ensures (fun h0 b h1 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
// memory safety:
// `rt` is not modified in this function, but we use a trick
// to allocate an auxiliary buffer in the extended region of `rt`.
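    // (Added note: concretely, the implementation below allocates a fresh
    // region under `B.frameOf rt`, copies the first path hash into an
    // auxiliary accumulator there, folds the remaining path hashes into it
    // with `mt_verify_`, and only then compares it against `rt`, so `rt`
    // itself is never written.)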
modifies (B.loc_all_regions_from false (B.frameOf rt)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
S.equal (Rgl?.r_repr (hreg hsz) h0 rt) (Rgl?.r_repr (hreg hsz) h1 rt) /\
(let mtv = B.get h0 mt 0 in
let k = split_offset (MT?.offset mtv) k in
let j = split_offset (MT?.offset mtv) j in
b <==> MTH.mt_verify #(U32.v hsz) #hash_spec (U32.v k) (U32.v j)
(lift_path h0 mtr p) (Rgl?.r_repr (hreg hsz) h0 rt))))
#pop-options | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
mt: MerkleTree.Low.const_mt_p ->
k: EverCrypt.Helpers.uint64_t ->
j: EverCrypt.Helpers.uint64_t ->
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.const_path_p ->
rt: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.bool | FStar.HyperStack.ST.ST | [] | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Spec.hash_fun_t",
"FStar.UInt32.v",
"FStar.Ghost.reveal",
"MerkleTree.Low.const_mt_p",
"EverCrypt.Helpers.uint64_t",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.const_path_p",
"MerkleTree.Low.Datastructures.hash",
"Prims.bool",
"Prims.unit",
"LowStar.Regional.rg_free",
"Lib.ByteBuffer.lbytes_eq",
"Prims._assert",
"Prims.eq2",
"MerkleTree.New.High.hash",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"MerkleTree.New.High.mt_verify_",
"MerkleTree.Low.lift_path",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"MerkleTree.Low.mt_verify_",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.__proj__MT__item__hash_fun",
"FStar.Seq.Base.index",
"MerkleTree.Low.lift_path_index",
"MerkleTree.Low.path_preserved",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Monotonic.Buffer.frameOf",
"Lib.IntTypes.uint8",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.Low.path_safe_preserved",
"LowStar.RVector.__proj__Cpy__item__copy",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.Datastructures.hcpy",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Vector.index",
"MerkleTree.Low.__proj__Path__item__hashes",
"Prims.b2t",
"Prims.op_Equality",
"MerkleTree.Low.__proj__MT__item__hash_size",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.Regional.rg_alloc",
"FStar.HyperStack.ST.new_region",
"MerkleTree.Low.index_t",
"MerkleTree.Low.split_offset",
"MerkleTree.Low.__proj__MT__item__offset",
"LowStar.Regional.regional",
"MerkleTree.Low.merkle_tree",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | false | true | false | false | false | let mt_verify #_ #hash_spec mt k j mtr p rt =
| let ncmt = CB.cast mt in
let ncp = CB.cast p in
let mtv = !*ncmt in
let hash_size = MT?.hash_size mtv in
let hrg = hreg hash_size in
let k = split_offset (MT?.offset mtv) k in
let j = split_offset (MT?.offset mtv) j in
let hh0 = HST.get () in
let nrid = HST.new_region (B.frameOf rt) in
let ih = rg_alloc hrg nrid in
let pth = !*ncp in
assert (MT?.hash_size mtv = hash_size);
assert (Path?.hash_size pth = hash_size);
let first = V.index (Path?.hashes pth) 0ul in
Cpy?.copy (hcpy hash_size) hash_size first ih;
let hh1 = HST.get () in
path_safe_preserved mtr ncp (B.loc_all_regions_from false (B.frameOf rt)) hh0 hh1;
path_preserved mtr ncp (B.loc_all_regions_from false (B.frameOf rt)) hh0 hh1;
lift_path_index hh0 mtr ncp 0ul;
assert (Rgl?.r_repr hrg hh1 ih == S.index (lift_path #hash_size hh0 mtr ncp) 0);
mt_verify_ #hash_size #hash_spec k j mtr p 1ul ih false (MT?.hash_fun mtv);
let hh2 = HST.get () in
assert (Rgl?.r_repr hrg hh2 ih ==
MTH.mt_verify_ #(U32.v hash_size)
#hash_spec
(U32.v k)
(U32.v j)
(lift_path hh1 mtr ncp)
1
(Rgl?.r_repr hrg hh1 ih)
false);
let r = Lib.ByteBuffer.lbytes_eq #hash_size ih rt in
rg_free hrg ih;
r | false |
Vale.X64.Leakage.fst | Vale.X64.Leakage.check_if_loop_consumes_fixed_time | val check_if_loop_consumes_fixed_time (code:S.code{While? code}) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 0]) | val check_if_loop_consumes_fixed_time (code:S.code{While? code}) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 0]) | let rec check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : bool & analysis_taints =
match block with
| [] -> true, ts
| hd::tl -> let fixedTime, ts_int = check_if_code_consumes_fixed_time hd ts in
if (not fixedTime) then fixedTime, ts_int
else check_if_block_consumes_fixed_time tl ts_int
and check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : bool & analysis_taints =
match code with
| Ins ins -> let b, ts = check_if_ins_consumes_fixed_time ins ts in b, ts
| Block block -> check_if_block_consumes_fixed_time block ts
| IfElse ifCond ifTrue ifFalse ->
let o1 = operand_taint 0 (S.get_fst_ocmp ifCond) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp ifCond) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then (false, ts)
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp ifCond) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp ifCond) ts in
if (not o2Public) then (false, ts)
else
let validIfTrue, tsIfTrue = check_if_code_consumes_fixed_time ifTrue ts in
if (not validIfTrue) then (false, ts)
else
let validIfFalse, tsIfFalse = check_if_code_consumes_fixed_time ifFalse ts in
if (not validIfFalse) then (false, ts)
else
(true, combine_analysis_taints tsIfTrue tsIfFalse)
| While cond body -> check_if_loop_consumes_fixed_time code ts
and check_if_loop_consumes_fixed_time c (ts:analysis_taints) : (bool & analysis_taints) =
let ts = normalize_taints ts in
let While pred body = c in
let o1 = operand_taint 0 (S.get_fst_ocmp pred) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp pred) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint) then false, ts
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp pred) ts in
if (not o1Public) then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp pred) ts in
if (not o2Public) then (false, ts)
else
let fixedTime, next_ts = check_if_code_consumes_fixed_time body ts in
if (not fixedTime) then (false, ts)
else
let combined_ts = combine_analysis_taints ts next_ts in
assert (taintstate_monotone ts combined_ts);
if eq_leakage_taints combined_ts.lts ts.lts then
true, combined_ts
else (
monotone_decreases_count ts combined_ts;
check_if_loop_consumes_fixed_time c combined_ts
) | {
"file_name": "vale/code/arch/x64/Vale.X64.Leakage.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 5,
"end_line": 251,
"start_col": 0,
"start_line": 192
} | module Vale.X64.Leakage
open FStar.Mul
open Vale.X64.Machine_s
module S = Vale.X64.Machine_Semantics_s
open Vale.X64.Leakage_s
open Vale.X64.Leakage_Helpers
open Vale.X64.Leakage_Ins
unfold let machine_eval_ocmp = S.machine_eval_ocmp
unfold let machine_eval_code = S.machine_eval_code
unfold let machine_eval_codes = S.machine_eval_codes
unfold let machine_eval_while = S.machine_eval_while
#reset-options "--initial_ifuel 0 --max_ifuel 1 --initial_fuel 1 --max_fuel 1"
let normalize_taints (ts:analysis_taints) : analysis_taints =
let AnalysisTaints lts rts = ts in
AnalysisTaints lts (regs_to_map (map_to_regs rts))
let combine_reg_taints (regs1 regs2:reg_taint) : reg_taint =
FunctionalExtensionality.on reg (fun x -> merge_taint (regs1 x) (regs2 x))
let rec eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : bool =
if k = 0 then true
else regs1 (Reg rf (k - 1)) = regs2 (Reg rf (k - 1)) && eq_regs_file regs1 regs2 rf (k - 1)
let rec eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : bool =
if k = 0 then true
else eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1)) && eq_regs regs1 regs2 (k - 1)
let rec lemma_eq_regs_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(ensures eq_regs_file regs1 regs2 rf k <==>
(forall (i:nat).{:pattern (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i)))
=
if k > 0 then lemma_eq_regs_file regs1 regs2 rf (k - 1)
let rec lemma_eq_regs (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(ensures
eq_regs regs1 regs2 k <==>
(forall (i j:nat).{:pattern (Reg i j)} i < k /\ j < n_regs i ==>
regs1 (Reg i j) == regs2 (Reg i j)))
=
if k > 0 then (
lemma_eq_regs_file regs1 regs2 (k - 1) (n_regs (k - 1));
lemma_eq_regs regs1 regs2 (k - 1)
)
let eq_registers (regs1 regs2:reg_taint) : (b:bool{b <==> regs1 == regs2}) =
lemma_eq_regs regs1 regs2 n_reg_files;
let b = eq_regs regs1 regs2 n_reg_files in
if b then (
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
);
b
let eq_leakage_taints (ts1 ts2:leakage_taints) : (b:bool{b <==> ts1 == ts2}) =
eq_registers ts1.regTaint ts2.regTaint &&
ts1.flagsTaint = ts2.flagsTaint &&
ts1.cfFlagsTaint = ts2.cfFlagsTaint &&
ts1.ofFlagsTaint = ts2.ofFlagsTaint
let taintstate_monotone_regs (ts ts':reg_taint) =
(forall (r:reg).{:pattern (ts' r) \/ (ts r)}
Public? (ts' r) ==> Public? (ts r))
let taintstate_monotone (ts ts':analysis_taints) =
let ts = ts.lts in
let ts' = ts'.lts in
taintstate_monotone_regs ts.regTaint ts'.regTaint /\
(Public? (ts'.flagsTaint) ==> Public? (ts.flagsTaint)) /\
(Public? (ts'.cfFlagsTaint) ==> Public? (ts.cfFlagsTaint)) /\
(Public? (ts'.ofFlagsTaint) ==> Public? (ts.ofFlagsTaint))
let taintstate_monotone_trans (ts1:analysis_taints) (ts2:analysis_taints) (ts3:analysis_taints)
: Lemma (taintstate_monotone ts1 ts2 /\ taintstate_monotone ts2 ts3 ==> taintstate_monotone ts1 ts3) = ()
let isConstant_monotone (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isConstantTimeGivenStates code fuel ts2.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isConstantTimeGivenStates code fuel ts1.lts s1 s2)
= ()
let isExplicit_monotone (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints) (code:S.code)
(fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts.lts ts1.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts.lts ts2.lts s1 s2)
= ()
let isExplicit_monotone2 (ts:analysis_taints) (ts1:analysis_taints) (ts2:analysis_taints)
(code:S.code) (fuel:nat) (s1:S.machine_state) (s2:S.machine_state)
: Lemma (isExplicitLeakageFreeGivenStates code fuel ts2.lts ts.lts s1 s2 /\ taintstate_monotone ts1 ts2 ==> isExplicitLeakageFreeGivenStates code fuel ts1.lts ts.lts s1 s2)
= ()
let combine_leakage_taints (ts1:leakage_taints) (ts2:leakage_taints) : leakage_taints =
let LeakageTaints rs1 fs1 c1 o1 = ts1 in
let LeakageTaints rs2 fs2 c2 o2 = ts2 in
let rs = combine_reg_taints rs1 rs2 in
LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
let combine_analysis_taints (ts1:analysis_taints) (ts2:analysis_taints)
: (ts:analysis_taints{taintstate_monotone ts1 ts /\ taintstate_monotone ts2 ts /\ ts.lts == combine_leakage_taints ts1.lts ts2.lts})
=
let AnalysisTaints (LeakageTaints rs1_old fs1 c1 o1) rts1 = ts1 in
let AnalysisTaints (LeakageTaints rs2_old fs2 c2 o2) rts2 = ts2 in
let rts1 = ts1.rts in
let rts2 = ts2.rts in
let rs1 = map_to_regs rts1 in // \
let rs2 = map_to_regs rts2 in // - build efficient representations of reg_taint before calling combine_reg_taints
assert (FStar.FunctionalExtensionality.feq rs1 rs1_old);
assert (FStar.FunctionalExtensionality.feq rs2 rs2_old);
let rs = combine_reg_taints rs1 rs2 in
let rts = regs_to_map rs in
let lts = LeakageTaints
rs
(merge_taint fs1 fs2)
(merge_taint c1 c2)
(merge_taint o1 o2)
in
AnalysisTaints lts rts
let count_public_register (regs:reg_taint) (r:reg) = if Public? (regs r) then 1 else 0
let rec count_public_registers_file (regs:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : nat =
if k = 0 then 0
else count_public_register regs (Reg rf (k - 1)) + count_public_registers_file regs rf (k - 1)
let rec lemma_count_public_registers_file (regs1 regs2:reg_taint) (rf:reg_file_id) (k:nat{k <= n_regs rf}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers_file regs1 rf k >= count_public_registers_file regs2 rf k
)
(ensures
count_public_registers_file regs1 rf k == count_public_registers_file regs2 rf k /\
(forall (i:nat).{:pattern regs1 (Reg rf i) \/ regs2 (Reg rf i)} i < k ==> regs1 (Reg rf i) == regs2 (Reg rf i))
)
=
if k > 0 then lemma_count_public_registers_file regs1 regs2 rf (k - 1)
let rec count_public_registers (regs:reg_taint) (k:nat{k <= n_reg_files}) : nat =
if k = 0 then 0
else count_public_registers_file regs (k - 1) (n_regs (k - 1)) + count_public_registers regs (k - 1)
let rec lemma_count_public_registers (regs1 regs2:reg_taint) (k:nat{k <= n_reg_files}) : Lemma
(requires
taintstate_monotone_regs regs2 regs1 /\
count_public_registers regs1 k >= count_public_registers regs2 k
)
(ensures
count_public_registers regs1 k == count_public_registers regs2 k /\
(forall (r:reg).{:pattern regs1 r \/ regs2 r} Reg?.rf r < k ==> regs1 r == regs2 r)
)
=
if k > 0 then (
let n = n_regs (k - 1) in
if count_public_registers_file regs1 (k - 1) n >= count_public_registers_file regs2 (k - 1) n then
lemma_count_public_registers_file regs1 regs2 (k - 1) n;
lemma_count_public_registers regs1 regs2 (k - 1)
)
let count_flagTaint (ts:analysis_taints) : nat = if Public? ts.lts.flagsTaint then 1 else 0
let count_cfFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.cfFlagsTaint then 1 else 0
let count_ofFlagTaint (ts:analysis_taints) : nat = if Public? ts.lts.ofFlagsTaint then 1 else 0
let count_publics (ts:analysis_taints) : nat =
count_public_registers ts.lts.regTaint n_reg_files +
count_flagTaint ts +
count_cfFlagTaint ts +
count_ofFlagTaint ts
let monotone_decreases_count (ts ts':analysis_taints) : Lemma
(requires taintstate_monotone ts ts' /\ not (eq_leakage_taints ts.lts ts'.lts))
(ensures count_publics ts' < count_publics ts)
=
let regs1 = ts'.lts.regTaint in
let regs2 = ts.lts.regTaint in
if count_public_registers regs1 n_reg_files >= count_public_registers regs2 n_reg_files then (
lemma_count_public_registers regs1 regs2 n_reg_files;
assert (FStar.FunctionalExtensionality.feq regs1 regs2)
)
val check_if_block_consumes_fixed_time (block:S.codes) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[block])
val check_if_code_consumes_fixed_time (code:S.code) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 1])
val check_if_loop_consumes_fixed_time (code:S.code{While? code}) (ts:analysis_taints) : Tot (bool & analysis_taints)
(decreases %[code; count_publics ts; 0]) | {
"checked_file": "/",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Leakage_s.fst.checked",
"Vale.X64.Leakage_Ins.fsti.checked",
"Vale.X64.Leakage_Helpers.fst.checked",
"Vale.Lib.MapTree.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Leakage.fst"
} | [
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Ins",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Leakage_s",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": true,
"z3rlimit": 600,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | code: Vale.X64.Machine_Semantics_s.code{While? code} -> ts: Vale.X64.Leakage_Helpers.analysis_taints
-> Prims.Tot (Prims.bool * Vale.X64.Leakage_Helpers.analysis_taints) | Prims.Tot | [
"total",
""
] | [
"check_if_block_consumes_fixed_time",
"check_if_code_consumes_fixed_time",
"check_if_loop_consumes_fixed_time"
] | [
"Vale.X64.Machine_Semantics_s.code",
"Prims.b2t",
"Vale.X64.Machine_s.uu___is_While",
"Vale.X64.Bytes_Code_s.instruction_t",
"Vale.X64.Machine_Semantics_s.instr_annotation",
"Vale.X64.Bytes_Code_s.ocmp",
"Vale.X64.Leakage_Helpers.analysis_taints",
"Vale.X64.Machine_s.precode",
"Vale.Arch.HeapTypes_s.uu___is_Secret",
"FStar.Pervasives.Native.Mktuple2",
"Prims.bool",
"Prims.op_Negation",
"Vale.X64.Leakage.eq_leakage_taints",
"Vale.X64.Leakage_Helpers.__proj__AnalysisTaints__item__lts",
"Vale.X64.Leakage.check_if_loop_consumes_fixed_time",
"Prims.unit",
"Vale.X64.Leakage.monotone_decreases_count",
"FStar.Pervasives.Native.tuple2",
"Prims._assert",
"Vale.X64.Leakage.taintstate_monotone",
"Prims.l_and",
"Prims.eq2",
"Vale.X64.Leakage_s.leakage_taints",
"Vale.X64.Leakage.combine_leakage_taints",
"Vale.X64.Leakage.combine_analysis_taints",
"Vale.X64.Leakage.check_if_code_consumes_fixed_time",
"Vale.X64.Leakage_Helpers.operand_does_not_use_secrets",
"Vale.X64.Machine_s.nat64",
"Vale.X64.Machine_s.reg_64",
"Vale.X64.Machine_Semantics_s.get_snd_ocmp",
"Vale.X64.Machine_Semantics_s.get_fst_ocmp",
"Vale.Arch.HeapTypes_s.taint",
"Vale.X64.Leakage_Helpers.merge_taint",
"Vale.X64.Leakage_Helpers.operand_taint",
"Vale.X64.Leakage.normalize_taints"
] | [
"mutual recursion"
] | false | false | false | false | false | let rec check_if_loop_consumes_fixed_time c (ts: analysis_taints) : (bool & analysis_taints) =
| let ts = normalize_taints ts in
let While pred body = c in
let o1 = operand_taint 0 (S.get_fst_ocmp pred) ts in
let o2 = operand_taint 0 (S.get_snd_ocmp pred) ts in
let predTaint = merge_taint o1 o2 in
if (Secret? predTaint)
then false, ts
else
let o1Public = operand_does_not_use_secrets (S.get_fst_ocmp pred) ts in
if (not o1Public)
then (false, ts)
else
let o2Public = operand_does_not_use_secrets (S.get_snd_ocmp pred) ts in
if (not o2Public)
then (false, ts)
else
let fixedTime, next_ts = check_if_code_consumes_fixed_time body ts in
if (not fixedTime)
then (false, ts)
else
let combined_ts = combine_analysis_taints ts next_ts in
assert (taintstate_monotone ts combined_ts);
if eq_leakage_taints combined_ts.lts ts.lts
then true, combined_ts
else
(monotone_decreases_count ts combined_ts;
check_if_loop_consumes_fixed_time c combined_ts) | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.genv_push_binder | val genv_push_binder (ge: genv) (b: binder) (abs: bool) (t: option term) : Tac genv | val genv_push_binder (ge: genv) (b: binder) (abs: bool) (t: option term) : Tac genv | let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 56,
"end_line": 282,
"start_col": 0,
"start_line": 281
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
  | Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars' | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
ge: FStar.InteractiveHelpers.Base.genv ->
b: FStar.Stubs.Reflection.Types.binder ->
abs: Prims.bool ->
t: FStar.Pervasives.Native.option FStar.Stubs.Reflection.Types.term
-> FStar.Tactics.Effect.Tac FStar.InteractiveHelpers.Base.genv | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.Stubs.Reflection.Types.binder",
"Prims.bool",
"FStar.Pervasives.Native.option",
"FStar.Stubs.Reflection.Types.term",
"FStar.InteractiveHelpers.Base.genv_push_bv",
"FStar.Reflection.V1.Derived.bv_of_binder",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Tactics.V1.Derived.binder_sort"
] | [] | false | true | false | false | false | let genv_push_binder (ge: genv) (b: binder) (abs: bool) (t: option term) : Tac genv =
| genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.genv_get | val genv_get (ge: genv) (b: bv) : Tot (option (typ & bool & term)) | val genv_get (ge: genv) (b: bv) : Tot (option (typ & bool & term)) | let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 24,
"end_line": 262,
"start_col": 0,
"start_line": 261
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
  | Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ge: FStar.InteractiveHelpers.Base.genv -> b: FStar.Stubs.Reflection.Types.bv
-> FStar.Pervasives.Native.option ((FStar.Stubs.Reflection.Types.typ * Prims.bool) *
FStar.Stubs.Reflection.Types.term) | Prims.Tot | [
"total"
] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.Stubs.Reflection.Types.bv",
"FStar.InteractiveHelpers.Base.bind_map_get",
"FStar.Pervasives.Native.tuple3",
"FStar.Stubs.Reflection.Types.typ",
"Prims.bool",
"FStar.Stubs.Reflection.Types.term",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__bmap",
"FStar.Pervasives.Native.option"
] | [] | false | false | false | true | false | let genv_get (ge: genv) (b: bv) : Tot (option (typ & bool & term)) =
| bind_map_get ge.bmap b | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.genv_to_string | val genv_to_string : genv -> Tac string | val genv_to_string : genv -> Tac string | let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 34,
"end_line": 259,
"start_col": 0,
"start_line": 244
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
  | Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] [] | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ge: FStar.InteractiveHelpers.Base.genv -> FStar.Tactics.Effect.Tac Prims.string | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"Prims.op_Hat",
"Prims.list",
"Prims.string",
"FStar.List.Tot.Base.fold_left",
"FStar.Tactics.Util.map",
"FStar.Pervasives.Native.tuple2",
"FStar.Stubs.Reflection.Types.bv",
"FStar.Stubs.Reflection.Types.typ",
"FStar.InteractiveHelpers.Base.abv_to_string",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__svars",
"FStar.Pervasives.Native.tuple3",
"Prims.bool",
"FStar.Stubs.Reflection.Types.term",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__bmap",
"Prims.string_of_bool",
"FStar.Stubs.Tactics.V1.Builtins.term_to_string",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Stubs.Reflection.V1.Builtins.binders_of_env",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__env",
"FStar.Reflection.V1.Derived.bv_of_binder"
] | [] | false | true | false | false | false | let genv_to_string ge =
| let binder_to_string (b: binder) : Tac string = abv_to_string (bv_of_binder b) ^ "\n" in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e: bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^ " -> (" ^ string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^ "> bmap:\n" ^ flatten bmap_str ^ "> svars:\n" ^ flatten svars_str | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.binder_is_shadowed | val binder_is_shadowed (ge: genv) (b: binder) : Tot bool | val binder_is_shadowed (ge: genv) (b: binder) : Tot bool | let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b) | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 36,
"end_line": 289,
"start_col": 0,
"start_line": 288
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
  | Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ge: FStar.InteractiveHelpers.Base.genv -> b: FStar.Stubs.Reflection.Types.binder -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.Stubs.Reflection.Types.binder",
"FStar.InteractiveHelpers.Base.bv_is_shadowed",
"FStar.Reflection.V1.Derived.bv_of_binder",
"Prims.bool"
] | [] | false | false | false | true | false | let binder_is_shadowed (ge: genv) (b: binder) : Tot bool =
| bv_is_shadowed ge (bv_of_binder b) | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.find_shadowed_binders | val find_shadowed_binders (ge: genv) (bl: list binder) : Tot (list (binder & bool)) | val find_shadowed_binders (ge: genv) (bl: list binder) : Tot (list (binder & bool)) | let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 55,
"end_line": 295,
"start_col": 0,
"start_line": 294
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ge: FStar.InteractiveHelpers.Base.genv -> bl: Prims.list FStar.Stubs.Reflection.Types.binder
-> Prims.list (FStar.Stubs.Reflection.Types.binder * Prims.bool) | Prims.Tot | [
"total"
] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"Prims.list",
"FStar.Stubs.Reflection.Types.binder",
"FStar.List.Tot.Base.map",
"FStar.Pervasives.Native.tuple2",
"Prims.bool",
"FStar.Pervasives.Native.Mktuple2",
"FStar.InteractiveHelpers.Base.binder_is_shadowed"
] | [] | false | false | false | true | false | let find_shadowed_binders (ge: genv) (bl: list binder) : Tot (list (binder & bool)) =
| List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.find_shadowed_bvs | val find_shadowed_bvs (ge: genv) (bl: list bv) : Tot (list (bv & bool)) | val find_shadowed_bvs (ge: genv) (bl: list bv) : Tot (list (bv & bool)) | let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 51,
"end_line": 292,
"start_col": 0,
"start_line": 291
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ge: FStar.InteractiveHelpers.Base.genv -> bl: Prims.list FStar.Stubs.Reflection.Types.bv
-> Prims.list (FStar.Stubs.Reflection.Types.bv * Prims.bool) | Prims.Tot | [
"total"
] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"Prims.list",
"FStar.Stubs.Reflection.Types.bv",
"FStar.List.Tot.Base.map",
"FStar.Pervasives.Native.tuple2",
"Prims.bool",
"FStar.Pervasives.Native.Mktuple2",
"FStar.InteractiveHelpers.Base.bv_is_shadowed"
] | [] | false | false | false | true | false | let find_shadowed_bvs (ge: genv) (bl: list bv) : Tot (list (bv & bool)) =
| List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl | false |
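Illustration only, not part of the row above: find_shadowed_bvs (and its binder counterpart) merely tags each element with its shadowing status, so callers usually post-process the tagged list. A hedged sketch, assuming the same module context; ge0 and bvs are made-up names.

(* Keep only the variables that are actually shadowed in ge0. *)
let shadowed_subset (ge0 : genv) (bvs : list bv) : Tot (list bv) =
  List.Tot.map fst (List.Tot.filter (fun (_, s) -> s) (find_shadowed_bvs ge0 bvs))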
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.is_permutation | val is_permutation (nts:nt_substs) (ss:ss_t) : Type0 | val is_permutation (nts:nt_substs) (ss:ss_t) : Type0 | let rec is_permutation (nts:nt_substs) (ss:ss_t) : Type0 =
match nts, ss.l with
| [], [] -> True
| (NT x e)::nts_rest, _::_ ->
Map.contains ss.m x /\
Map.sel ss.m x == e /\
is_permutation nts_rest ({l=remove_l ss.l x; m=remove_map ss.m x})
| _ -> False | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 14,
"end_line": 280,
"start_col": 0,
"start_line": 273
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty
#push-options "--warn_error -271"
let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
()
#pop-options
let rec remove_l (l:ss_dom) (x:var { L.memP x l })
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y:var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
match l with
| [] -> assert False; []
| y::tl ->
if y = x then tl
else y::(remove_l tl x)
let rec is_dom_remove
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { Map.contains m x })
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] =
match l with
| [] -> ()
| y::tl ->
if x = y then ()
else let t_y = Map.sel m y in
let m1 = remove_map m y in
is_dom_remove tl m1 x;
assert (is_dom (remove_l tl x) (remove_map m1 x));
is_dom_push (remove_l tl x) (remove_map m1 x) y t_y;
assert (Map.equal (Map.upd (remove_map m1 x) y t_y)
(remove_map m x))
let rec ss_term (t:term) (ss:ss_t) : Tot term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_term t [ NT y (Map.sel ss.m y) ] in
ss_term t (tail ss)
let rec ss_st_term (t:st_term) (ss:ss_t) : Tot st_term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_st_term t [ NT y (Map.sel ss.m y) ] in
ss_st_term t (tail ss)
let rec ss_st_comp (s:st_comp) (ss:ss_t)
: Tot st_comp (decreases L.length ss.l) =
match ss.l with
| [] -> s
| y::tl ->
let s = subst_st_comp s [ NT y (Map.sel ss.m y) ] in
ss_st_comp s (tail ss)
let rec ss_comp (c:comp) (ss:ss_t)
: Tot comp (decreases L.length ss.l) =
match ss.l with
| [] -> c
| y::tl ->
let c = subst_comp c [ NT y (Map.sel ss.m y) ] in
ss_comp c (tail ss)
let rec ss_binder (b:binder) (ss:ss_t)
: Tot binder (decreases L.length ss.l) =
match ss.l with
| [] -> b
| y::tl ->
let b = subst_binder b [ NT y (Map.sel ss.m y) ] in
ss_binder b (tail ss)
let rec ss_env (g:env) (ss:ss_t)
: Tot (g':env { fstar_env g' == fstar_env g /\
Env.dom g' == Env.dom g })
(decreases L.length ss.l) =
admit ();
match ss.l with
| [] -> g
| y::tl -> ss_env (subst_env g [ NT y (Map.sel ss.m y) ]) (tail ss)
let rec ss_st_comp_commutes (s:st_comp) (ss:ss_t)
: Lemma (ensures
ss_st_comp s ss ==
{ s with res = ss_term s.res ss;
pre = ss_term s.pre ss;
post = ss_term s.post ss; }) // no shifting required
(decreases L.length ss.l)
[SMTPat (ss_st_comp s ss)] =
match ss.l with
| [] -> ()
| y::tl -> ss_st_comp_commutes (subst_st_comp s [ NT y (Map.sel ss.m y) ]) (tail ss)
let rec ss_comp_commutes (c:comp) (ss:ss_t)
: Lemma (ensures
(let r = ss_comp c ss in
(C_Tot? c ==> r == C_Tot (ss_term (comp_res c) ss)) /\
(C_ST? c ==> r == C_ST (ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STAtomic? c ==> r == C_STAtomic (ss_term (comp_inames c) ss)
(C_STAtomic?.obs c)
(ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STGhost? c ==> r == C_STGhost (ss_st_comp (st_comp_of_comp c) ss))))
(decreases L.length ss.l)
[SMTPat (ss_comp c ss)] =
match ss.l with
| [] -> ()
| y::tl -> ss_comp_commutes (subst_comp c [ NT y (Map.sel ss.m y) ]) (tail ss)
let rec nt_substs_st_comp_commutes (s:st_comp) (nts:nt_substs)
: Lemma (ensures
nt_subst_st_comp s nts ==
{ s with res = nt_subst_term s.res nts;
pre = nt_subst_term s.pre nts;
post = nt_subst_term s.post nts; }) // no shifting required
(decreases nts)
[SMTPat (nt_subst_st_comp s nts)] =
match nts with
| [] -> ()
| (NT x e)::nts_tl -> nt_substs_st_comp_commutes (nt_subst_st_comp s [ NT x e ]) nts_tl
let rec nt_subst_comp_commutes (c:comp) (nts:nt_substs)
: Lemma (ensures
(let r = nt_subst_comp c nts in
(C_Tot? c ==> r == C_Tot (nt_subst_term (comp_res c) nts)) /\
(C_ST? c ==> r == C_ST (nt_subst_st_comp (st_comp_of_comp c) nts)) /\
(C_STAtomic? c ==> r == C_STAtomic (nt_subst_term (comp_inames c) nts)
(C_STAtomic?.obs c)
(nt_subst_st_comp (st_comp_of_comp c) nts)) /\
(C_STGhost? c ==> r == C_STGhost (nt_subst_st_comp (st_comp_of_comp c) nts))))
(decreases nts)
[SMTPat (nt_subst_comp c nts)] =
match nts with
| [] -> ()
| (NT x e)::nts_tl -> nt_subst_comp_commutes (nt_subst_comp c [ NT x e ]) nts_tl | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | nts: Pulse.Checker.Prover.Substs.nt_substs -> ss: Pulse.Checker.Prover.Substs.ss_t -> Type0 | Prims.Tot | [
"total"
] | [] | [
"Pulse.Checker.Prover.Substs.nt_substs",
"Pulse.Checker.Prover.Substs.ss_t",
"FStar.Pervasives.Native.Mktuple2",
"Prims.list",
"Pulse.Syntax.Naming.subst_elt",
"Pulse.Syntax.Base.var",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l",
"Prims.l_True",
"Pulse.Syntax.Base.term",
"Prims.l_and",
"Prims.b2t",
"FStar.Map.contains",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"Prims.eq2",
"FStar.Map.sel",
"Pulse.Checker.Prover.Substs.is_permutation",
"Pulse.Checker.Prover.Substs.Mkss_t",
"Pulse.Checker.Prover.Substs.remove_l",
"Pulse.Checker.Prover.Substs.remove_map",
"FStar.Pervasives.Native.tuple2",
"Prims.l_False"
] | [
"recursion"
] | false | false | false | true | true | let rec is_permutation (nts: nt_substs) (ss: ss_t) : Type0 =
| match nts, ss.l with
| [], [] -> True
| NT x e :: nts_rest, _ :: _ ->
Map.contains ss.m x /\ Map.sel ss.m x == e /\
is_permutation nts_rest ({ l = remove_l ss.l x; m = remove_map ss.m x })
| _ -> False | false |
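Illustration only, independent of the row above: is_permutation walks the substitution list and consumes the matching entry of the map at every step. The standalone sketch below replays the same idea over integer keys with FStar.Map, so it does not depend on the Pulse nt_substs/ss_t types; all names here are invented for the example.

module M = FStar.Map
module S = FStar.Set

(* pairs is a permutation of the map m (with domain list dom) iff every pair is
   present in m and consuming it leaves a permutation of the remaining map. *)
let rec perm_of_map (pairs : list (int & int)) (dom : list int) (m : M.t int int) : Tot Type0 =
  match pairs, dom with
  | [], [] -> True
  | (x, v) :: rest, _ :: _ ->
    M.contains m x /\ M.sel m x == v /\
    perm_of_map rest (List.Tot.filter (fun y -> y <> x) dom)
                     (M.restrict (S.complement (S.singleton x)) m)
  | _ -> False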
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.genv_push_fresh_binder | val genv_push_fresh_binder (ge: genv) (basename: string) (ty: typ) : Tac (genv & binder) | val genv_push_fresh_binder (ge: genv) (basename: string) (ty: typ) : Tac (genv & binder) | let genv_push_fresh_binder (ge : genv) (basename : string) (ty : typ) : Tac (genv & binder) =
let b = fresh_binder ge.env basename ty in
(* TODO: we can have a shortcircuit push (which performs less checks) *)
let ge' = genv_push_binder ge b true None in
ge', b | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 8,
"end_line": 335,
"start_col": 0,
"start_line": 331
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In worst case the performance is quadratic in the number of binders.
* TODO: fix that, it actually probably happens for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name
let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0
let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder =
let bv = fresh_bv e basename in
mk_binder bv ty | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
ge: FStar.InteractiveHelpers.Base.genv ->
basename: Prims.string ->
ty: FStar.Stubs.Reflection.Types.typ
-> FStar.Tactics.Effect.Tac
(FStar.InteractiveHelpers.Base.genv * FStar.Stubs.Reflection.Types.binder) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"Prims.string",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Pervasives.Native.tuple2",
"FStar.InteractiveHelpers.Base.genv_push_binder",
"FStar.Pervasives.Native.None",
"FStar.Stubs.Reflection.Types.term",
"FStar.InteractiveHelpers.Base.fresh_binder",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__env"
] | [] | false | true | false | false | false | let genv_push_fresh_binder (ge: genv) (basename: string) (ty: typ) : Tac (genv & binder) =
| let b = fresh_binder ge.env basename ty in
let ge' = genv_push_binder ge b true None in
ge', b | false |
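Illustration only, not part of the row above: a common pattern is to call genv_push_fresh_binder twice with the same base name, for instance to introduce the two abstract memory variables of a stateful post-condition. mem_ty is a hypothetical placeholder for whatever state type is in play, and the snippet assumes the module is in scope.

let push_two_fresh_states (ge0 : genv) (mem_ty : typ) : Tac (genv & binder & binder) =
  let ge1, h1 = genv_push_fresh_binder ge0 "__h" mem_ty in
  (* fresh_binder scans the names already bound in ge1.env, so this second call
     cannot pick the same name again. *)
  let ge2, h2 = genv_push_fresh_binder ge1 "__h" mem_ty in
  ge2, h1, h2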
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.genv_abstract_bvs | val genv_abstract_bvs : genv -> Tot (list (bv & typ)) | val genv_abstract_bvs : genv -> Tot (list (bv & typ)) | let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 67,
"end_line": 310,
"start_col": 0,
"start_line": 308
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
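(* Editor's sketch, not part of the original module: the intended effect of
   [filter_ascriptions] / [prettify_term] is to strip type ascriptions before
   a term is printed back to the user. Assuming [dbg : bool] is in scope:

     let t  = `(1 <: nat) in
     let t' = prettify_term dbg t in

   [term_to_string t'] is then expected to print "1" rather than "1 <: nat",
   subject to the caveat in the TODO above about some terms being rewritten
   into their desugared form. *)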
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
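(* Editor's sketch, not part of the original module: a [bind_map] is just an
   association list keyed by [bv], so the most recently pushed binding for a
   variable is the one returned by lookup. Assuming some [b : bv] is in scope:

     let m  : bind_map int = bind_map_push [] b 0 in
     let m' = bind_map_push m b 1 in

   [bind_map_get m' b] is then expected to be [Some 1] (the newer binding
   wins), while [bind_map_get_from_name] performs the same lookup by pretty
   name instead of by [bv] identity. *)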
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak the returned value a bit to include the sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
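(* Editor's sketch, not part of the original module: pushing a variable whose
   pretty name is already bound records the older binding in [svars], which is
   what the shadowing helpers below rely on. Assuming [ge : genv] already
   binds some [x0 : bv] named "x", and [x1 : bv] is a fresh bv also named "x":

     let ge' = genv_push_bv ge x1 (`nat) false None in

   [bv_is_shadowed ge' x0] is then expected to hold, since "x" was rebound;
   the sort [`nat] is only a placeholder for the illustration. *)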
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ge: FStar.InteractiveHelpers.Base.genv
-> Prims.list (FStar.Stubs.Reflection.Types.bv * FStar.Stubs.Reflection.Types.typ) | Prims.Tot | [
"total"
] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.List.Tot.Base.concatMap",
"FStar.Pervasives.Native.tuple2",
"FStar.Stubs.Reflection.Types.bv",
"FStar.Pervasives.Native.tuple3",
"FStar.Stubs.Reflection.Types.typ",
"Prims.bool",
"FStar.Stubs.Reflection.Types.term",
"Prims.Cons",
"FStar.Pervasives.Native.Mktuple2",
"Prims.Nil",
"Prims.list",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__bmap"
] | [] | false | false | false | true | false | let genv_abstract_bvs ge =
| List.Tot.concatMap (fun (bv, (ty, abs, _)) -> if abs then [bv, ty] else []) ge.bmap | false |
Test.Vectors.Chacha20Poly1305.fst | Test.Vectors.Chacha20Poly1305.input7 | val input7:(b: B.buffer UInt8.t {B.length b = 512 /\ B.recallable b /\ B.disjoint b aad7}) | val input7:(b: B.buffer UInt8.t {B.length b = 512 /\ B.recallable b /\ B.disjoint b aad7}) | let input7: (b: B.buffer UInt8.t { B.length b = 512 /\ B.recallable b /\ B.disjoint b aad7 }) =
B.recall aad7;[@inline_let] let l = [ 0xc3uy; 0x09uy; 0x94uy; 0x62uy; 0xe6uy; 0x46uy; 0x2euy; 0x10uy; 0xbeuy; 0x00uy; 0xe4uy; 0xfcuy; 0xf3uy; 0x40uy; 0xa3uy; 0xe2uy; 0x0fuy; 0xc2uy; 0x8buy; 0x28uy; 0xdcuy; 0xbauy; 0xb4uy; 0x3cuy; 0xe4uy; 0x21uy; 0x58uy; 0x61uy; 0xcduy; 0x8buy; 0xcduy; 0xfbuy; 0xacuy; 0x94uy; 0xa1uy; 0x45uy; 0xf5uy; 0x1cuy; 0xe1uy; 0x12uy; 0xe0uy; 0x3buy; 0x67uy; 0x21uy; 0x54uy; 0x5euy; 0x8cuy; 0xaauy; 0xcfuy; 0xdbuy; 0xb4uy; 0x51uy; 0xd4uy; 0x13uy; 0xdauy; 0xe6uy; 0x83uy; 0x89uy; 0xb6uy; 0x92uy; 0xe9uy; 0x21uy; 0x76uy; 0xa4uy; 0x93uy; 0x7duy; 0x0euy; 0xfduy; 0x96uy; 0x36uy; 0x03uy; 0x91uy; 0x43uy; 0x5cuy; 0x92uy; 0x49uy; 0x62uy; 0x61uy; 0x7buy; 0xebuy; 0x43uy; 0x89uy; 0xb8uy; 0x12uy; 0x20uy; 0x43uy; 0xd4uy; 0x47uy; 0x06uy; 0x84uy; 0xeeuy; 0x47uy; 0xe9uy; 0x8auy; 0x73uy; 0x15uy; 0x0fuy; 0x72uy; 0xcfuy; 0xeduy; 0xceuy; 0x96uy; 0xb2uy; 0x7fuy; 0x21uy; 0x45uy; 0x76uy; 0xebuy; 0x26uy; 0x28uy; 0x83uy; 0x6auy; 0xaduy; 0xaauy; 0xa6uy; 0x81uy; 0xd8uy; 0x55uy; 0xb1uy; 0xa3uy; 0x85uy; 0xb3uy; 0x0cuy; 0xdfuy; 0xf1uy; 0x69uy; 0x2duy; 0x97uy; 0x05uy; 0x2auy; 0xbcuy; 0x7cuy; 0x7buy; 0x25uy; 0xf8uy; 0x80uy; 0x9duy; 0x39uy; 0x25uy; 0xf3uy; 0x62uy; 0xf0uy; 0x66uy; 0x5euy; 0xf4uy; 0xa0uy; 0xcfuy; 0xd8uy; 0xfduy; 0x4fuy; 0xb1uy; 0x1fuy; 0x60uy; 0x3auy; 0x08uy; 0x47uy; 0xafuy; 0xe1uy; 0xf6uy; 0x10uy; 0x77uy; 0x09uy; 0xa7uy; 0x27uy; 0x8fuy; 0x9auy; 0x97uy; 0x5auy; 0x26uy; 0xfauy; 0xfeuy; 0x41uy; 0x32uy; 0x83uy; 0x10uy; 0xe0uy; 0x1duy; 0xbfuy; 0x64uy; 0x0duy; 0xf4uy; 0x1cuy; 0x32uy; 0x35uy; 0xe5uy; 0x1buy; 0x36uy; 0xefuy; 0xd4uy; 0x4auy; 0x93uy; 0x4duy; 0x00uy; 0x7cuy; 0xecuy; 0x02uy; 0x07uy; 0x8buy; 0x5duy; 0x7duy; 0x1buy; 0x0euy; 0xd1uy; 0xa6uy; 0xa5uy; 0x5duy; 0x7duy; 0x57uy; 0x88uy; 0xa8uy; 0xccuy; 0x81uy; 0xb4uy; 0x86uy; 0x4euy; 0xb4uy; 0x40uy; 0xe9uy; 0x1duy; 0xc3uy; 0xb1uy; 0x24uy; 0x3euy; 0x7fuy; 0xccuy; 0x8auy; 0x24uy; 0x9buy; 0xdfuy; 0x6duy; 0xf0uy; 0x39uy; 0x69uy; 0x3euy; 0x4cuy; 0xc0uy; 0x96uy; 0xe4uy; 0x13uy; 0xdauy; 0x90uy; 0xdauy; 0xf4uy; 0x95uy; 0x66uy; 0x8buy; 0x17uy; 0x17uy; 0xfeuy; 0x39uy; 0x43uy; 0x25uy; 0xaauy; 0xdauy; 0xa0uy; 0x43uy; 0x3cuy; 0xb1uy; 0x41uy; 0x02uy; 0xa3uy; 0xf0uy; 0xa7uy; 0x19uy; 0x59uy; 0xbcuy; 0x1duy; 0x7duy; 0x6cuy; 0x6duy; 0x91uy; 0x09uy; 0x5cuy; 0xb7uy; 0x5buy; 0x01uy; 0xd1uy; 0x6fuy; 0x17uy; 0x21uy; 0x97uy; 0xbfuy; 0x89uy; 0x71uy; 0xa5uy; 0xb0uy; 0x6euy; 0x07uy; 0x45uy; 0xfduy; 0x9duy; 0xeauy; 0x07uy; 0xf6uy; 0x7auy; 0x9fuy; 0x10uy; 0x18uy; 0x22uy; 0x30uy; 0x73uy; 0xacuy; 0xd4uy; 0x6buy; 0x72uy; 0x44uy; 0xeduy; 0xd9uy; 0x19uy; 0x9buy; 0x2duy; 0x4auy; 0x41uy; 0xdduy; 0xd1uy; 0x85uy; 0x5euy; 0x37uy; 0x19uy; 0xeduy; 0xd2uy; 0x15uy; 0x8fuy; 0x5euy; 0x91uy; 0xdbuy; 0x33uy; 0xf2uy; 0xe4uy; 0xdbuy; 0xffuy; 0x98uy; 0xfbuy; 0xa3uy; 0xb5uy; 0xcauy; 0x21uy; 0x69uy; 0x08uy; 0xe7uy; 0x8auy; 0xdfuy; 0x90uy; 0xffuy; 0x3euy; 0xe9uy; 0x20uy; 0x86uy; 0x3cuy; 0xe9uy; 0xfcuy; 0x0buy; 0xfeuy; 0x5cuy; 0x61uy; 0xaauy; 0x13uy; 0x92uy; 0x7fuy; 0x7buy; 0xecuy; 0xe0uy; 0x6duy; 0xa8uy; 0x23uy; 0x22uy; 0xf6uy; 0x6buy; 0x77uy; 0xc4uy; 0xfeuy; 0x40uy; 0x07uy; 0x3buy; 0xb6uy; 0xf6uy; 0x8euy; 0x5fuy; 0xd4uy; 0xb9uy; 0xb7uy; 0x0fuy; 0x21uy; 0x04uy; 0xefuy; 0x83uy; 0x63uy; 0x91uy; 0x69uy; 0x40uy; 0xa3uy; 0x48uy; 0x5cuy; 0xd2uy; 0x60uy; 0xf9uy; 0x4fuy; 0x6cuy; 0x47uy; 0x8buy; 0x3buy; 0xb1uy; 0x9fuy; 0x8euy; 0xeeuy; 0x16uy; 0x8auy; 0x13uy; 0xfcuy; 0x46uy; 0x17uy; 0xc3uy; 0xc3uy; 0x32uy; 0x56uy; 0xf8uy; 0x3cuy; 0x85uy; 0x3auy; 0xb6uy; 0x3euy; 0xaauy; 0x89uy; 0x4fuy; 0xb3uy; 0xdfuy; 0x38uy; 0xfduy; 0xf1uy; 0xe4uy; 0x3auy; 0xc0uy; 0xe6uy; 0x58uy; 0xb5uy; 0x8fuy; 0xc5uy; 0x29uy; 0xa2uy; 
0x92uy; 0x4auy; 0xb6uy; 0xa0uy; 0x34uy; 0x7fuy; 0xabuy; 0xb5uy; 0x8auy; 0x90uy; 0xa1uy; 0xdbuy; 0x4duy; 0xcauy; 0xb6uy; 0x2cuy; 0x41uy; 0x3cuy; 0xf7uy; 0x2buy; 0x21uy; 0xc3uy; 0xfduy; 0xf4uy; 0x17uy; 0x5cuy; 0xb5uy; 0x33uy; 0x17uy; 0x68uy; 0x2buy; 0x08uy; 0x30uy; 0xf3uy; 0xf7uy; 0x30uy; 0x3cuy; 0x96uy; 0xe6uy; 0x6auy; 0x20uy; 0x97uy; 0xe7uy; 0x4duy; 0x10uy; 0x5fuy; 0x47uy; 0x5fuy; 0x49uy; 0x96uy; 0x09uy; 0xf0uy; 0x27uy; 0x91uy; 0xc8uy; 0xf8uy; 0x5auy; 0x2euy; 0x79uy; 0xb5uy; 0xe2uy; 0xb8uy; 0xe8uy; 0xb9uy; 0x7buy; 0xd5uy; 0x10uy; 0xcbuy; 0xffuy; 0x5duy; 0x14uy; 0x73uy; 0xf3uy; ] in
assert_norm (List.Tot.length l = 512);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Chacha20Poly1305.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 314,
"start_col": 0,
"start_line": 311
} | module Test.Vectors.Chacha20Poly1305
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x1cuy; 0x92uy; 0x40uy; 0xa5uy; 0xebuy; 0x55uy; 0xd3uy; 0x8auy; 0xf3uy; 0x33uy; 0x88uy; 0x86uy; 0x04uy; 0xf6uy; 0xb5uy; 0xf0uy; 0x47uy; 0x39uy; 0x17uy; 0xc1uy; 0x40uy; 0x2buy; 0x80uy; 0x09uy; 0x9duy; 0xcauy; 0x5cuy; 0xbcuy; 0x20uy; 0x70uy; 0x75uy; 0xc0uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
32ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x05uy; 0x06uy; 0x07uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xf3uy; 0x33uy; 0x88uy; 0x86uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x4euy; 0x91uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
12ul
let input0: (b: B.buffer UInt8.t { B.length b = 265 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ 0x49uy; 0x6euy; 0x74uy; 0x65uy; 0x72uy; 0x6euy; 0x65uy; 0x74uy; 0x2duy; 0x44uy; 0x72uy; 0x61uy; 0x66uy; 0x74uy; 0x73uy; 0x20uy; 0x61uy; 0x72uy; 0x65uy; 0x20uy; 0x64uy; 0x72uy; 0x61uy; 0x66uy; 0x74uy; 0x20uy; 0x64uy; 0x6fuy; 0x63uy; 0x75uy; 0x6duy; 0x65uy; 0x6euy; 0x74uy; 0x73uy; 0x20uy; 0x76uy; 0x61uy; 0x6cuy; 0x69uy; 0x64uy; 0x20uy; 0x66uy; 0x6fuy; 0x72uy; 0x20uy; 0x61uy; 0x20uy; 0x6duy; 0x61uy; 0x78uy; 0x69uy; 0x6duy; 0x75uy; 0x6duy; 0x20uy; 0x6fuy; 0x66uy; 0x20uy; 0x73uy; 0x69uy; 0x78uy; 0x20uy; 0x6duy; 0x6fuy; 0x6euy; 0x74uy; 0x68uy; 0x73uy; 0x20uy; 0x61uy; 0x6euy; 0x64uy; 0x20uy; 0x6duy; 0x61uy; 0x79uy; 0x20uy; 0x62uy; 0x65uy; 0x20uy; 0x75uy; 0x70uy; 0x64uy; 0x61uy; 0x74uy; 0x65uy; 0x64uy; 0x2cuy; 0x20uy; 0x72uy; 0x65uy; 0x70uy; 0x6cuy; 0x61uy; 0x63uy; 0x65uy; 0x64uy; 0x2cuy; 0x20uy; 0x6fuy; 0x72uy; 0x20uy; 0x6fuy; 0x62uy; 0x73uy; 0x6fuy; 0x6cuy; 0x65uy; 0x74uy; 0x65uy; 0x64uy; 0x20uy; 0x62uy; 0x79uy; 0x20uy; 0x6fuy; 0x74uy; 0x68uy; 0x65uy; 0x72uy; 0x20uy; 0x64uy; 0x6fuy; 0x63uy; 0x75uy; 0x6duy; 0x65uy; 0x6euy; 0x74uy; 0x73uy; 0x20uy; 0x61uy; 0x74uy; 0x20uy; 0x61uy; 0x6euy; 0x79uy; 0x20uy; 0x74uy; 0x69uy; 0x6duy; 0x65uy; 0x2euy; 0x20uy; 0x49uy; 0x74uy; 0x20uy; 0x69uy; 0x73uy; 0x20uy; 0x69uy; 0x6euy; 0x61uy; 0x70uy; 0x70uy; 0x72uy; 0x6fuy; 0x70uy; 0x72uy; 0x69uy; 0x61uy; 0x74uy; 0x65uy; 0x20uy; 0x74uy; 0x6fuy; 0x20uy; 0x75uy; 0x73uy; 0x65uy; 0x20uy; 0x49uy; 0x6euy; 0x74uy; 0x65uy; 0x72uy; 0x6euy; 0x65uy; 0x74uy; 0x2duy; 0x44uy; 0x72uy; 0x61uy; 0x66uy; 0x74uy; 0x73uy; 0x20uy; 0x61uy; 0x73uy; 0x20uy; 0x72uy; 0x65uy; 0x66uy; 0x65uy; 0x72uy; 0x65uy; 0x6euy; 0x63uy; 0x65uy; 0x20uy; 0x6duy; 0x61uy; 0x74uy; 0x65uy; 0x72uy; 0x69uy; 0x61uy; 0x6cuy; 0x20uy; 0x6fuy; 0x72uy; 0x20uy; 0x74uy; 0x6fuy; 0x20uy; 0x63uy; 0x69uy; 0x74uy; 0x65uy; 0x20uy; 0x74uy; 0x68uy; 0x65uy; 0x6duy; 0x20uy; 0x6fuy; 0x74uy; 0x68uy; 0x65uy; 0x72uy; 0x20uy; 0x74uy; 0x68uy; 0x61uy; 0x6euy; 0x20uy; 0x61uy; 0x73uy; 0x20uy; 0x2fuy; 0xe2uy; 0x80uy; 0x9cuy; 0x77uy; 0x6fuy; 0x72uy; 0x6buy; 0x20uy; 0x69uy; 0x6euy; 0x20uy; 0x70uy; 0x72uy; 0x6fuy; 0x67uy; 0x72uy; 0x65uy; 0x73uy; 0x73uy; 0x2euy; 0x2fuy; 0xe2uy; 0x80uy; 0x9duy; ] in
assert_norm (List.Tot.length l = 265);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
265ul
let output0: (b: B.buffer UInt8.t { B.length b = 281 /\ B.recallable b }) =
[@inline_let] let l = [ 0x64uy; 0xa0uy; 0x86uy; 0x15uy; 0x75uy; 0x86uy; 0x1auy; 0xf4uy; 0x60uy; 0xf0uy; 0x62uy; 0xc7uy; 0x9buy; 0xe6uy; 0x43uy; 0xbduy; 0x5euy; 0x80uy; 0x5cuy; 0xfduy; 0x34uy; 0x5cuy; 0xf3uy; 0x89uy; 0xf1uy; 0x08uy; 0x67uy; 0x0auy; 0xc7uy; 0x6cuy; 0x8cuy; 0xb2uy; 0x4cuy; 0x6cuy; 0xfcuy; 0x18uy; 0x75uy; 0x5duy; 0x43uy; 0xeeuy; 0xa0uy; 0x9euy; 0xe9uy; 0x4euy; 0x38uy; 0x2duy; 0x26uy; 0xb0uy; 0xbduy; 0xb7uy; 0xb7uy; 0x3cuy; 0x32uy; 0x1buy; 0x01uy; 0x00uy; 0xd4uy; 0xf0uy; 0x3buy; 0x7fuy; 0x35uy; 0x58uy; 0x94uy; 0xcfuy; 0x33uy; 0x2fuy; 0x83uy; 0x0euy; 0x71uy; 0x0buy; 0x97uy; 0xceuy; 0x98uy; 0xc8uy; 0xa8uy; 0x4auy; 0xbduy; 0x0buy; 0x94uy; 0x81uy; 0x14uy; 0xaduy; 0x17uy; 0x6euy; 0x00uy; 0x8duy; 0x33uy; 0xbduy; 0x60uy; 0xf9uy; 0x82uy; 0xb1uy; 0xffuy; 0x37uy; 0xc8uy; 0x55uy; 0x97uy; 0x97uy; 0xa0uy; 0x6euy; 0xf4uy; 0xf0uy; 0xefuy; 0x61uy; 0xc1uy; 0x86uy; 0x32uy; 0x4euy; 0x2buy; 0x35uy; 0x06uy; 0x38uy; 0x36uy; 0x06uy; 0x90uy; 0x7buy; 0x6auy; 0x7cuy; 0x02uy; 0xb0uy; 0xf9uy; 0xf6uy; 0x15uy; 0x7buy; 0x53uy; 0xc8uy; 0x67uy; 0xe4uy; 0xb9uy; 0x16uy; 0x6cuy; 0x76uy; 0x7buy; 0x80uy; 0x4duy; 0x46uy; 0xa5uy; 0x9buy; 0x52uy; 0x16uy; 0xcduy; 0xe7uy; 0xa4uy; 0xe9uy; 0x90uy; 0x40uy; 0xc5uy; 0xa4uy; 0x04uy; 0x33uy; 0x22uy; 0x5euy; 0xe2uy; 0x82uy; 0xa1uy; 0xb0uy; 0xa0uy; 0x6cuy; 0x52uy; 0x3euy; 0xafuy; 0x45uy; 0x34uy; 0xd7uy; 0xf8uy; 0x3fuy; 0xa1uy; 0x15uy; 0x5buy; 0x00uy; 0x47uy; 0x71uy; 0x8cuy; 0xbcuy; 0x54uy; 0x6auy; 0x0duy; 0x07uy; 0x2buy; 0x04uy; 0xb3uy; 0x56uy; 0x4euy; 0xeauy; 0x1buy; 0x42uy; 0x22uy; 0x73uy; 0xf5uy; 0x48uy; 0x27uy; 0x1auy; 0x0buy; 0xb2uy; 0x31uy; 0x60uy; 0x53uy; 0xfauy; 0x76uy; 0x99uy; 0x19uy; 0x55uy; 0xebuy; 0xd6uy; 0x31uy; 0x59uy; 0x43uy; 0x4euy; 0xceuy; 0xbbuy; 0x4euy; 0x46uy; 0x6duy; 0xaeuy; 0x5auy; 0x10uy; 0x73uy; 0xa6uy; 0x72uy; 0x76uy; 0x27uy; 0x09uy; 0x7auy; 0x10uy; 0x49uy; 0xe6uy; 0x17uy; 0xd9uy; 0x1duy; 0x36uy; 0x10uy; 0x94uy; 0xfauy; 0x68uy; 0xf0uy; 0xffuy; 0x77uy; 0x98uy; 0x71uy; 0x30uy; 0x30uy; 0x5buy; 0xeauy; 0xbauy; 0x2euy; 0xdauy; 0x04uy; 0xdfuy; 0x99uy; 0x7buy; 0x71uy; 0x4duy; 0x6cuy; 0x6fuy; 0x2cuy; 0x29uy; 0xa6uy; 0xaduy; 0x5cuy; 0xb4uy; 0x02uy; 0x2buy; 0x02uy; 0x70uy; 0x9buy; 0xeeuy; 0xaduy; 0x9duy; 0x67uy; 0x89uy; 0x0cuy; 0xbbuy; 0x22uy; 0x39uy; 0x23uy; 0x36uy; 0xfeuy; 0xa1uy; 0x85uy; 0x1fuy; 0x38uy; ] in
assert_norm (List.Tot.length l = 281);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
281ul
let key1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4cuy; 0xf5uy; 0x96uy; 0x83uy; 0x38uy; 0xe6uy; 0xaeuy; 0x7fuy; 0x2duy; 0x29uy; 0x25uy; 0x76uy; 0xd5uy; 0x75uy; 0x27uy; 0x86uy; 0x91uy; 0x9auy; 0x27uy; 0x7auy; 0xfbuy; 0x46uy; 0xc5uy; 0xefuy; 0x94uy; 0x81uy; 0x79uy; 0x57uy; 0x14uy; 0x59uy; 0x40uy; 0x68uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
32ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0xcauy; 0xbfuy; 0x33uy; 0x71uy; 0x32uy; 0x45uy; 0x77uy; 0x8euy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
0ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xeauy; 0xe0uy; 0x1euy; 0x9euy; 0x2cuy; 0x91uy; 0xaauy; 0xe1uy; 0xdbuy; 0x5duy; 0x99uy; 0x3fuy; 0x8auy; 0xf7uy; 0x69uy; 0x92uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x2duy; 0xb0uy; 0x5duy; 0x40uy; 0xc8uy; 0xeduy; 0x44uy; 0x88uy; 0x34uy; 0xd1uy; 0x13uy; 0xafuy; 0x57uy; 0xa1uy; 0xebuy; 0x3auy; 0x2auy; 0x80uy; 0x51uy; 0x36uy; 0xecuy; 0x5buy; 0xbcuy; 0x08uy; 0x93uy; 0x84uy; 0x21uy; 0xb5uy; 0x13uy; 0x88uy; 0x3cuy; 0x0duy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
32ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x3duy; 0x86uy; 0xb5uy; 0x6buy; 0xc8uy; 0xa3uy; 0x1fuy; 0x1duy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 8 /\ B.recallable b }) =
[@inline_let] let l = [ 0x33uy; 0x10uy; 0x41uy; 0x12uy; 0x1fuy; 0xf3uy; 0xd2uy; 0x6buy; ] in
assert_norm (List.Tot.length l = 8);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
8ul
let input2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
0ul
let output2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdduy; 0x6buy; 0x3buy; 0x82uy; 0xceuy; 0x5auy; 0xbduy; 0xd6uy; 0xa9uy; 0x35uy; 0x83uy; 0xd8uy; 0x8cuy; 0x3duy; 0x85uy; 0x77uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output2_len: (x:UInt32.t { UInt32.v x = B.length output2 }) =
16ul
let key3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4buy; 0x28uy; 0x4buy; 0xa3uy; 0x7buy; 0xbeuy; 0xe9uy; 0xf8uy; 0x31uy; 0x80uy; 0x82uy; 0xd7uy; 0xd8uy; 0xe8uy; 0xb5uy; 0xa1uy; 0xe2uy; 0x18uy; 0x18uy; 0x8auy; 0x9cuy; 0xfauy; 0xa3uy; 0x3duy; 0x25uy; 0x71uy; 0x3euy; 0x40uy; 0xbcuy; 0x54uy; 0x7auy; 0x3euy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key3_len: (x:UInt32.t { UInt32.v x = B.length key3 }) =
32ul
let nonce3: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0xd2uy; 0x32uy; 0x1fuy; 0x29uy; 0x28uy; 0xc6uy; 0xc4uy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce3_len: (x:UInt32.t { UInt32.v x = B.length nonce3 }) =
12ul
let aad3: (b: B.buffer UInt8.t { B.length b = 8 /\ B.recallable b }) =
[@inline_let] let l = [ 0x6auy; 0xe2uy; 0xaduy; 0x3fuy; 0x88uy; 0x39uy; 0x5auy; 0x40uy; ] in
assert_norm (List.Tot.length l = 8);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad3_len: (x:UInt32.t { UInt32.v x = B.length aad3 }) =
8ul
let input3: (b: B.buffer UInt8.t { B.length b = 1 /\ B.recallable b /\ B.disjoint b aad3 }) =
B.recall aad3;[@inline_let] let l = [ 0xa4uy; ] in
assert_norm (List.Tot.length l = 1);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input3_len: (x:UInt32.t { UInt32.v x = B.length input3 }) =
1ul
let output3: (b: B.buffer UInt8.t { B.length b = 17 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb7uy; 0x1buy; 0xb0uy; 0x73uy; 0x59uy; 0xb0uy; 0x84uy; 0xb2uy; 0x6duy; 0x8euy; 0xabuy; 0x94uy; 0x31uy; 0xa1uy; 0xaeuy; 0xacuy; 0x89uy; ] in
assert_norm (List.Tot.length l = 17);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output3_len: (x:UInt32.t { UInt32.v x = B.length output3 }) =
17ul
let key4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x66uy; 0xcauy; 0x9cuy; 0x23uy; 0x2auy; 0x4buy; 0x4buy; 0x31uy; 0x0euy; 0x92uy; 0x89uy; 0x8buy; 0xf4uy; 0x93uy; 0xc7uy; 0x87uy; 0x98uy; 0xa3uy; 0xd8uy; 0x39uy; 0xf8uy; 0xf4uy; 0xa7uy; 0x01uy; 0xc0uy; 0x2euy; 0x0auy; 0xa6uy; 0x7euy; 0x5auy; 0x78uy; 0x87uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key4_len: (x:UInt32.t { UInt32.v x = B.length key4 }) =
32ul
let nonce4: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x20uy; 0x1cuy; 0xaauy; 0x5fuy; 0x9cuy; 0xbfuy; 0x92uy; 0x30uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce4_len: (x:UInt32.t { UInt32.v x = B.length nonce4 }) =
12ul
let aad4: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad4_len: (x:UInt32.t { UInt32.v x = B.length aad4 }) =
0ul
let input4: (b: B.buffer UInt8.t { B.length b = 1 /\ B.recallable b /\ B.disjoint b aad4 }) =
B.recall aad4;[@inline_let] let l = [ 0x2duy; ] in
assert_norm (List.Tot.length l = 1);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input4_len: (x:UInt32.t { UInt32.v x = B.length input4 }) =
1ul
let output4: (b: B.buffer UInt8.t { B.length b = 17 /\ B.recallable b }) =
[@inline_let] let l = [ 0xbfuy; 0xe1uy; 0x5buy; 0x0buy; 0xdbuy; 0x6buy; 0xf5uy; 0x5euy; 0x6cuy; 0x5duy; 0x84uy; 0x44uy; 0x39uy; 0x81uy; 0xc1uy; 0x9cuy; 0xacuy; ] in
assert_norm (List.Tot.length l = 17);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output4_len: (x:UInt32.t { UInt32.v x = B.length output4 }) =
17ul
let key5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x68uy; 0x7buy; 0x8duy; 0x8euy; 0xe3uy; 0xc4uy; 0xdduy; 0xaeuy; 0xdfuy; 0x72uy; 0x7fuy; 0x53uy; 0x72uy; 0x25uy; 0x1euy; 0x78uy; 0x91uy; 0xcbuy; 0x69uy; 0x76uy; 0x1fuy; 0x49uy; 0x93uy; 0xf9uy; 0x6fuy; 0x21uy; 0xccuy; 0x39uy; 0x9cuy; 0xaduy; 0xb1uy; 0x01uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key5_len: (x:UInt32.t { UInt32.v x = B.length key5 }) =
32ul
let nonce5: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0xdfuy; 0x51uy; 0x84uy; 0x82uy; 0x42uy; 0x0cuy; 0x75uy; 0x9cuy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce5_len: (x:UInt32.t { UInt32.v x = B.length nonce5 }) =
12ul
let aad5: (b: B.buffer UInt8.t { B.length b = 7 /\ B.recallable b }) =
[@inline_let] let l = [ 0x70uy; 0xd3uy; 0x33uy; 0xf3uy; 0x8buy; 0x18uy; 0x0buy; ] in
assert_norm (List.Tot.length l = 7);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad5_len: (x:UInt32.t { UInt32.v x = B.length aad5 }) =
7ul
let input5: (b: B.buffer UInt8.t { B.length b = 129 /\ B.recallable b /\ B.disjoint b aad5 }) =
B.recall aad5;[@inline_let] let l = [ 0x33uy; 0x2fuy; 0x94uy; 0xc1uy; 0xa4uy; 0xefuy; 0xccuy; 0x2auy; 0x5buy; 0xa6uy; 0xe5uy; 0x8fuy; 0x1duy; 0x40uy; 0xf0uy; 0x92uy; 0x3cuy; 0xd9uy; 0x24uy; 0x11uy; 0xa9uy; 0x71uy; 0xf9uy; 0x37uy; 0x14uy; 0x99uy; 0xfauy; 0xbeuy; 0xe6uy; 0x80uy; 0xdeuy; 0x50uy; 0xc9uy; 0x96uy; 0xd4uy; 0xb0uy; 0xecuy; 0x9euy; 0x17uy; 0xecuy; 0xd2uy; 0x5euy; 0x72uy; 0x99uy; 0xfcuy; 0x0auy; 0xe1uy; 0xcbuy; 0x48uy; 0xd2uy; 0x85uy; 0xdduy; 0x2fuy; 0x90uy; 0xe0uy; 0x66uy; 0x3buy; 0xe6uy; 0x20uy; 0x74uy; 0xbeuy; 0x23uy; 0x8fuy; 0xcbuy; 0xb4uy; 0xe4uy; 0xdauy; 0x48uy; 0x40uy; 0xa6uy; 0xd1uy; 0x1buy; 0xc7uy; 0x42uy; 0xceuy; 0x2fuy; 0x0cuy; 0xa6uy; 0x85uy; 0x6euy; 0x87uy; 0x37uy; 0x03uy; 0xb1uy; 0x7cuy; 0x25uy; 0x96uy; 0xa3uy; 0x05uy; 0xd8uy; 0xb0uy; 0xf4uy; 0xeduy; 0xeauy; 0xc2uy; 0xf0uy; 0x31uy; 0x98uy; 0x6cuy; 0xd1uy; 0x14uy; 0x25uy; 0xc0uy; 0xcbuy; 0x01uy; 0x74uy; 0xd0uy; 0x82uy; 0xf4uy; 0x36uy; 0xf5uy; 0x41uy; 0xd5uy; 0xdcuy; 0xcauy; 0xc5uy; 0xbbuy; 0x98uy; 0xfeuy; 0xfcuy; 0x69uy; 0x21uy; 0x70uy; 0xd8uy; 0xa4uy; 0x4buy; 0xc8uy; 0xdeuy; 0x8fuy; ] in
assert_norm (List.Tot.length l = 129);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input5_len: (x:UInt32.t { UInt32.v x = B.length input5 }) =
129ul
let output5: (b: B.buffer UInt8.t { B.length b = 145 /\ B.recallable b }) =
[@inline_let] let l = [ 0x8buy; 0x06uy; 0xd3uy; 0x31uy; 0xb0uy; 0x93uy; 0x45uy; 0xb1uy; 0x75uy; 0x6euy; 0x26uy; 0xf9uy; 0x67uy; 0xbcuy; 0x90uy; 0x15uy; 0x81uy; 0x2cuy; 0xb5uy; 0xf0uy; 0xc6uy; 0x2buy; 0xc7uy; 0x8cuy; 0x56uy; 0xd1uy; 0xbfuy; 0x69uy; 0x6cuy; 0x07uy; 0xa0uy; 0xdauy; 0x65uy; 0x27uy; 0xc9uy; 0x90uy; 0x3duy; 0xefuy; 0x4buy; 0x11uy; 0x0fuy; 0x19uy; 0x07uy; 0xfduy; 0x29uy; 0x92uy; 0xd9uy; 0xc8uy; 0xf7uy; 0x99uy; 0x2euy; 0x4auy; 0xd0uy; 0xb8uy; 0x2cuy; 0xdcuy; 0x93uy; 0xf5uy; 0x9euy; 0x33uy; 0x78uy; 0xd1uy; 0x37uy; 0xc3uy; 0x66uy; 0xd7uy; 0x5euy; 0xbcuy; 0x44uy; 0xbfuy; 0x53uy; 0xa5uy; 0xbcuy; 0xc4uy; 0xcbuy; 0x7buy; 0x3auy; 0x8euy; 0x7fuy; 0x02uy; 0xbduy; 0xbbuy; 0xe7uy; 0xcauy; 0xa6uy; 0x6cuy; 0x6buy; 0x93uy; 0x21uy; 0x93uy; 0x10uy; 0x61uy; 0xe7uy; 0x69uy; 0xd0uy; 0x78uy; 0xf3uy; 0x07uy; 0x5auy; 0x1auy; 0x8fuy; 0x73uy; 0xaauy; 0xb1uy; 0x4euy; 0xd3uy; 0xdauy; 0x4fuy; 0xf3uy; 0x32uy; 0xe1uy; 0x66uy; 0x3euy; 0x6cuy; 0xc6uy; 0x13uy; 0xbauy; 0x06uy; 0x5buy; 0xfcuy; 0x6auy; 0xe5uy; 0x6fuy; 0x60uy; 0xfbuy; 0x07uy; 0x40uy; 0xb0uy; 0x8cuy; 0x9duy; 0x84uy; 0x43uy; 0x6buy; 0xc1uy; 0xf7uy; 0x8duy; 0x8duy; 0x31uy; 0xf7uy; 0x7auy; 0x39uy; 0x4duy; 0x8fuy; 0x9auy; 0xebuy; ] in
assert_norm (List.Tot.length l = 145);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output5_len: (x:UInt32.t { UInt32.v x = B.length output5 }) =
145ul
let key6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x8duy; 0xb8uy; 0x91uy; 0x48uy; 0xf0uy; 0xe7uy; 0x0auy; 0xbduy; 0xf9uy; 0x3fuy; 0xcduy; 0xd9uy; 0xa0uy; 0x1euy; 0x42uy; 0x4cuy; 0xe7uy; 0xdeuy; 0x25uy; 0x3duy; 0xa3uy; 0xd7uy; 0x05uy; 0x80uy; 0x8duy; 0xf2uy; 0x82uy; 0xacuy; 0x44uy; 0x16uy; 0x51uy; 0x01uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key6_len: (x:UInt32.t { UInt32.v x = B.length key6 }) =
32ul
let nonce6: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0xdeuy; 0x7buy; 0xefuy; 0xc3uy; 0x65uy; 0x1buy; 0x68uy; 0xb0uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce6_len: (x:UInt32.t { UInt32.v x = B.length nonce6 }) =
12ul
let aad6: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad6_len: (x:UInt32.t { UInt32.v x = B.length aad6 }) =
0ul
let input6: (b: B.buffer UInt8.t { B.length b = 256 /\ B.recallable b /\ B.disjoint b aad6 }) =
B.recall aad6;[@inline_let] let l = [ 0x9buy; 0x18uy; 0xdbuy; 0xdduy; 0x9auy; 0x0fuy; 0x3euy; 0xa5uy; 0x15uy; 0x17uy; 0xdeuy; 0xdfuy; 0x08uy; 0x9duy; 0x65uy; 0x0auy; 0x67uy; 0x30uy; 0x12uy; 0xe2uy; 0x34uy; 0x77uy; 0x4buy; 0xc1uy; 0xd9uy; 0xc6uy; 0x1fuy; 0xabuy; 0xc6uy; 0x18uy; 0x50uy; 0x17uy; 0xa7uy; 0x9duy; 0x3cuy; 0xa6uy; 0xc5uy; 0x35uy; 0x8cuy; 0x1cuy; 0xc0uy; 0xa1uy; 0x7cuy; 0x9fuy; 0x03uy; 0x89uy; 0xcauy; 0xe1uy; 0xe6uy; 0xe9uy; 0xd4uy; 0xd3uy; 0x88uy; 0xdbuy; 0xb4uy; 0x51uy; 0x9duy; 0xecuy; 0xb4uy; 0xfcuy; 0x52uy; 0xeeuy; 0x6duy; 0xf1uy; 0x75uy; 0x42uy; 0xc6uy; 0xfduy; 0xbduy; 0x7auy; 0x8euy; 0x86uy; 0xfcuy; 0x44uy; 0xb3uy; 0x4fuy; 0xf3uy; 0xeauy; 0x67uy; 0x5auy; 0x41uy; 0x13uy; 0xbauy; 0xb0uy; 0xdcuy; 0xe1uy; 0xd3uy; 0x2auy; 0x7cuy; 0x22uy; 0xb3uy; 0xcauy; 0xacuy; 0x6auy; 0x37uy; 0x98uy; 0x3euy; 0x1duy; 0x40uy; 0x97uy; 0xf7uy; 0x9buy; 0x1duy; 0x36uy; 0x6buy; 0xb3uy; 0x28uy; 0xbduy; 0x60uy; 0x82uy; 0x47uy; 0x34uy; 0xaauy; 0x2fuy; 0x7duy; 0xe9uy; 0xa8uy; 0x70uy; 0x81uy; 0x57uy; 0xd4uy; 0xb9uy; 0x77uy; 0x0auy; 0x9duy; 0x29uy; 0xa7uy; 0x84uy; 0x52uy; 0x4fuy; 0xc2uy; 0x4auy; 0x40uy; 0x3buy; 0x3cuy; 0xd4uy; 0xc9uy; 0x2auy; 0xdbuy; 0x4auy; 0x53uy; 0xc4uy; 0xbeuy; 0x80uy; 0xe9uy; 0x51uy; 0x7fuy; 0x8fuy; 0xc7uy; 0xa2uy; 0xceuy; 0x82uy; 0x5cuy; 0x91uy; 0x1euy; 0x74uy; 0xd9uy; 0xd0uy; 0xbduy; 0xd5uy; 0xf3uy; 0xfduy; 0xdauy; 0x4duy; 0x25uy; 0xb4uy; 0xbbuy; 0x2duy; 0xacuy; 0x2fuy; 0x3duy; 0x71uy; 0x85uy; 0x7buy; 0xcfuy; 0x3cuy; 0x7buy; 0x3euy; 0x0euy; 0x22uy; 0x78uy; 0x0cuy; 0x29uy; 0xbfuy; 0xe4uy; 0xf4uy; 0x57uy; 0xb3uy; 0xcbuy; 0x49uy; 0xa0uy; 0xfcuy; 0x1euy; 0x05uy; 0x4euy; 0x16uy; 0xbcuy; 0xd5uy; 0xa8uy; 0xa3uy; 0xeeuy; 0x05uy; 0x35uy; 0xc6uy; 0x7cuy; 0xabuy; 0x60uy; 0x14uy; 0x55uy; 0x1auy; 0x8euy; 0xc5uy; 0x88uy; 0x5duy; 0xd5uy; 0x81uy; 0xc2uy; 0x81uy; 0xa5uy; 0xc4uy; 0x60uy; 0xdbuy; 0xafuy; 0x77uy; 0x91uy; 0xe1uy; 0xceuy; 0xa2uy; 0x7euy; 0x7fuy; 0x42uy; 0xe3uy; 0xb0uy; 0x13uy; 0x1cuy; 0x1fuy; 0x25uy; 0x60uy; 0x21uy; 0xe2uy; 0x40uy; 0x5fuy; 0x99uy; 0xb7uy; 0x73uy; 0xecuy; 0x9buy; 0x2buy; 0xf0uy; 0x65uy; 0x11uy; 0xc8uy; 0xd0uy; 0x0auy; 0x9fuy; 0xd3uy; ] in
assert_norm (List.Tot.length l = 256);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input6_len: (x:UInt32.t { UInt32.v x = B.length input6 }) =
256ul
let output6: (b: B.buffer UInt8.t { B.length b = 272 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x04uy; 0xc2uy; 0xeduy; 0x8duy; 0xfduy; 0x97uy; 0x5cuy; 0xd2uy; 0xb7uy; 0xe2uy; 0xc1uy; 0x6buy; 0xa3uy; 0xbauy; 0xf8uy; 0xc9uy; 0x50uy; 0xc3uy; 0xc6uy; 0xa5uy; 0xe3uy; 0xa4uy; 0x7cuy; 0xc3uy; 0x23uy; 0x49uy; 0x5euy; 0xa9uy; 0xb9uy; 0x32uy; 0xebuy; 0x8auy; 0x7cuy; 0xcauy; 0xe5uy; 0xecuy; 0xfbuy; 0x7cuy; 0xc0uy; 0xcbuy; 0x7duy; 0xdcuy; 0x2cuy; 0x9duy; 0x92uy; 0x55uy; 0x21uy; 0x0auy; 0xc8uy; 0x43uy; 0x63uy; 0x59uy; 0x0auy; 0x31uy; 0x70uy; 0x82uy; 0x67uy; 0x41uy; 0x03uy; 0xf8uy; 0xdfuy; 0xf2uy; 0xacuy; 0xa7uy; 0x02uy; 0xd4uy; 0xd5uy; 0x8auy; 0x2duy; 0xc8uy; 0x99uy; 0x19uy; 0x66uy; 0xd0uy; 0xf6uy; 0x88uy; 0x2cuy; 0x77uy; 0xd9uy; 0xd4uy; 0x0duy; 0x6cuy; 0xbduy; 0x98uy; 0xdeuy; 0xe7uy; 0x7fuy; 0xaduy; 0x7euy; 0x8auy; 0xfbuy; 0xe9uy; 0x4buy; 0xe5uy; 0xf7uy; 0xe5uy; 0x50uy; 0xa0uy; 0x90uy; 0x3fuy; 0xd6uy; 0x22uy; 0x53uy; 0xe3uy; 0xfeuy; 0x1buy; 0xccuy; 0x79uy; 0x3buy; 0xecuy; 0x12uy; 0x47uy; 0x52uy; 0xa7uy; 0xd6uy; 0x04uy; 0xe3uy; 0x52uy; 0xe6uy; 0x93uy; 0x90uy; 0x91uy; 0x32uy; 0x73uy; 0x79uy; 0xb8uy; 0xd0uy; 0x31uy; 0xdeuy; 0x1fuy; 0x9fuy; 0x2fuy; 0x05uy; 0x38uy; 0x54uy; 0x2fuy; 0x35uy; 0x04uy; 0x39uy; 0xe0uy; 0xa7uy; 0xbauy; 0xc6uy; 0x52uy; 0xf6uy; 0x37uy; 0x65uy; 0x4cuy; 0x07uy; 0xa9uy; 0x7euy; 0xb3uy; 0x21uy; 0x6fuy; 0x74uy; 0x8cuy; 0xc9uy; 0xdeuy; 0xdbuy; 0x65uy; 0x1buy; 0x9buy; 0xaauy; 0x60uy; 0xb1uy; 0x03uy; 0x30uy; 0x6buy; 0xb2uy; 0x03uy; 0xc4uy; 0x1cuy; 0x04uy; 0xf8uy; 0x0fuy; 0x64uy; 0xafuy; 0x46uy; 0xe4uy; 0x65uy; 0x99uy; 0x49uy; 0xe2uy; 0xeauy; 0xceuy; 0x78uy; 0x00uy; 0xd8uy; 0x8buy; 0xd5uy; 0x2euy; 0xcfuy; 0xfcuy; 0x40uy; 0x49uy; 0xe8uy; 0x58uy; 0xdcuy; 0x34uy; 0x9cuy; 0x8cuy; 0x61uy; 0xbfuy; 0x0auy; 0x8euy; 0xecuy; 0x39uy; 0xa9uy; 0x30uy; 0x05uy; 0x5auy; 0xd2uy; 0x56uy; 0x01uy; 0xc7uy; 0xdauy; 0x8fuy; 0x4euy; 0xbbuy; 0x43uy; 0xa3uy; 0x3auy; 0xf9uy; 0x15uy; 0x2auy; 0xd0uy; 0xa0uy; 0x7auy; 0x87uy; 0x34uy; 0x82uy; 0xfeuy; 0x8auy; 0xd1uy; 0x2duy; 0x5euy; 0xc7uy; 0xbfuy; 0x04uy; 0x53uy; 0x5fuy; 0x3buy; 0x36uy; 0xd4uy; 0x25uy; 0x5cuy; 0x34uy; 0x7auy; 0x8duy; 0xd5uy; 0x05uy; 0xceuy; 0x72uy; 0xcauy; 0xefuy; 0x7auy; 0x4buy; 0xbcuy; 0xb0uy; 0x10uy; 0x5cuy; 0x96uy; 0x42uy; 0x3auy; 0x00uy; 0x98uy; 0xcduy; 0x15uy; 0xe8uy; 0xb7uy; 0x53uy; ] in
assert_norm (List.Tot.length l = 272);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output6_len: (x:UInt32.t { UInt32.v x = B.length output6 }) =
272ul
let key7: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xf2uy; 0xaauy; 0x4fuy; 0x99uy; 0xfduy; 0x3euy; 0xa8uy; 0x53uy; 0xc1uy; 0x44uy; 0xe9uy; 0x81uy; 0x18uy; 0xdcuy; 0xf5uy; 0xf0uy; 0x3euy; 0x44uy; 0x15uy; 0x59uy; 0xe0uy; 0xc5uy; 0x44uy; 0x86uy; 0xc3uy; 0x91uy; 0xa8uy; 0x75uy; 0xc0uy; 0x12uy; 0x46uy; 0xbauy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key7_len: (x:UInt32.t { UInt32.v x = B.length key7 }) =
32ul
let nonce7: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x0euy; 0x0duy; 0x57uy; 0xbbuy; 0x7buy; 0x40uy; 0x54uy; 0x02uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce7_len: (x:UInt32.t { UInt32.v x = B.length nonce7 }) =
12ul
let aad7: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad7_len: (x:UInt32.t { UInt32.v x = B.length aad7 }) =
0ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Chacha20Poly1305.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{ LowStar.Monotonic.Buffer.length b = 512 /\ LowStar.Monotonic.Buffer.recallable b /\
LowStar.Monotonic.Buffer.disjoint b Test.Vectors.Chacha20Poly1305.aad7 } | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"LowStar.Monotonic.Buffer.disjoint",
"Test.Vectors.Chacha20Poly1305.aad7",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil",
"LowStar.Monotonic.Buffer.recall"
] | [] | false | false | false | false | false | let input7:(b: B.buffer UInt8.t {B.length b = 512 /\ B.recallable b /\ B.disjoint b aad7}) =
| B.recall aad7;
[@@ inline_let ]let l =
[
0xc3uy; 0x09uy; 0x94uy; 0x62uy; 0xe6uy; 0x46uy; 0x2euy; 0x10uy; 0xbeuy; 0x00uy; 0xe4uy; 0xfcuy;
0xf3uy; 0x40uy; 0xa3uy; 0xe2uy; 0x0fuy; 0xc2uy; 0x8buy; 0x28uy; 0xdcuy; 0xbauy; 0xb4uy; 0x3cuy;
0xe4uy; 0x21uy; 0x58uy; 0x61uy; 0xcduy; 0x8buy; 0xcduy; 0xfbuy; 0xacuy; 0x94uy; 0xa1uy; 0x45uy;
0xf5uy; 0x1cuy; 0xe1uy; 0x12uy; 0xe0uy; 0x3buy; 0x67uy; 0x21uy; 0x54uy; 0x5euy; 0x8cuy; 0xaauy;
0xcfuy; 0xdbuy; 0xb4uy; 0x51uy; 0xd4uy; 0x13uy; 0xdauy; 0xe6uy; 0x83uy; 0x89uy; 0xb6uy; 0x92uy;
0xe9uy; 0x21uy; 0x76uy; 0xa4uy; 0x93uy; 0x7duy; 0x0euy; 0xfduy; 0x96uy; 0x36uy; 0x03uy; 0x91uy;
0x43uy; 0x5cuy; 0x92uy; 0x49uy; 0x62uy; 0x61uy; 0x7buy; 0xebuy; 0x43uy; 0x89uy; 0xb8uy; 0x12uy;
0x20uy; 0x43uy; 0xd4uy; 0x47uy; 0x06uy; 0x84uy; 0xeeuy; 0x47uy; 0xe9uy; 0x8auy; 0x73uy; 0x15uy;
0x0fuy; 0x72uy; 0xcfuy; 0xeduy; 0xceuy; 0x96uy; 0xb2uy; 0x7fuy; 0x21uy; 0x45uy; 0x76uy; 0xebuy;
0x26uy; 0x28uy; 0x83uy; 0x6auy; 0xaduy; 0xaauy; 0xa6uy; 0x81uy; 0xd8uy; 0x55uy; 0xb1uy; 0xa3uy;
0x85uy; 0xb3uy; 0x0cuy; 0xdfuy; 0xf1uy; 0x69uy; 0x2duy; 0x97uy; 0x05uy; 0x2auy; 0xbcuy; 0x7cuy;
0x7buy; 0x25uy; 0xf8uy; 0x80uy; 0x9duy; 0x39uy; 0x25uy; 0xf3uy; 0x62uy; 0xf0uy; 0x66uy; 0x5euy;
0xf4uy; 0xa0uy; 0xcfuy; 0xd8uy; 0xfduy; 0x4fuy; 0xb1uy; 0x1fuy; 0x60uy; 0x3auy; 0x08uy; 0x47uy;
0xafuy; 0xe1uy; 0xf6uy; 0x10uy; 0x77uy; 0x09uy; 0xa7uy; 0x27uy; 0x8fuy; 0x9auy; 0x97uy; 0x5auy;
0x26uy; 0xfauy; 0xfeuy; 0x41uy; 0x32uy; 0x83uy; 0x10uy; 0xe0uy; 0x1duy; 0xbfuy; 0x64uy; 0x0duy;
0xf4uy; 0x1cuy; 0x32uy; 0x35uy; 0xe5uy; 0x1buy; 0x36uy; 0xefuy; 0xd4uy; 0x4auy; 0x93uy; 0x4duy;
0x00uy; 0x7cuy; 0xecuy; 0x02uy; 0x07uy; 0x8buy; 0x5duy; 0x7duy; 0x1buy; 0x0euy; 0xd1uy; 0xa6uy;
0xa5uy; 0x5duy; 0x7duy; 0x57uy; 0x88uy; 0xa8uy; 0xccuy; 0x81uy; 0xb4uy; 0x86uy; 0x4euy; 0xb4uy;
0x40uy; 0xe9uy; 0x1duy; 0xc3uy; 0xb1uy; 0x24uy; 0x3euy; 0x7fuy; 0xccuy; 0x8auy; 0x24uy; 0x9buy;
0xdfuy; 0x6duy; 0xf0uy; 0x39uy; 0x69uy; 0x3euy; 0x4cuy; 0xc0uy; 0x96uy; 0xe4uy; 0x13uy; 0xdauy;
0x90uy; 0xdauy; 0xf4uy; 0x95uy; 0x66uy; 0x8buy; 0x17uy; 0x17uy; 0xfeuy; 0x39uy; 0x43uy; 0x25uy;
0xaauy; 0xdauy; 0xa0uy; 0x43uy; 0x3cuy; 0xb1uy; 0x41uy; 0x02uy; 0xa3uy; 0xf0uy; 0xa7uy; 0x19uy;
0x59uy; 0xbcuy; 0x1duy; 0x7duy; 0x6cuy; 0x6duy; 0x91uy; 0x09uy; 0x5cuy; 0xb7uy; 0x5buy; 0x01uy;
0xd1uy; 0x6fuy; 0x17uy; 0x21uy; 0x97uy; 0xbfuy; 0x89uy; 0x71uy; 0xa5uy; 0xb0uy; 0x6euy; 0x07uy;
0x45uy; 0xfduy; 0x9duy; 0xeauy; 0x07uy; 0xf6uy; 0x7auy; 0x9fuy; 0x10uy; 0x18uy; 0x22uy; 0x30uy;
0x73uy; 0xacuy; 0xd4uy; 0x6buy; 0x72uy; 0x44uy; 0xeduy; 0xd9uy; 0x19uy; 0x9buy; 0x2duy; 0x4auy;
0x41uy; 0xdduy; 0xd1uy; 0x85uy; 0x5euy; 0x37uy; 0x19uy; 0xeduy; 0xd2uy; 0x15uy; 0x8fuy; 0x5euy;
0x91uy; 0xdbuy; 0x33uy; 0xf2uy; 0xe4uy; 0xdbuy; 0xffuy; 0x98uy; 0xfbuy; 0xa3uy; 0xb5uy; 0xcauy;
0x21uy; 0x69uy; 0x08uy; 0xe7uy; 0x8auy; 0xdfuy; 0x90uy; 0xffuy; 0x3euy; 0xe9uy; 0x20uy; 0x86uy;
0x3cuy; 0xe9uy; 0xfcuy; 0x0buy; 0xfeuy; 0x5cuy; 0x61uy; 0xaauy; 0x13uy; 0x92uy; 0x7fuy; 0x7buy;
0xecuy; 0xe0uy; 0x6duy; 0xa8uy; 0x23uy; 0x22uy; 0xf6uy; 0x6buy; 0x77uy; 0xc4uy; 0xfeuy; 0x40uy;
0x07uy; 0x3buy; 0xb6uy; 0xf6uy; 0x8euy; 0x5fuy; 0xd4uy; 0xb9uy; 0xb7uy; 0x0fuy; 0x21uy; 0x04uy;
0xefuy; 0x83uy; 0x63uy; 0x91uy; 0x69uy; 0x40uy; 0xa3uy; 0x48uy; 0x5cuy; 0xd2uy; 0x60uy; 0xf9uy;
0x4fuy; 0x6cuy; 0x47uy; 0x8buy; 0x3buy; 0xb1uy; 0x9fuy; 0x8euy; 0xeeuy; 0x16uy; 0x8auy; 0x13uy;
0xfcuy; 0x46uy; 0x17uy; 0xc3uy; 0xc3uy; 0x32uy; 0x56uy; 0xf8uy; 0x3cuy; 0x85uy; 0x3auy; 0xb6uy;
0x3euy; 0xaauy; 0x89uy; 0x4fuy; 0xb3uy; 0xdfuy; 0x38uy; 0xfduy; 0xf1uy; 0xe4uy; 0x3auy; 0xc0uy;
0xe6uy; 0x58uy; 0xb5uy; 0x8fuy; 0xc5uy; 0x29uy; 0xa2uy; 0x92uy; 0x4auy; 0xb6uy; 0xa0uy; 0x34uy;
0x7fuy; 0xabuy; 0xb5uy; 0x8auy; 0x90uy; 0xa1uy; 0xdbuy; 0x4duy; 0xcauy; 0xb6uy; 0x2cuy; 0x41uy;
0x3cuy; 0xf7uy; 0x2buy; 0x21uy; 0xc3uy; 0xfduy; 0xf4uy; 0x17uy; 0x5cuy; 0xb5uy; 0x33uy; 0x17uy;
0x68uy; 0x2buy; 0x08uy; 0x30uy; 0xf3uy; 0xf7uy; 0x30uy; 0x3cuy; 0x96uy; 0xe6uy; 0x6auy; 0x20uy;
0x97uy; 0xe7uy; 0x4duy; 0x10uy; 0x5fuy; 0x47uy; 0x5fuy; 0x49uy; 0x96uy; 0x09uy; 0xf0uy; 0x27uy;
0x91uy; 0xc8uy; 0xf8uy; 0x5auy; 0x2euy; 0x79uy; 0xb5uy; 0xe2uy; 0xb8uy; 0xe8uy; 0xb9uy; 0x7buy;
0xd5uy; 0x10uy; 0xcbuy; 0xffuy; 0x5duy; 0x14uy; 0x73uy; 0xf3uy
]
in
assert_norm (List.Tot.length l = 512);
B.gcmalloc_of_list HyperStack.root l | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.fresh_bv | val fresh_bv (e: env) (basename: string) : Tac bv | val fresh_bv (e: env) (basename: string) : Tac bv | let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0 | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 35,
"end_line": 325,
"start_col": 0,
"start_line": 322
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak the returned value a bit to include the sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In worst case the performance is quadratic in the number of binders.
* TODO: fix that, it actually probably happens for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | e: FStar.Stubs.Reflection.Types.env -> basename: Prims.string
-> FStar.Tactics.Effect.Tac FStar.Stubs.Reflection.Types.bv | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Stubs.Reflection.Types.env",
"Prims.string",
"FStar.InteractiveHelpers.Base._fresh_bv",
"FStar.Stubs.Reflection.Types.bv",
"Prims.list",
"FStar.Tactics.Util.map",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Tactics.V1.Derived.name_of_binder",
"FStar.Stubs.Reflection.Types.binders",
"FStar.Stubs.Reflection.V1.Builtins.binders_of_env"
] | [] | false | true | false | false | false | let fresh_bv (e: env) (basename: string) : Tac bv =
| let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0 | false |
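A minimal sketch of the fresh-name search in the record above (the binder names "x0"/"x1" are assumed for illustration, not taken from any source file):

(* _fresh_bv appends an integer suffix to the basename and bumps it until the
 * candidate name is unused among the existing binder names:
 *
 *   _fresh_bv ["x0"; "x1"] "x" 0
 *   (* tries "x0", then "x1", and finally returns fresh_bv_named "x2" *)
 *
 * fresh_bv e "x" first collects the binder names of e with
 * Tactics.map name_of_binder and then runs this search from index 0. *)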
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.bv_is_shadowed | val bv_is_shadowed (ge: genv) (bv: bv) : Tot bool | val bv_is_shadowed (ge: genv) (bv: bv) : Tot bool | let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 53,
"end_line": 286,
"start_col": 0,
"start_line": 285
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ge: FStar.InteractiveHelpers.Base.genv -> bv: FStar.Stubs.Reflection.Types.bv -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.Stubs.Reflection.Types.bv",
"FStar.List.Tot.Base.existsb",
"FStar.Pervasives.Native.tuple2",
"FStar.Stubs.Reflection.Types.typ",
"FStar.InteractiveHelpers.Base.bv_eq",
"Prims.bool",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__svars"
] | [] | false | false | false | true | false | let bv_is_shadowed (ge: genv) (bv: bv) : Tot bool =
| List.Tot.existsb (fun (b, _) -> bv_eq bv b) ge.svars | false |
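A hedged sketch of what the shadowing bookkeeping above computes (the variable indices below are invented for illustration):

(* Suppose a variable named x with index 5 is pushed, and later a second x with
 * index 9 is pushed. genv_push_bv looks the name up with genv_get_from_name
 * and records the older binder in svars, so afterwards:
 *
 *   bv_is_shadowed ge x5 = true    (* the older binder is shadowed *)
 *   bv_is_shadowed ge x9 = false   (* the newer binder is the visible one *)
 *
 * Since bv_eq compares only bv_index, the membership test against svars is
 * purely index-based. *)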
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.binder_is_abstract | val binder_is_abstract : genv -> binder -> Tot bool | val binder_is_abstract : genv -> binder -> Tot bool | let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b) | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 36,
"end_line": 305,
"start_col": 0,
"start_line": 304
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ge: FStar.InteractiveHelpers.Base.genv -> b: FStar.Stubs.Reflection.Types.binder -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.Stubs.Reflection.Types.binder",
"FStar.InteractiveHelpers.Base.bv_is_abstract",
"FStar.Reflection.V1.Derived.bv_of_binder",
"Prims.bool"
] | [] | false | false | false | true | false | let binder_is_abstract ge b =
| bv_is_abstract ge (bv_of_binder b) | false |
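A small sketch of how the abstraction flag above is looked up (the binding and variable names are assumed for illustration):

(* For a genv ge whose bind map contains a single entry (h1, (ty, true, tm)):
 *
 *   bv_is_abstract ge h1 = true     (* genv_get finds the entry and abs = true *)
 *   bv_is_abstract ge h2 = false    (* genv_get returns None, so default false *)
 *
 * binder_is_abstract simply projects the bv out of the binder with
 * bv_of_binder and forwards it to bv_is_abstract. *)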
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.fresh_binder | val fresh_binder (e: env) (basename: string) (ty: typ) : Tac binder | val fresh_binder (e: env) (basename: string) (ty: typ) : Tac binder | let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder =
let bv = fresh_bv e basename in
mk_binder bv ty | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 17,
"end_line": 329,
"start_col": 0,
"start_line": 327
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In worst case the performance is quadratic in the number of binders.
* TODO: fix that, it actually probably happens for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name
let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: FStar.Stubs.Reflection.Types.env ->
basename: Prims.string ->
ty: FStar.Stubs.Reflection.Types.typ
-> FStar.Tactics.Effect.Tac FStar.Stubs.Reflection.Types.binder | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Stubs.Reflection.Types.env",
"Prims.string",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Reflection.V1.Derived.mk_binder",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Stubs.Reflection.Types.bv",
"FStar.InteractiveHelpers.Base.fresh_bv"
] | [] | false | true | false | false | false | let fresh_binder (e: env) (basename: string) (ty: typ) : Tac binder =
| let bv = fresh_bv e basename in
mk_binder bv ty | false |
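A hypothetical tactic fragment using fresh_binder from the record above (top_env, the `int` antiquotation, and the basename "x" are assumptions made for illustration, not code from the source module):

(* fresh_binder pairs a fresh bv with the requested type, so a caller can
 * introduce a bound variable that cannot clash with names already in scope:
 *
 *   let b = fresh_binder (top_env ()) "x" (`int) in
 *   pack (Tv_Abs b (pack (Tv_Var (bv_of_binder b))))
 *   (* builds a term of the shape fun (x<n> : int) -> x<n>, where the suffix
 *      <n> is whatever index the fresh-name search settled on *)
 *)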
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.unzip | val unzip (#a #b: _) (l: list (a & b)) : Tot (list a & list b) | val unzip (#a #b: _) (l: list (a & b)) : Tot (list a & list b) | let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2) | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 26,
"end_line": 79,
"start_col": 0,
"start_line": 74
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | l: Prims.list (a * b) -> Prims.list a * Prims.list b | Prims.Tot | [
"total"
] | [] | [
"Prims.list",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.Mktuple2",
"Prims.Nil",
"Prims.Cons",
"FStar.InteractiveHelpers.Base.unzip"
] | [
"recursion"
] | false | false | false | true | false | let rec unzip #a #b (l: list (a & b)) : Tot (list a & list b) =
| match l with
| [] -> ([], [])
| (hd1, hd2) :: tl ->
let tl1, tl2 = unzip tl in
(hd1 :: tl1, hd2 :: tl2) | false |
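A minimal, self-contained usage sketch for the pure helper unzip defined above (the values are invented, and it assumes the defining module is opened so unzip is in scope):

let _unzip_example : list int & list string =
  // unzip splits a list of pairs into the pair of component lists
  unzip [(1, "a"); (2, "b"); (3, "c")]
  // evaluates to ([1; 2; 3], ["a"; "b"; "c"])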
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.ss_comp_commutes | val ss_comp_commutes (c:comp) (ss:ss_t)
: Lemma (ensures
(let r = ss_comp c ss in
(C_Tot? c ==> r == C_Tot (ss_term (comp_res c) ss)) /\
(C_ST? c ==> r == C_ST (ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STAtomic? c ==> r == C_STAtomic (ss_term (comp_inames c) ss)
(C_STAtomic?.obs c)
(ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STGhost? c ==> r == C_STGhost (ss_st_comp (st_comp_of_comp c) ss))))
[SMTPat (ss_comp c ss)] | val ss_comp_commutes (c:comp) (ss:ss_t)
: Lemma (ensures
(let r = ss_comp c ss in
(C_Tot? c ==> r == C_Tot (ss_term (comp_res c) ss)) /\
(C_ST? c ==> r == C_ST (ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STAtomic? c ==> r == C_STAtomic (ss_term (comp_inames c) ss)
(C_STAtomic?.obs c)
(ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STGhost? c ==> r == C_STGhost (ss_st_comp (st_comp_of_comp c) ss))))
[SMTPat (ss_comp c ss)] | let rec ss_comp_commutes (c:comp) (ss:ss_t)
: Lemma (ensures
(let r = ss_comp c ss in
(C_Tot? c ==> r == C_Tot (ss_term (comp_res c) ss)) /\
(C_ST? c ==> r == C_ST (ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STAtomic? c ==> r == C_STAtomic (ss_term (comp_inames c) ss)
(C_STAtomic?.obs c)
(ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STGhost? c ==> r == C_STGhost (ss_st_comp (st_comp_of_comp c) ss))))
(decreases L.length ss.l)
[SMTPat (ss_comp c ss)] =
match ss.l with
| [] -> ()
| y::tl -> ss_comp_commutes (subst_comp c [ NT y (Map.sel ss.m y) ]) (tail ss) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 80,
"end_line": 244,
"start_col": 0,
"start_line": 231
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty
#push-options "--warn_error -271"
let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
()
#pop-options
let rec remove_l (l:ss_dom) (x:var { L.memP x l })
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y:var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
match l with
| [] -> assert False; []
| y::tl ->
if y = x then tl
else y::(remove_l tl x)
let rec is_dom_remove
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { Map.contains m x })
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] =
match l with
| [] -> ()
| y::tl ->
if x = y then ()
else let t_y = Map.sel m y in
let m1 = remove_map m y in
is_dom_remove tl m1 x;
assert (is_dom (remove_l tl x) (remove_map m1 x));
is_dom_push (remove_l tl x) (remove_map m1 x) y t_y;
assert (Map.equal (Map.upd (remove_map m1 x) y t_y)
(remove_map m x))
let rec ss_term (t:term) (ss:ss_t) : Tot term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_term t [ NT y (Map.sel ss.m y) ] in
ss_term t (tail ss)
let rec ss_st_term (t:st_term) (ss:ss_t) : Tot st_term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_st_term t [ NT y (Map.sel ss.m y) ] in
ss_st_term t (tail ss)
let rec ss_st_comp (s:st_comp) (ss:ss_t)
: Tot st_comp (decreases L.length ss.l) =
match ss.l with
| [] -> s
| y::tl ->
let s = subst_st_comp s [ NT y (Map.sel ss.m y) ] in
ss_st_comp s (tail ss)
let rec ss_comp (c:comp) (ss:ss_t)
: Tot comp (decreases L.length ss.l) =
match ss.l with
| [] -> c
| y::tl ->
let c = subst_comp c [ NT y (Map.sel ss.m y) ] in
ss_comp c (tail ss)
let rec ss_binder (b:binder) (ss:ss_t)
: Tot binder (decreases L.length ss.l) =
match ss.l with
| [] -> b
| y::tl ->
let b = subst_binder b [ NT y (Map.sel ss.m y) ] in
ss_binder b (tail ss)
let rec ss_env (g:env) (ss:ss_t)
: Tot (g':env { fstar_env g' == fstar_env g /\
Env.dom g' == Env.dom g })
(decreases L.length ss.l) =
admit ();
match ss.l with
| [] -> g
| y::tl -> ss_env (subst_env g [ NT y (Map.sel ss.m y) ]) (tail ss)
let rec ss_st_comp_commutes (s:st_comp) (ss:ss_t)
: Lemma (ensures
ss_st_comp s ss ==
{ s with res = ss_term s.res ss;
pre = ss_term s.pre ss;
post = ss_term s.post ss; }) // no shifting required
(decreases L.length ss.l)
[SMTPat (ss_st_comp s ss)] =
match ss.l with
| [] -> ()
| y::tl -> ss_st_comp_commutes (subst_st_comp s [ NT y (Map.sel ss.m y) ]) (tail ss) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | c: Pulse.Syntax.Base.comp -> ss: Pulse.Checker.Prover.Substs.ss_t
-> FStar.Pervasives.Lemma
(ensures
(let r = Pulse.Checker.Prover.Substs.ss_comp c ss in
(C_Tot? c ==>
r ==
Pulse.Syntax.Base.C_Tot
(Pulse.Checker.Prover.Substs.ss_term (Pulse.Syntax.Base.comp_res c) ss)) /\
(C_ST? c ==>
r ==
Pulse.Syntax.Base.C_ST
(Pulse.Checker.Prover.Substs.ss_st_comp (Pulse.Syntax.Base.st_comp_of_comp c) ss)) /\
(C_STAtomic? c ==>
r ==
Pulse.Syntax.Base.C_STAtomic
(Pulse.Checker.Prover.Substs.ss_term (Pulse.Syntax.Base.comp_inames c) ss)
(C_STAtomic?.obs c)
(Pulse.Checker.Prover.Substs.ss_st_comp (Pulse.Syntax.Base.st_comp_of_comp c) ss)) /\
(C_STGhost? c ==>
r ==
Pulse.Syntax.Base.C_STGhost
(Pulse.Checker.Prover.Substs.ss_st_comp (Pulse.Syntax.Base.st_comp_of_comp c) ss))))
(decreases FStar.List.Tot.Base.length (Mkss_t?.l ss))
[SMTPat (Pulse.Checker.Prover.Substs.ss_comp c ss)] | FStar.Pervasives.Lemma | [
"",
"lemma"
] | [] | [
"Pulse.Syntax.Base.comp",
"Pulse.Checker.Prover.Substs.ss_t",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__l",
"Pulse.Syntax.Base.var",
"Prims.list",
"Pulse.Checker.Prover.Substs.ss_comp_commutes",
"Pulse.Syntax.Naming.subst_comp",
"Prims.Cons",
"Pulse.Syntax.Naming.subst_elt",
"Pulse.Syntax.Naming.NT",
"FStar.Map.sel",
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.__proj__Mkss_t__item__m",
"Prims.Nil",
"Pulse.Checker.Prover.Substs.tail",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.l_and",
"Prims.l_imp",
"Prims.b2t",
"Pulse.Syntax.Base.uu___is_C_Tot",
"Prims.eq2",
"Pulse.Syntax.Base.C_Tot",
"Pulse.Checker.Prover.Substs.ss_term",
"Pulse.Syntax.Base.comp_res",
"Pulse.Syntax.Base.uu___is_C_ST",
"Pulse.Syntax.Base.C_ST",
"Pulse.Checker.Prover.Substs.ss_st_comp",
"Pulse.Syntax.Base.st_comp_of_comp",
"Pulse.Syntax.Base.uu___is_C_STAtomic",
"Pulse.Syntax.Base.C_STAtomic",
"Pulse.Syntax.Base.comp_inames",
"Pulse.Syntax.Base.__proj__C_STAtomic__item__obs",
"Pulse.Syntax.Base.uu___is_C_STGhost",
"Pulse.Syntax.Base.C_STGhost",
"Pulse.Checker.Prover.Substs.ss_comp",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat"
] | [
"recursion"
] | false | false | true | false | false | let rec ss_comp_commutes (c: comp) (ss: ss_t)
: Lemma
(ensures
(let r = ss_comp c ss in
(C_Tot? c ==> r == C_Tot (ss_term (comp_res c) ss)) /\
(C_ST? c ==> r == C_ST (ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STAtomic? c ==>
r ==
C_STAtomic (ss_term (comp_inames c) ss)
(C_STAtomic?.obs c)
(ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STGhost? c ==> r == C_STGhost (ss_st_comp (st_comp_of_comp c) ss))))
(decreases L.length ss.l)
[SMTPat (ss_comp c ss)] =
| match ss.l with
| [] -> ()
| y :: tl -> ss_comp_commutes (subst_comp c [NT y (Map.sel ss.m y)]) (tail ss) | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.print_binder_info | val print_binder_info (full: bool) (b: binder) : Tac unit | val print_binder_info (full: bool) (b: binder) : Tac unit | let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b) | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 33,
"end_line": 105,
"start_col": 0,
"start_line": 87
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")" | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | full: Prims.bool -> b: FStar.Stubs.Reflection.Types.binder -> FStar.Tactics.Effect.Tac Prims.unit | FStar.Tactics.Effect.Tac | [] | [] | [
"Prims.bool",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Stubs.Reflection.V1.Builtins.inspect_binder",
"FStar.Stubs.Reflection.Types.bv",
"FStar.Stubs.Reflection.V1.Data.aqualv",
"Prims.list",
"FStar.Stubs.Reflection.Types.term",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Stubs.Tactics.V1.Builtins.print",
"Prims.unit",
"Prims.string",
"Prims.op_Hat",
"Prims.string_of_int",
"FStar.Stubs.Reflection.V1.Data.__proj__Mkbv_view__item__bv_index",
"FStar.Stubs.Tactics.V1.Builtins.term_to_string",
"FStar.Tactics.V1.Derived.name_of_bv",
"FStar.Tactics.V1.Derived.binder_to_string",
"FStar.Tactics.V1.Derived.name_of_binder",
"FStar.Stubs.Reflection.V1.Data.bv_view",
"Prims.precedes",
"FStar.Stubs.Reflection.V1.Builtins.inspect_bv"
] | [] | false | true | false | false | false | let print_binder_info (full: bool) (b: binder) : Tac unit =
| let open inspect_binder b as
binder_view
in
let qual_str =
match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full
then
print ("> print_binder_info:" ^
"\n- name: " ^
(name_of_binder b) ^
"\n- as string: " ^
(binder_to_string b) ^
"\n- aqual: " ^
qual_str ^
"\n- ppname: " ^
name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^ "\n- sort: " ^ term_to_string binder_sort)
else print (binder_to_string b) | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.genv_push_fresh_bv | val genv_push_fresh_bv (ge: genv) (basename: string) (ty: typ) : Tac (genv & bv) | val genv_push_fresh_bv (ge: genv) (basename: string) (ty: typ) : Tac (genv & bv) | let genv_push_fresh_bv (ge : genv) (basename : string) (ty : typ) : Tac (genv & bv) =
let ge', b = genv_push_fresh_binder ge basename ty in
ge', bv_of_binder b | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 21,
"end_line": 345,
"start_col": 0,
"start_line": 343
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
  | Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
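(* Editorial note, not part of the original source: the visitor above simply
   replaces an ascribed term by the term it ascribes, so a term parsed as
   [(x <: nat)] is printed back as just [x]. As the TODO above mentions, some
   connectives may still show up in their desugared form, e.g. [l_and a b]
   instead of [a /\ b]. *)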
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
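(* Editorial note, not part of the original source: a [bind_map] is a plain
   association list, so after
     let m = bind_map_push (bind_map_push [] b1 t1) b2 t2
   (with [b1], [b2], [t1], [t2] arbitrary illustrative values) we get
   [bind_map_get m b2 == Some t2], [bind_map_get m b1 == Some t1], and
   [None] for any bv that was never pushed. *)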
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
   * variable because it has the same name, and if so we record it in the
   * list below. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
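(* Editorial note, not part of the original source: for a binding
   [let x = e in ...] one would typically call
   [genv_push_bv ge x_bv ty false (Some e)], so that [genv_get] later returns
   [Some (ty, false, e)]; with [None] the variable itself ([Tv_Var x_bv]) is
   recorded as its own definition. Here [x_bv], [ty] and [e] are illustrative
   names only. *)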
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In worst case the performance is quadratic in the number of binders.
* TODO: fix that, it actually probably happens for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name
let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0
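(* Editorial note, not part of the original source: freshness is by name, not
   by index. If the environment already has binders literally named "x0" and
   "x1", then [fresh_bv e "x"] returns a bv named "x2"; as noted above, the
   search is quadratic in the number of binders in the worst case. *)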
let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder =
let bv = fresh_bv e basename in
mk_binder bv ty
let genv_push_fresh_binder (ge : genv) (basename : string) (ty : typ) : Tac (genv & binder) =
let b = fresh_binder ge.env basename ty in
(* TODO: we can have a shortcircuit push (which performs less checks) *)
let ge' = genv_push_binder ge b true None in
ge', b
// TODO: actually we should use push_fresh_bv more
let push_fresh_binder (e : env) (basename : string) (ty : typ) : Tac (env & binder) =
let b = fresh_binder e basename ty in
let e' = push_binder e b in
e', b | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
ge: FStar.InteractiveHelpers.Base.genv ->
basename: Prims.string ->
ty: FStar.Stubs.Reflection.Types.typ
-> FStar.Tactics.Effect.Tac (FStar.InteractiveHelpers.Base.genv * FStar.Stubs.Reflection.Types.bv) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"Prims.string",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Stubs.Reflection.Types.bv",
"FStar.Reflection.V1.Derived.bv_of_binder",
"FStar.Pervasives.Native.tuple2",
"FStar.InteractiveHelpers.Base.genv_push_fresh_binder"
] | [] | false | true | false | false | false | let genv_push_fresh_bv (ge: genv) (basename: string) (ty: typ) : Tac (genv & bv) =
| let ge', b = genv_push_fresh_binder ge basename ty in
ge', bv_of_binder b | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.is_dom_remove | val is_dom_remove (l: ss_dom) (m: ss_map{is_dom l m}) (x: var{Map.contains m x})
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] | val is_dom_remove (l: ss_dom) (m: ss_map{is_dom l m}) (x: var{Map.contains m x})
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] | let rec is_dom_remove
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { Map.contains m x })
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] =
match l with
| [] -> ()
| y::tl ->
if x = y then ()
else let t_y = Map.sel m y in
let m1 = remove_map m y in
is_dom_remove tl m1 x;
assert (is_dom (remove_l tl x) (remove_map m1 x));
is_dom_push (remove_l tl x) (remove_map m1 x) y t_y;
assert (Map.equal (Map.upd (remove_map m1 x) y t_y)
(remove_map m x)) | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 44,
"end_line": 170,
"start_col": 0,
"start_line": 153
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
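(* Editorial note, not part of the original source: [is_dom l m] states that
   [l] enumerates exactly the keys of [m]; for instance [is_dom [x; y] m]
   unfolds to [Map.contains m x], then [Map.contains (remove_map m x) y], and
   finally the remaining map must have an empty domain. *)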
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty
#push-options "--warn_error -271"
let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
()
#pop-options
let rec remove_l (l:ss_dom) (x:var { L.memP x l })
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y:var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
match l with
| [] -> assert False; []
| y::tl ->
if y = x then tl
else y::(remove_l tl x) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
l: Pulse.Checker.Prover.Substs.ss_dom ->
m: Pulse.Checker.Prover.Substs.ss_map{Pulse.Checker.Prover.Substs.is_dom l m} ->
x: Pulse.Syntax.Base.var{FStar.Map.contains m x}
-> FStar.Pervasives.Lemma
(ensures
Pulse.Checker.Prover.Substs.is_dom (Pulse.Checker.Prover.Substs.remove_l l x)
(Pulse.Checker.Prover.Substs.remove_map m x))
[
SMTPat (Pulse.Checker.Prover.Substs.remove_l l x);
SMTPat (Pulse.Checker.Prover.Substs.remove_map m x)
] | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Pulse.Checker.Prover.Substs.ss_dom",
"Pulse.Checker.Prover.Substs.ss_map",
"Pulse.Checker.Prover.Substs.is_dom",
"Pulse.Syntax.Base.var",
"Prims.b2t",
"FStar.Map.contains",
"Pulse.Syntax.Base.term",
"Prims.list",
"Prims.op_Equality",
"Prims.bool",
"Prims._assert",
"FStar.Map.equal",
"FStar.Map.upd",
"Pulse.Checker.Prover.Substs.remove_map",
"Prims.unit",
"Pulse.Checker.Prover.Substs.is_dom_push",
"Pulse.Checker.Prover.Substs.remove_l",
"Pulse.Checker.Prover.Substs.is_dom_remove",
"FStar.Map.t",
"FStar.Map.sel",
"Prims.l_True",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [
"recursion"
] | false | false | true | false | false | let rec is_dom_remove (l: ss_dom) (m: ss_map{is_dom l m}) (x: var{Map.contains m x})
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] =
| match l with
| [] -> ()
| y :: tl ->
if x = y
then ()
else
let t_y = Map.sel m y in
let m1 = remove_map m y in
is_dom_remove tl m1 x;
assert (is_dom (remove_l tl x) (remove_map m1 x));
is_dom_push (remove_l tl x) (remove_map m1 x) y t_y;
assert (Map.equal (Map.upd (remove_map m1 x) y t_y) (remove_map m x)) | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.acomp_to_string | val acomp_to_string (c: comp) : Tac string | val acomp_to_string (c: comp) : Tac string | let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 86,
"end_line": 124,
"start_col": 0,
"start_line": 110
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | c: FStar.Stubs.Reflection.Types.comp -> FStar.Tactics.Effect.Tac Prims.string | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Stubs.Reflection.Types.comp",
"FStar.Stubs.Reflection.V1.Builtins.inspect_comp",
"FStar.Stubs.Reflection.Types.typ",
"Prims.op_Hat",
"Prims.string",
"FStar.Stubs.Tactics.V1.Builtins.term_to_string",
"FStar.Stubs.Reflection.Types.term",
"FStar.Stubs.Reflection.V1.Data.universes",
"FStar.Stubs.Reflection.Types.name",
"Prims.list",
"FStar.Stubs.Reflection.V1.Data.argv",
"FStar.Reflection.V1.Derived.flatten_name",
"FStar.List.Tot.Base.fold_left",
"FStar.Tactics.Util.map",
"FStar.Pervasives.Native.tuple2",
"FStar.Stubs.Reflection.V1.Data.aqualv"
] | [] | false | true | false | false | false | let acomp_to_string (c: comp) : Tac string =
| match inspect_comp c with
| C_Total ret -> "C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret -> "C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns -> "C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a: term) : Tac string = " (" ^ term_to_string a ^ ")" in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.bind_map_get | val bind_map_get (#a: Type) (m: bind_map a) (b: bv) : Tot (option a) | val bind_map_get (#a: Type) (m: bind_map a) (b: bv) : Tot (option a) | let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 68,
"end_line": 202,
"start_col": 0,
"start_line": 198
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
  | Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: FStar.InteractiveHelpers.Base.bind_map a -> b: FStar.Stubs.Reflection.Types.bv
-> FStar.Pervasives.Native.option a | Prims.Tot | [
"total"
] | [] | [
"FStar.InteractiveHelpers.Base.bind_map",
"FStar.Stubs.Reflection.Types.bv",
"FStar.Pervasives.Native.None",
"Prims.list",
"FStar.Pervasives.Native.tuple2",
"Prims.op_Equality",
"FStar.Order.order",
"FStar.Stubs.Reflection.V1.Builtins.compare_bv",
"FStar.Order.Eq",
"FStar.Pervasives.Native.Some",
"Prims.bool",
"FStar.InteractiveHelpers.Base.bind_map_get",
"FStar.Pervasives.Native.option"
] | [
"recursion"
] | false | false | false | true | false | let rec bind_map_get (#a: Type) (m: bind_map a) (b: bv) : Tot (option a) =
| match m with
| [] -> None
| (b', x) :: m' -> if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.push_fresh_var | val push_fresh_var : env -> string -> typ -> Tac (term & binder & env) | val push_fresh_var : env -> string -> typ -> Tac (term & binder & env) | let push_fresh_var e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, e1 | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 12,
"end_line": 351,
"start_col": 0,
"start_line": 348
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In worst case the performance is quadratic in the number of binders.
* TODO: fix that, it actually probably happens for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name
let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0
let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder =
let bv = fresh_bv e basename in
mk_binder bv ty
let genv_push_fresh_binder (ge : genv) (basename : string) (ty : typ) : Tac (genv & binder) =
let b = fresh_binder ge.env basename ty in
(* TODO: we can have a shortcircuit push (which performs less checks) *)
let ge' = genv_push_binder ge b true None in
ge', b
// TODO: actually we should use push_fresh_bv more
let push_fresh_binder (e : env) (basename : string) (ty : typ) : Tac (env & binder) =
let b = fresh_binder e basename ty in
let e' = push_binder e b in
e', b
let genv_push_fresh_bv (ge : genv) (basename : string) (ty : typ) : Tac (genv & bv) =
let ge', b = genv_push_fresh_binder ge basename ty in
ge', bv_of_binder b | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e0: FStar.Stubs.Reflection.Types.env ->
basename: Prims.string ->
ty: FStar.Stubs.Reflection.Types.typ
-> FStar.Tactics.Effect.Tac
((FStar.Stubs.Reflection.Types.term * FStar.Stubs.Reflection.Types.binder) *
FStar.Stubs.Reflection.Types.env) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Stubs.Reflection.Types.env",
"Prims.string",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Pervasives.Native.Mktuple3",
"FStar.Stubs.Reflection.Types.term",
"FStar.Pervasives.Native.tuple3",
"FStar.Stubs.Tactics.V1.Builtins.pack",
"FStar.Stubs.Reflection.V1.Data.Tv_Var",
"FStar.Reflection.V1.Derived.bv_of_binder",
"FStar.Pervasives.Native.tuple2",
"FStar.InteractiveHelpers.Base.push_fresh_binder"
] | [] | false | true | false | false | false | let push_fresh_var e0 basename ty =
| let e1, b1 = push_fresh_binder e0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, e1 | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.push_fresh_binder | val push_fresh_binder (e: env) (basename: string) (ty: typ) : Tac (env & binder) | val push_fresh_binder (e: env) (basename: string) (ty: typ) : Tac (env & binder) | let push_fresh_binder (e : env) (basename : string) (ty : typ) : Tac (env & binder) =
let b = fresh_binder e basename ty in
let e' = push_binder e b in
e', b | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 7,
"end_line": 341,
"start_col": 0,
"start_line": 338
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In worst case the performance is quadratic in the number of binders.
* TODO: fix that, it actually probably happens for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name
let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0
let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder =
let bv = fresh_bv e basename in
mk_binder bv ty
let genv_push_fresh_binder (ge : genv) (basename : string) (ty : typ) : Tac (genv & binder) =
let b = fresh_binder ge.env basename ty in
(* TODO: we can have a shortcircuit push (which performs less checks) *)
let ge' = genv_push_binder ge b true None in
ge', b | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: FStar.Stubs.Reflection.Types.env ->
basename: Prims.string ->
ty: FStar.Stubs.Reflection.Types.typ
-> FStar.Tactics.Effect.Tac
(FStar.Stubs.Reflection.Types.env * FStar.Stubs.Reflection.Types.binder) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Stubs.Reflection.Types.env",
"Prims.string",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Stubs.Reflection.V1.Builtins.push_binder",
"FStar.Pervasives.Native.tuple2",
"FStar.InteractiveHelpers.Base.fresh_binder"
] | [] | false | true | false | false | false | let push_fresh_binder (e: env) (basename: string) (ty: typ) : Tac (env & binder) =
| let b = fresh_binder e basename ty in
let e' = push_binder e b in
e', b | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.norm_apply_subst | val norm_apply_subst : env -> term -> list ((bv & typ) & term) -> Tac term | val norm_apply_subst : env -> term -> list ((bv & typ) & term) -> Tac term | let norm_apply_subst e t subst =
let bl, vl = unzip subst in
let bl = List.Tot.map (fun (bv,ty) -> mk_binder bv ty) bl in
let t1 = mk_abs bl t in
let t2 = mk_e_app t1 vl in
norm_term_env e [] t2 | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 23,
"end_line": 391,
"start_col": 0,
"start_line": 386
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In worst case the performance is quadratic in the number of binders.
* TODO: fix that, it actually probably happens for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name
let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0
let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder =
let bv = fresh_bv e basename in
mk_binder bv ty
let genv_push_fresh_binder (ge : genv) (basename : string) (ty : typ) : Tac (genv & binder) =
let b = fresh_binder ge.env basename ty in
(* TODO: we can have a shortcircuit push (which performs less checks) *)
let ge' = genv_push_binder ge b true None in
ge', b
// TODO: actually we should use push_fresh_bv more
let push_fresh_binder (e : env) (basename : string) (ty : typ) : Tac (env & binder) =
let b = fresh_binder e basename ty in
let e' = push_binder e b in
e', b
let genv_push_fresh_bv (ge : genv) (basename : string) (ty : typ) : Tac (genv & bv) =
let ge', b = genv_push_fresh_binder ge basename ty in
ge', bv_of_binder b
val push_fresh_var : env -> string -> typ -> Tac (term & binder & env)
let push_fresh_var e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, e1
val genv_push_fresh_var : genv -> string -> typ -> Tac (term & binder & genv)
let genv_push_fresh_var ge0 basename ty =
let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, ge1
val push_two_fresh_vars : env -> string -> typ -> Tac (term & binder & term & binder & env)
let push_two_fresh_vars e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let e2, b2 = push_fresh_binder e1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, e2
val genv_push_two_fresh_vars : genv -> string -> typ -> Tac (term & binder & term & binder & genv)
let genv_push_two_fresh_vars ge0 basename ty =
let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let ge2, b2 = genv_push_fresh_binder ge1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, ge2
(*** Substitutions *)
/// Substitutions
/// Custom substitutions using the normalizer. This is the easiest and safest
/// way to perform a substitution: if you want to substitute [v] with [t] in [exp],
/// just normalize [(fun v -> exp) t]. Note that this may be computationally expensive.
val norm_apply_subst : env -> term -> list ((bv & typ) & term) -> Tac term
val norm_apply_subst_in_comp : env -> comp -> list ((bv & typ) & term) -> Tac comp | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: FStar.Stubs.Reflection.Types.env ->
t: FStar.Stubs.Reflection.Types.term ->
subst:
Prims.list ((FStar.Stubs.Reflection.Types.bv * FStar.Stubs.Reflection.Types.typ) *
FStar.Stubs.Reflection.Types.term)
-> FStar.Tactics.Effect.Tac FStar.Stubs.Reflection.Types.term | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Stubs.Reflection.Types.env",
"FStar.Stubs.Reflection.Types.term",
"Prims.list",
"FStar.Pervasives.Native.tuple2",
"FStar.Stubs.Reflection.Types.bv",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Stubs.Tactics.V1.Builtins.norm_term_env",
"Prims.Nil",
"FStar.Pervasives.norm_step",
"FStar.Reflection.V1.Derived.mk_e_app",
"FStar.Tactics.V1.Derived.mk_abs",
"FStar.Stubs.Reflection.Types.binder",
"FStar.List.Tot.Base.map",
"FStar.Reflection.V1.Derived.mk_binder",
"FStar.InteractiveHelpers.Base.unzip"
] | [] | false | true | false | false | false | let norm_apply_subst e t subst =
| let bl, vl = unzip subst in
let bl = List.Tot.map (fun (bv, ty) -> mk_binder bv ty) bl in
let t1 = mk_abs bl t in
let t2 = mk_e_app t1 vl in
norm_term_env e [] t2 | false |
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.nt_substs_st_comp_commutes | val nt_substs_st_comp_commutes (s:st_comp) (nts:nt_substs)
: Lemma (ensures
nt_subst_st_comp s nts ==
{ s with res = nt_subst_term s.res nts;
pre = nt_subst_term s.pre nts;
post = nt_subst_term s.post nts; }) // no shifting required
[SMTPat (nt_subst_st_comp s nts)] | val nt_substs_st_comp_commutes (s:st_comp) (nts:nt_substs)
: Lemma (ensures
nt_subst_st_comp s nts ==
{ s with res = nt_subst_term s.res nts;
pre = nt_subst_term s.pre nts;
post = nt_subst_term s.post nts; }) // no shifting required
[SMTPat (nt_subst_st_comp s nts)] | let rec nt_substs_st_comp_commutes (s:st_comp) (nts:nt_substs)
: Lemma (ensures
nt_subst_st_comp s nts ==
{ s with res = nt_subst_term s.res nts;
pre = nt_subst_term s.pre nts;
post = nt_subst_term s.post nts; }) // no shifting required
(decreases nts)
[SMTPat (nt_subst_st_comp s nts)] =
match nts with
| [] -> ()
| (NT x e)::nts_tl -> nt_substs_st_comp_commutes (nt_subst_st_comp s [ NT x e ]) nts_tl | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 89,
"end_line": 256,
"start_col": 0,
"start_line": 246
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty
#push-options "--warn_error -271"
let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
()
#pop-options
let rec remove_l (l:ss_dom) (x:var { L.memP x l })
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y:var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
match l with
| [] -> assert False; []
| y::tl ->
if y = x then tl
else y::(remove_l tl x)
let rec is_dom_remove
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { Map.contains m x })
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] =
match l with
| [] -> ()
| y::tl ->
if x = y then ()
else let t_y = Map.sel m y in
let m1 = remove_map m y in
is_dom_remove tl m1 x;
assert (is_dom (remove_l tl x) (remove_map m1 x));
is_dom_push (remove_l tl x) (remove_map m1 x) y t_y;
assert (Map.equal (Map.upd (remove_map m1 x) y t_y)
(remove_map m x))
let rec ss_term (t:term) (ss:ss_t) : Tot term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_term t [ NT y (Map.sel ss.m y) ] in
ss_term t (tail ss)
let rec ss_st_term (t:st_term) (ss:ss_t) : Tot st_term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_st_term t [ NT y (Map.sel ss.m y) ] in
ss_st_term t (tail ss)
let rec ss_st_comp (s:st_comp) (ss:ss_t)
: Tot st_comp (decreases L.length ss.l) =
match ss.l with
| [] -> s
| y::tl ->
let s = subst_st_comp s [ NT y (Map.sel ss.m y) ] in
ss_st_comp s (tail ss)
let rec ss_comp (c:comp) (ss:ss_t)
: Tot comp (decreases L.length ss.l) =
match ss.l with
| [] -> c
| y::tl ->
let c = subst_comp c [ NT y (Map.sel ss.m y) ] in
ss_comp c (tail ss)
let rec ss_binder (b:binder) (ss:ss_t)
: Tot binder (decreases L.length ss.l) =
match ss.l with
| [] -> b
| y::tl ->
let b = subst_binder b [ NT y (Map.sel ss.m y) ] in
ss_binder b (tail ss)
let rec ss_env (g:env) (ss:ss_t)
: Tot (g':env { fstar_env g' == fstar_env g /\
Env.dom g' == Env.dom g })
(decreases L.length ss.l) =
admit ();
match ss.l with
| [] -> g
| y::tl -> ss_env (subst_env g [ NT y (Map.sel ss.m y) ]) (tail ss)
let rec ss_st_comp_commutes (s:st_comp) (ss:ss_t)
: Lemma (ensures
ss_st_comp s ss ==
{ s with res = ss_term s.res ss;
pre = ss_term s.pre ss;
post = ss_term s.post ss; }) // no shifting required
(decreases L.length ss.l)
[SMTPat (ss_st_comp s ss)] =
match ss.l with
| [] -> ()
| y::tl -> ss_st_comp_commutes (subst_st_comp s [ NT y (Map.sel ss.m y) ]) (tail ss)
let rec ss_comp_commutes (c:comp) (ss:ss_t)
: Lemma (ensures
(let r = ss_comp c ss in
(C_Tot? c ==> r == C_Tot (ss_term (comp_res c) ss)) /\
(C_ST? c ==> r == C_ST (ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STAtomic? c ==> r == C_STAtomic (ss_term (comp_inames c) ss)
(C_STAtomic?.obs c)
(ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STGhost? c ==> r == C_STGhost (ss_st_comp (st_comp_of_comp c) ss))))
(decreases L.length ss.l)
[SMTPat (ss_comp c ss)] =
match ss.l with
| [] -> ()
| y::tl -> ss_comp_commutes (subst_comp c [ NT y (Map.sel ss.m y) ]) (tail ss) | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Pulse.Syntax.Base.st_comp -> nts: Pulse.Checker.Prover.Substs.nt_substs
-> FStar.Pervasives.Lemma
(ensures
Pulse.Checker.Prover.Substs.nt_subst_st_comp s nts ==
Pulse.Syntax.Base.Mkst_comp (Mkst_comp?.u s)
(Pulse.Checker.Prover.Substs.nt_subst_term (Mkst_comp?.res s) nts)
(Pulse.Checker.Prover.Substs.nt_subst_term (Mkst_comp?.pre s) nts)
(Pulse.Checker.Prover.Substs.nt_subst_term (Mkst_comp?.post s) nts))
(decreases nts)
[SMTPat (Pulse.Checker.Prover.Substs.nt_subst_st_comp s nts)] | FStar.Pervasives.Lemma | [
"",
"lemma"
] | [] | [
"Pulse.Syntax.Base.st_comp",
"Pulse.Checker.Prover.Substs.nt_substs",
"Pulse.Syntax.Base.var",
"Pulse.Syntax.Base.term",
"Prims.list",
"Pulse.Syntax.Naming.subst_elt",
"Pulse.Checker.Prover.Substs.nt_substs_st_comp_commutes",
"Pulse.Checker.Prover.Substs.nt_subst_st_comp",
"Prims.Cons",
"Pulse.Syntax.Naming.NT",
"Prims.Nil",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"Pulse.Syntax.Base.Mkst_comp",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__u",
"Pulse.Checker.Prover.Substs.nt_subst_term",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__res",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__pre",
"Pulse.Syntax.Base.__proj__Mkst_comp__item__post",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat"
] | [
"recursion"
] | false | false | true | false | false | let rec nt_substs_st_comp_commutes (s: st_comp) (nts: nt_substs)
: Lemma
(ensures
nt_subst_st_comp s nts ==
{
s with
res = nt_subst_term s.res nts;
pre = nt_subst_term s.pre nts;
post = nt_subst_term s.post nts
}) (decreases nts) [SMTPat (nt_subst_st_comp s nts)] =
| match nts with
| [] -> ()
| NT x e :: nts_tl -> nt_substs_st_comp_commutes (nt_subst_st_comp s [NT x e]) nts_tl | false |
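A minimal client sketch of the commutation lemma above, assuming the names shown in this entry (st_comp, nt_substs, nt_subst_st_comp, nt_subst_term, nt_substs_st_comp_commutes) are in scope; the helper name res_of_nt_subst is illustrative and not part of the original module.

let res_of_nt_subst (s:st_comp) (nts:nt_substs)
  : Lemma ((nt_subst_st_comp s nts).res == nt_subst_term s.res nts)
  = (* the record-update shape given by the lemma's postcondition lets the
       projection equality follow directly *)
    nt_substs_st_comp_commutes s nts

Because of the SMTPat on nt_substs_st_comp_commutes, the explicit call is often unnecessary; it is kept here only for clarity.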
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.push_two_fresh_vars | val push_two_fresh_vars : env -> string -> typ -> Tac (term & binder & term & binder & env) | val push_two_fresh_vars : env -> string -> typ -> Tac (term & binder & term & binder & env) | let push_two_fresh_vars e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let e2, b2 = push_fresh_binder e1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, e2 | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 20,
"end_line": 365,
"start_col": 0,
"start_line": 360
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In the worst case the performance is quadratic in the number of binders.
* TODO: fix that; the quadratic case probably does occur for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name
let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0
let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder =
let bv = fresh_bv e basename in
mk_binder bv ty
let genv_push_fresh_binder (ge : genv) (basename : string) (ty : typ) : Tac (genv & binder) =
let b = fresh_binder ge.env basename ty in
(* TODO: we could have a short-circuiting push (which performs fewer checks) *)
let ge' = genv_push_binder ge b true None in
ge', b
// TODO: actually we should use push_fresh_bv more
let push_fresh_binder (e : env) (basename : string) (ty : typ) : Tac (env & binder) =
let b = fresh_binder e basename ty in
let e' = push_binder e b in
e', b
let genv_push_fresh_bv (ge : genv) (basename : string) (ty : typ) : Tac (genv & bv) =
let ge', b = genv_push_fresh_binder ge basename ty in
ge', bv_of_binder b
val push_fresh_var : env -> string -> typ -> Tac (term & binder & env)
let push_fresh_var e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, e1
val genv_push_fresh_var : genv -> string -> typ -> Tac (term & binder & genv)
let genv_push_fresh_var ge0 basename ty =
let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, ge1 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e0: FStar.Stubs.Reflection.Types.env ->
basename: Prims.string ->
ty: FStar.Stubs.Reflection.Types.typ
-> FStar.Tactics.Effect.Tac
((((FStar.Stubs.Reflection.Types.term * FStar.Stubs.Reflection.Types.binder) *
FStar.Stubs.Reflection.Types.term) *
FStar.Stubs.Reflection.Types.binder) *
FStar.Stubs.Reflection.Types.env) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Stubs.Reflection.Types.env",
"Prims.string",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Pervasives.Native.Mktuple5",
"FStar.Stubs.Reflection.Types.term",
"FStar.Pervasives.Native.tuple5",
"FStar.Stubs.Tactics.V1.Builtins.pack",
"FStar.Stubs.Reflection.V1.Data.Tv_Var",
"FStar.Reflection.V1.Derived.bv_of_binder",
"FStar.Pervasives.Native.tuple2",
"FStar.InteractiveHelpers.Base.push_fresh_binder"
] | [] | false | true | false | false | false | let push_two_fresh_vars e0 basename ty =
| let e1, b1 = push_fresh_binder e0 basename ty in
let e2, b2 = push_fresh_binder e1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, e2 | false |
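A possible caller sketch for the definition above: the name push_fresh_pair and the "x" basename are hypothetical; only push_two_fresh_vars with the signature shown in this entry is assumed.

let push_fresh_pair (e:env) (ty:typ) : Tac (term & term & env) =
  (* introduce two distinct fresh binders of the same type and keep only
     the corresponding variable terms plus the extended environment *)
  let v1, _b1, v2, _b2, e' = push_two_fresh_vars e "x" ty in
  v1, v2, e'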
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.genv_push_fresh_var | val genv_push_fresh_var : genv -> string -> typ -> Tac (term & binder & genv) | val genv_push_fresh_var : genv -> string -> typ -> Tac (term & binder & genv) | let genv_push_fresh_var ge0 basename ty =
let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, ge1 | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 13,
"end_line": 357,
"start_col": 0,
"start_line": 354
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In the worst case the performance is quadratic in the number of binders.
* TODO: fix that; the quadratic case probably does occur for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name
let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0
let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder =
let bv = fresh_bv e basename in
mk_binder bv ty
let genv_push_fresh_binder (ge : genv) (basename : string) (ty : typ) : Tac (genv & binder) =
let b = fresh_binder ge.env basename ty in
(* TODO: we could have a short-circuiting push (which performs fewer checks) *)
let ge' = genv_push_binder ge b true None in
ge', b
// TODO: actually we should use push_fresh_bv more
let push_fresh_binder (e : env) (basename : string) (ty : typ) : Tac (env & binder) =
let b = fresh_binder e basename ty in
let e' = push_binder e b in
e', b
let genv_push_fresh_bv (ge : genv) (basename : string) (ty : typ) : Tac (genv & bv) =
let ge', b = genv_push_fresh_binder ge basename ty in
ge', bv_of_binder b
val push_fresh_var : env -> string -> typ -> Tac (term & binder & env)
let push_fresh_var e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, e1 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
ge0: FStar.InteractiveHelpers.Base.genv ->
basename: Prims.string ->
ty: FStar.Stubs.Reflection.Types.typ
-> FStar.Tactics.Effect.Tac
((FStar.Stubs.Reflection.Types.term * FStar.Stubs.Reflection.Types.binder) *
FStar.InteractiveHelpers.Base.genv) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"Prims.string",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Pervasives.Native.Mktuple3",
"FStar.Stubs.Reflection.Types.term",
"FStar.Pervasives.Native.tuple3",
"FStar.Stubs.Tactics.V1.Builtins.pack",
"FStar.Stubs.Reflection.V1.Data.Tv_Var",
"FStar.Reflection.V1.Derived.bv_of_binder",
"FStar.Pervasives.Native.tuple2",
"FStar.InteractiveHelpers.Base.genv_push_fresh_binder"
] | [] | false | true | false | false | false | let genv_push_fresh_var ge0 basename ty =
| let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, ge1 | false |
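Similarly, a hedged wrapper sketch around the definition above; the name genv_push_fresh_var_tm is hypothetical, and only genv_push_fresh_var as typed in this entry is assumed.

let genv_push_fresh_var_tm (ge:genv) (basename:string) (ty:typ) : Tac (term & genv) =
  (* drop the binder when only the variable term and the extended genv are needed *)
  let v, _b, ge' = genv_push_fresh_var ge basename ty in
  v, ge'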
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.genv_push_two_fresh_vars | val genv_push_two_fresh_vars : genv -> string -> typ -> Tac (term & binder & term & binder & genv) | val genv_push_two_fresh_vars : genv -> string -> typ -> Tac (term & binder & term & binder & genv) | let genv_push_two_fresh_vars ge0 basename ty =
let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let ge2, b2 = genv_push_fresh_binder ge1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, ge2 | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 21,
"end_line": 373,
"start_col": 0,
"start_line": 368
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In the worst case the performance is quadratic in the number of binders.
* TODO: fix that; the quadratic case probably does occur for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name
let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0
let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder =
let bv = fresh_bv e basename in
mk_binder bv ty
let genv_push_fresh_binder (ge : genv) (basename : string) (ty : typ) : Tac (genv & binder) =
let b = fresh_binder ge.env basename ty in
(* TODO: we could have a short-circuiting push (which performs fewer checks) *)
let ge' = genv_push_binder ge b true None in
ge', b
// TODO: actually we should use push_fresh_bv more
let push_fresh_binder (e : env) (basename : string) (ty : typ) : Tac (env & binder) =
let b = fresh_binder e basename ty in
let e' = push_binder e b in
e', b
let genv_push_fresh_bv (ge : genv) (basename : string) (ty : typ) : Tac (genv & bv) =
let ge', b = genv_push_fresh_binder ge basename ty in
ge', bv_of_binder b
val push_fresh_var : env -> string -> typ -> Tac (term & binder & env)
let push_fresh_var e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, e1
val genv_push_fresh_var : genv -> string -> typ -> Tac (term & binder & genv)
let genv_push_fresh_var ge0 basename ty =
let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, ge1
val push_two_fresh_vars : env -> string -> typ -> Tac (term & binder & term & binder & env)
let push_two_fresh_vars e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let e2, b2 = push_fresh_binder e1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, e2 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
ge0: FStar.InteractiveHelpers.Base.genv ->
basename: Prims.string ->
ty: FStar.Stubs.Reflection.Types.typ
-> FStar.Tactics.Effect.Tac
((((FStar.Stubs.Reflection.Types.term * FStar.Stubs.Reflection.Types.binder) *
FStar.Stubs.Reflection.Types.term) *
FStar.Stubs.Reflection.Types.binder) *
FStar.InteractiveHelpers.Base.genv) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"Prims.string",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Pervasives.Native.Mktuple5",
"FStar.Stubs.Reflection.Types.term",
"FStar.Pervasives.Native.tuple5",
"FStar.Stubs.Tactics.V1.Builtins.pack",
"FStar.Stubs.Reflection.V1.Data.Tv_Var",
"FStar.Reflection.V1.Derived.bv_of_binder",
"FStar.Pervasives.Native.tuple2",
"FStar.InteractiveHelpers.Base.genv_push_fresh_binder"
] | [] | false | true | false | false | false | let genv_push_two_fresh_vars ge0 basename ty =
| let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let ge2, b2 = genv_push_fresh_binder ge1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, ge2 | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.apply_subst_in_comp | val apply_subst_in_comp : e: FStar.Stubs.Reflection.Types.env ->
c: FStar.Stubs.Reflection.Types.comp ->
subst:
Prims.list ((FStar.Stubs.Reflection.Types.bv * FStar.Stubs.Reflection.Types.typ) *
FStar.Stubs.Reflection.Types.term)
-> FStar.Tactics.Effect.Tac FStar.Stubs.Reflection.Types.comp | let apply_subst_in_comp = norm_apply_subst_in_comp | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 50,
"end_line": 585,
"start_col": 0,
"start_line": 585
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AScribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
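(* Illustrative sketch (hypothetical [b : bv] and [t : term]): after
 * [bind_map_push [] b t], the lookup [bind_map_get (bind_map_push [] b t) b]
 * returns [Some t], since the variable compares equal to itself. *)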
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
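(* Illustrative sketch (hypothetical names [x_bv], [ty], [def]): when exploring
 * [let x = def in ...], one would push the binder together with its definition
 * with [genv_push_bv ge x_bv ty false (Some def)]; on the resulting genv,
 * [genv_get] applied to [x_bv] returns [Some (ty, false, def)]. *)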
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In the worst case, the performance is quadratic in the number of binders.
* TODO: fix that, it actually probably happens for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name
let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0
let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder =
let bv = fresh_bv e basename in
mk_binder bv ty
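(* Illustrative sketch: if the environment already contains binders named "x0"
 * and "x1", [fresh_bv e "x"] tries "x0", then "x1", and returns a bv named "x2". *)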
let genv_push_fresh_binder (ge : genv) (basename : string) (ty : typ) : Tac (genv & binder) =
let b = fresh_binder ge.env basename ty in
(* TODO: we can have a shortcircuit push (which performs less checks) *)
let ge' = genv_push_binder ge b true None in
ge', b
// TODO: actually we should use push_fresh_bv more
let push_fresh_binder (e : env) (basename : string) (ty : typ) : Tac (env & binder) =
let b = fresh_binder e basename ty in
let e' = push_binder e b in
e', b
let genv_push_fresh_bv (ge : genv) (basename : string) (ty : typ) : Tac (genv & bv) =
let ge', b = genv_push_fresh_binder ge basename ty in
ge', bv_of_binder b
val push_fresh_var : env -> string -> typ -> Tac (term & binder & env)
let push_fresh_var e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, e1
val genv_push_fresh_var : genv -> string -> typ -> Tac (term & binder & genv)
let genv_push_fresh_var ge0 basename ty =
let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, ge1
val push_two_fresh_vars : env -> string -> typ -> Tac (term & binder & term & binder & env)
let push_two_fresh_vars e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let e2, b2 = push_fresh_binder e1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, e2
val genv_push_two_fresh_vars : genv -> string -> typ -> Tac (term & binder & term & binder & genv)
let genv_push_two_fresh_vars ge0 basename ty =
let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let ge2, b2 = genv_push_fresh_binder ge1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, ge2
(*** Substitutions *)
/// Substitutions
/// Custom substitutions using the normalizer. This is the easiest and safest
/// way to perform a substitution: if you want to substitute [v] with [t] in [exp],
/// just normalize [(fun v -> exp) t]. Note that this may be computationally expensive.
val norm_apply_subst : env -> term -> list ((bv & typ) & term) -> Tac term
val norm_apply_subst_in_comp : env -> comp -> list ((bv & typ) & term) -> Tac comp
let norm_apply_subst e t subst =
let bl, vl = unzip subst in
let bl = List.Tot.map (fun (bv,ty) -> mk_binder bv ty) bl in
let t1 = mk_abs bl t in
let t2 = mk_e_app t1 vl in
norm_term_env e [] t2
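(* Illustrative sketch (hypothetical variable [v] and term [t]): to substitute
 * [t] for [v] in [v + 1], the code above builds [(fun v -> v + 1) t] and
 * normalizes it, yielding [t + 1]. *)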
let norm_apply_subst_in_comp e c subst =
let subst = (fun x -> norm_apply_subst e x subst) in
let subst_in_aqualv a : Tac aqualv =
match a with
| Q_Implicit
| Q_Explicit -> a
| Q_Meta t -> Q_Meta (subst t)
in
match inspect_comp c with
| C_Total ret ->
let ret = subst ret in
pack_comp (C_Total ret)
| C_GTotal ret ->
let ret = subst ret in
pack_comp (C_GTotal ret)
| C_Lemma pre post patterns ->
let pre = subst pre in
let post = subst post in
let patterns = subst patterns in
pack_comp (C_Lemma pre post patterns)
| C_Eff us eff_name result eff_args decrs ->
let result = subst result in
let eff_args = map (fun (x, a) -> (subst x, subst_in_aqualv a)) eff_args in
let decrs = map subst decrs in
pack_comp (C_Eff us eff_name result eff_args decrs)
/// As substitution with normalization is very expensive, we implemented another
/// technique which works by exploring terms. This is super fast, but the terms
/// seem not to be reconstructed in the same way, which has a big impact on pretty printing.
/// For example, terms like [A /\ B] get printed as [Prims.l_and A B].
val deep_apply_subst : env -> term -> list (bv & term) -> Tac term
// Whenever we encounter a construction which introduces a binder, we need to apply
// the substitution in the binder type. Note that this gives a new binder, with
// which we need to replace the old one in what follows.
// Also note that it should be possible to rewrite [deep_apply_subst] in terms of [visit_tm],
// but [deep_apply_subst] seems to be a bit more precise with regard to type replacements (not
// sure it is really important, though).
val deep_apply_subst_in_bv : env -> bv -> list (bv & term) -> Tac (bv & list (bv & term))
val deep_apply_subst_in_binder : env -> binder -> list (bv & term) -> Tac (binder & list (bv & term))
val deep_apply_subst_in_comp : env -> comp -> list (bv & term) -> Tac comp
val deep_apply_subst_in_pattern : env -> pattern -> list (bv & term) -> Tac (pattern & list (bv & term))
let rec deep_apply_subst e t subst =
match inspect t with
| Tv_Var b ->
begin match bind_map_get subst b with
| None -> t
| Some t' -> t'
end
| Tv_BVar b ->
(* Note: Tv_BVar shouldn't happen *)
begin match bind_map_get subst b with
| None -> t
| Some t' -> t'
end
| Tv_FVar _ -> t
| Tv_App hd (a,qual) ->
let hd = deep_apply_subst e hd subst in
let a = deep_apply_subst e a subst in
pack (Tv_App hd (a, qual))
| Tv_Abs br body ->
let body = deep_apply_subst e body subst in
pack (Tv_Abs br body)
| Tv_Arrow br c ->
let br, subst = deep_apply_subst_in_binder e br subst in
let c = deep_apply_subst_in_comp e c subst in
pack (Tv_Arrow br c)
| Tv_Type _ -> t
| Tv_Refine bv sort ref ->
let sort = deep_apply_subst e sort subst in
let bv, subst = deep_apply_subst_in_bv e bv subst in
let ref = deep_apply_subst e ref subst in
pack (Tv_Refine bv sort ref)
| Tv_Const _ -> t
| Tv_Uvar _ _ -> t
| Tv_Let recf attrs bv ty def body ->
(* No need to substitute in the attributes - that we filter for safety *)
let ty = deep_apply_subst e ty subst in
let def = deep_apply_subst e def subst in
let bv, subst = deep_apply_subst_in_bv e bv subst in
let body = deep_apply_subst e body subst in
pack (Tv_Let recf [] bv ty def body)
| Tv_Match scrutinee ret_opt branches -> (* TODO: type of pattern variables *)
let scrutinee = deep_apply_subst e scrutinee subst in
let ret_opt = map_opt (fun (b, asc) ->
let b, subst = deep_apply_subst_in_binder e b subst in
let asc =
match asc with
| Inl t, tacopt, use_eq ->
Inl (deep_apply_subst e t subst),
map_opt (fun tac -> deep_apply_subst e tac subst) tacopt,
use_eq
| Inr c, tacopt, use_eq ->
Inr (deep_apply_subst_in_comp e c subst),
map_opt (fun tac -> deep_apply_subst e tac subst) tacopt,
use_eq in
b, asc) ret_opt in
(* For the branches: we don't need to explore the patterns *)
let deep_apply_subst_in_branch branch =
let pat, tm = branch in
let pat, subst = deep_apply_subst_in_pattern e pat subst in
let tm = deep_apply_subst e tm subst in
pat, tm
in
let branches = map deep_apply_subst_in_branch branches in
pack (Tv_Match scrutinee ret_opt branches)
| Tv_AscribedT exp ty tac use_eq ->
let exp = deep_apply_subst e exp subst in
let ty = deep_apply_subst e ty subst in
(* no need to apply it on the tactic - that we filter for safety *)
pack (Tv_AscribedT exp ty None use_eq)
| Tv_AscribedC exp c tac use_eq ->
let exp = deep_apply_subst e exp subst in
let c = deep_apply_subst_in_comp e c subst in
(* no need to apply it on the tactic - that we filter for safety *)
pack (Tv_AscribedC exp c None use_eq)
| _ ->
(* Unknown *)
t
and deep_apply_subst_in_bv e bv subst =
(* No substitution needs to happen for variables
(there is no longer a sort), but we do shift the substitution. *)
bv, (bv, pack (Tv_Var bv))::subst
(*
* AR: TODO: should apply subst in attrs?
*)
and deep_apply_subst_in_binder e br subst =
let open inspect_binder br <: binder_view in
let binder_sort = deep_apply_subst e binder_sort subst in
let binder_bv, subst = deep_apply_subst_in_bv e binder_bv subst in
pack_binder {
binder_bv=binder_bv;
binder_qual=binder_qual;
binder_attrs=binder_attrs;
binder_sort=binder_sort;
}, subst
and deep_apply_subst_in_comp e c subst =
let subst = (fun x -> deep_apply_subst e x subst) in
let subst_in_aqualv a : Tac aqualv =
match a with
| Q_Implicit
| Q_Explicit -> a
| Q_Meta t -> Q_Meta (subst t)
in
match inspect_comp c with
| C_Total ret ->
let ret = subst ret in
pack_comp (C_Total ret)
| C_GTotal ret ->
let ret = subst ret in
pack_comp (C_GTotal ret)
| C_Lemma pre post patterns ->
let pre = subst pre in
let post = subst post in
let patterns = subst patterns in
pack_comp (C_Lemma pre post patterns)
| C_Eff us eff_name result eff_args decrs ->
let result = subst result in
let eff_args = map (fun (x, a) -> (subst x, subst_in_aqualv a)) eff_args in
let decrs = map subst decrs in
pack_comp (C_Eff us eff_name result eff_args decrs)
and deep_apply_subst_in_pattern e pat subst =
match pat with
| Pat_Constant _ -> pat, subst
| Pat_Cons fv us patterns ->
(* The types of the variables in the patterns should be independent of each
* other: we use fold_right only to incrementally update the substitution *)
let patterns, subst =
fold_right (fun (pat, b) (pats, subst) ->
let pat, subst = deep_apply_subst_in_pattern e pat subst in
((pat, b) :: pats, subst)) patterns ([], subst)
in
Pat_Cons fv us patterns, subst
| Pat_Var bv st ->
let st = Sealed.seal (deep_apply_subst e (unseal st) subst) in
let bv, subst = deep_apply_subst_in_bv e bv subst in
Pat_Var bv st, subst
| Pat_Dot_Term eopt ->
Pat_Dot_Term (map_opt (fun t -> deep_apply_subst e t subst) eopt), subst
/// The substitution functions actually used in the rest of the meta F* functions.
/// For now, we use normalization because even though it is sometimes slow it
/// gives prettier terms, and readability is the priority. In order to mitigate
/// the performance issue, we try to minimize the number of calls to those functions,
/// by doing lazy instantiations for example (rather than incrementally applying
/// substitutions in a term, we accumulate the substitutions and perform them all at once).
/// TODO: would it be good to have a native substitution function in F* | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: FStar.Stubs.Reflection.Types.env ->
c: FStar.Stubs.Reflection.Types.comp ->
subst:
Prims.list ((FStar.Stubs.Reflection.Types.bv * FStar.Stubs.Reflection.Types.typ) *
FStar.Stubs.Reflection.Types.term)
-> FStar.Tactics.Effect.Tac FStar.Stubs.Reflection.Types.comp | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.InteractiveHelpers.Base.norm_apply_subst_in_comp"
] | [] | false | true | false | false | false | let apply_subst_in_comp =
| norm_apply_subst_in_comp | false |
|
Pulse.Checker.Prover.Substs.fst | Pulse.Checker.Prover.Substs.nt_subst_comp_commutes | val nt_subst_comp_commutes (c:comp) (nts:nt_substs)
: Lemma (ensures
(let r = nt_subst_comp c nts in
(C_Tot? c ==> r == C_Tot (nt_subst_term (comp_res c) nts)) /\
(C_ST? c ==> r == C_ST (nt_subst_st_comp (st_comp_of_comp c) nts)) /\
(C_STAtomic? c ==> r == C_STAtomic (nt_subst_term (comp_inames c) nts)
(C_STAtomic?.obs c)
(nt_subst_st_comp (st_comp_of_comp c) nts)) /\
(C_STGhost? c ==> r == C_STGhost (nt_subst_st_comp (st_comp_of_comp c) nts))))
[SMTPat (nt_subst_comp c nts)] | val nt_subst_comp_commutes (c:comp) (nts:nt_substs)
: Lemma (ensures
(let r = nt_subst_comp c nts in
(C_Tot? c ==> r == C_Tot (nt_subst_term (comp_res c) nts)) /\
(C_ST? c ==> r == C_ST (nt_subst_st_comp (st_comp_of_comp c) nts)) /\
(C_STAtomic? c ==> r == C_STAtomic (nt_subst_term (comp_inames c) nts)
(C_STAtomic?.obs c)
(nt_subst_st_comp (st_comp_of_comp c) nts)) /\
(C_STGhost? c ==> r == C_STGhost (nt_subst_st_comp (st_comp_of_comp c) nts))))
[SMTPat (nt_subst_comp c nts)] | let rec nt_subst_comp_commutes (c:comp) (nts:nt_substs)
: Lemma (ensures
(let r = nt_subst_comp c nts in
(C_Tot? c ==> r == C_Tot (nt_subst_term (comp_res c) nts)) /\
(C_ST? c ==> r == C_ST (nt_subst_st_comp (st_comp_of_comp c) nts)) /\
(C_STAtomic? c ==> r == C_STAtomic (nt_subst_term (comp_inames c) nts)
(C_STAtomic?.obs c)
(nt_subst_st_comp (st_comp_of_comp c) nts)) /\
(C_STGhost? c ==> r == C_STGhost (nt_subst_st_comp (st_comp_of_comp c) nts))))
(decreases nts)
[SMTPat (nt_subst_comp c nts)] =
match nts with
| [] -> ()
| (NT x e)::nts_tl -> nt_subst_comp_commutes (nt_subst_comp c [ NT x e ]) nts_tl | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Substs.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 82,
"end_line": 271,
"start_col": 0,
"start_line": 258
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Checker.Prover.Substs
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Typing.Env
open Pulse.Typing
open Pulse.Checker.Pure
module L = FStar.List.Tot
module Env = Pulse.Typing.Env
module Metatheory = Pulse.Typing.Metatheory
let coerce_eq (#a #b:Type) (x:a) (_:squash (a == b)) : y:b {y == x} = x
let rec no_repeats (l:list var) : Type0 =
match l with
| [] -> True
| x::tl -> (~ (L.memP x tl)) /\ no_repeats tl
type ss_dom = l:list var { no_repeats l }
type ss_map = m:Map.t var term {
forall (x:var). (~ (Map.contains m x)) ==> Map.sel m x == tm_unknown
}
let remove_map (m:ss_map) (x:var) =
Map.restrict (Set.complement (Set.singleton x)) (Map.upd m x tm_unknown)
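(* Note: [remove_map m x] first overwrites [x] with [tm_unknown] and then
 * restricts the domain to exclude [x], so the [ss_map] refinement (keys outside
 * the domain map to [tm_unknown]) is preserved. *)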
let rec is_dom (l:ss_dom) (m:ss_map) : Type0 =
match l with
| [] -> Set.equal (Map.domain m) Set.empty
| x::tl ->
Map.contains m x /\ is_dom tl (remove_map m x)
let rec is_dom_mem (l:ss_dom) (m:ss_map)
: Lemma
(requires is_dom l m)
(ensures forall (x:var).{:pattern L.memP x l \/ Map.contains m x}
L.memP x l <==> Map.contains m x)
[SMTPat (is_dom l m)] =
match l with
| [] -> ()
| y::tl -> is_dom_mem tl (remove_map m y)
noeq
type ss_t = {
l : ss_dom;
m : m:ss_map { is_dom l m }
}
let ln_ss_t (s:ss_t) =
List.Tot.for_all (fun x -> ln (Map.sel s.m x)) s.l
let as_map (ss:ss_t) = ss.m
let empty = { l = []; m = Map.const_on Set.empty tm_unknown }
let is_dom_push
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { ~ (Map.contains m x ) })
(t:term)
: Lemma (is_dom (x::l) (Map.upd m x t)) =
assert (Map.equal (remove_map (Map.upd m x t) x) m)
let push (ss:ss_t) (x:var { ~ (contains ss x) }) (t:term) : ss_t =
is_dom_push ss.l ss.m x t;
{ l = x::ss.l;
m = Map.upd ss.m x t }
let tail (ss:ss_t { Cons? ss.l }) : ss_t =
{ l = L.tl ss.l; m = remove_map ss.m (L.hd ss.l) }
let rec push_ss (ss1:ss_t) (ss2:ss_t { Set.disjoint (dom ss1) (dom ss2) })
: Tot ss_t (decreases L.length ss2.l) =
match ss2.l with
| [] -> ss1
| x::tl ->
push_ss (push ss1 x (Map.sel ss2.m x)) (tail ss2)
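(* Note: [push_ss ss1 ss2] walks the domain list of [ss2] and pushes each of its
 * bindings onto [ss1]; the [push_as_map] lemma below shows that the resulting
 * map is [Map.concat (as_map ss1) (as_map ss2)]. *)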
let check_disjoint ss1 ss2 =
admit ();
not (L.existsb (fun v1 -> L.mem v1 ss2.l) ss1.l)
let rec diff_aux (ss1 ss2:ss_t) (acc:ss_t { Set.disjoint (dom acc) (dom ss2) })
: Tot (ss:ss_t { Set.disjoint (dom ss) (dom ss2) }) (decreases L.length ss1.l) =
match ss1.l with
| [] -> acc
| x::l ->
if L.mem x ss2.l
then let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
else let acc_l = x::acc.l in
let acc_m = Map.upd acc.m x (Map.sel ss1.m x) in
assume (no_repeats acc_l /\ is_dom acc_l acc_m);
let acc = { l = acc_l; m = acc_m } in
let ss1 = { ss1 with l; m = remove_map ss1.m x } in
diff_aux ss1 ss2 acc
let diff ss1 ss2 = diff_aux ss1 ss2 empty
#push-options "--warn_error -271"
let push_as_map (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat (as_map (push_ss ss1 ss2))] =
let rec aux (ss1 ss2:ss_t)
: Lemma (requires Set.disjoint (dom ss1) (dom ss2))
(ensures Map.equal (as_map (push_ss ss1 ss2))
(Map.concat (as_map ss1) (as_map ss2)))
(decreases L.length ss2.l)
[SMTPat ()] =
match ss2.l with
| [] -> ()
| x::tl -> aux (push ss1 x (Map.sel ss2.m x)) (tail ss2)
in
()
#pop-options
let rec remove_l (l:ss_dom) (x:var { L.memP x l })
: Pure ss_dom
(requires True)
(ensures fun r -> forall (y:var). L.memP y r <==> (L.memP y l /\ y =!= x)) =
match l with
| [] -> assert False; []
| y::tl ->
if y = x then tl
else y::(remove_l tl x)
let rec is_dom_remove
(l:ss_dom)
(m:ss_map { is_dom l m })
(x:var { Map.contains m x })
: Lemma (is_dom (remove_l l x) (remove_map m x))
[SMTPat (remove_l l x); SMTPat (remove_map m x)] =
match l with
| [] -> ()
| y::tl ->
if x = y then ()
else let t_y = Map.sel m y in
let m1 = remove_map m y in
is_dom_remove tl m1 x;
assert (is_dom (remove_l tl x) (remove_map m1 x));
is_dom_push (remove_l tl x) (remove_map m1 x) y t_y;
assert (Map.equal (Map.upd (remove_map m1 x) y t_y)
(remove_map m x))
let rec ss_term (t:term) (ss:ss_t) : Tot term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_term t [ NT y (Map.sel ss.m y) ] in
ss_term t (tail ss)
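(* Note: with [ss.l = y::tl], [ss_term t ss] first substitutes [Map.sel ss.m y]
 * for [y] in [t], then recurses on [tail ss] to apply the remaining bindings;
 * the functions below do the same for st_terms, st_comps, comps and binders. *)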
let rec ss_st_term (t:st_term) (ss:ss_t) : Tot st_term (decreases L.length ss.l) =
match ss.l with
| [] -> t
| y::tl ->
let t = subst_st_term t [ NT y (Map.sel ss.m y) ] in
ss_st_term t (tail ss)
let rec ss_st_comp (s:st_comp) (ss:ss_t)
: Tot st_comp (decreases L.length ss.l) =
match ss.l with
| [] -> s
| y::tl ->
let s = subst_st_comp s [ NT y (Map.sel ss.m y) ] in
ss_st_comp s (tail ss)
let rec ss_comp (c:comp) (ss:ss_t)
: Tot comp (decreases L.length ss.l) =
match ss.l with
| [] -> c
| y::tl ->
let c = subst_comp c [ NT y (Map.sel ss.m y) ] in
ss_comp c (tail ss)
let rec ss_binder (b:binder) (ss:ss_t)
: Tot binder (decreases L.length ss.l) =
match ss.l with
| [] -> b
| y::tl ->
let b = subst_binder b [ NT y (Map.sel ss.m y) ] in
ss_binder b (tail ss)
let rec ss_env (g:env) (ss:ss_t)
: Tot (g':env { fstar_env g' == fstar_env g /\
Env.dom g' == Env.dom g })
(decreases L.length ss.l) =
admit ();
match ss.l with
| [] -> g
| y::tl -> ss_env (subst_env g [ NT y (Map.sel ss.m y) ]) (tail ss)
let rec ss_st_comp_commutes (s:st_comp) (ss:ss_t)
: Lemma (ensures
ss_st_comp s ss ==
{ s with res = ss_term s.res ss;
pre = ss_term s.pre ss;
post = ss_term s.post ss; }) // no shifting required
(decreases L.length ss.l)
[SMTPat (ss_st_comp s ss)] =
match ss.l with
| [] -> ()
| y::tl -> ss_st_comp_commutes (subst_st_comp s [ NT y (Map.sel ss.m y) ]) (tail ss)
let rec ss_comp_commutes (c:comp) (ss:ss_t)
: Lemma (ensures
(let r = ss_comp c ss in
(C_Tot? c ==> r == C_Tot (ss_term (comp_res c) ss)) /\
(C_ST? c ==> r == C_ST (ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STAtomic? c ==> r == C_STAtomic (ss_term (comp_inames c) ss)
(C_STAtomic?.obs c)
(ss_st_comp (st_comp_of_comp c) ss)) /\
(C_STGhost? c ==> r == C_STGhost (ss_st_comp (st_comp_of_comp c) ss))))
(decreases L.length ss.l)
[SMTPat (ss_comp c ss)] =
match ss.l with
| [] -> ()
| y::tl -> ss_comp_commutes (subst_comp c [ NT y (Map.sel ss.m y) ]) (tail ss)
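(* Note: these commutation lemmas state that applying a substitution to a
 * computation type is the same as applying it componentwise (result, pre, post,
 * inames); the SMTPat lets callers rewrite componentwise without unfolding the
 * recursive definitions above. *)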
let rec nt_substs_st_comp_commutes (s:st_comp) (nts:nt_substs)
: Lemma (ensures
nt_subst_st_comp s nts ==
{ s with res = nt_subst_term s.res nts;
pre = nt_subst_term s.pre nts;
post = nt_subst_term s.post nts; }) // no shifting required
(decreases nts)
[SMTPat (nt_subst_st_comp s nts)] =
match nts with
| [] -> ()
| (NT x e)::nts_tl -> nt_substs_st_comp_commutes (nt_subst_st_comp s [ NT x e ]) nts_tl | {
"checked_file": "/",
"dependencies": [
"Pulse.Typing.Metatheory.fsti.checked",
"Pulse.Typing.Env.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Pure.fsti.checked",
"prims.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Printf.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.IndefiniteDescription.fsti.checked"
],
"interface_file": true,
"source_file": "Pulse.Checker.Prover.Substs.fst"
} | [
{
"abbrev": true,
"full_module": "Pulse.Typing.Metatheory",
"short_module": "Metatheory"
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Pure",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Typing.Env",
"short_module": "Env"
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Env",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | c: Pulse.Syntax.Base.comp -> nts: Pulse.Checker.Prover.Substs.nt_substs
-> FStar.Pervasives.Lemma
(ensures
(let r = Pulse.Checker.Prover.Substs.nt_subst_comp c nts in
(C_Tot? c ==>
r ==
Pulse.Syntax.Base.C_Tot
(Pulse.Checker.Prover.Substs.nt_subst_term (Pulse.Syntax.Base.comp_res c) nts)) /\
(C_ST? c ==>
r ==
Pulse.Syntax.Base.C_ST
(Pulse.Checker.Prover.Substs.nt_subst_st_comp (Pulse.Syntax.Base.st_comp_of_comp c) nts)
) /\
(C_STAtomic? c ==>
r ==
Pulse.Syntax.Base.C_STAtomic
(Pulse.Checker.Prover.Substs.nt_subst_term (Pulse.Syntax.Base.comp_inames c) nts)
(C_STAtomic?.obs c)
(Pulse.Checker.Prover.Substs.nt_subst_st_comp (Pulse.Syntax.Base.st_comp_of_comp c)
nts)) /\
(C_STGhost? c ==>
r ==
Pulse.Syntax.Base.C_STGhost
(Pulse.Checker.Prover.Substs.nt_subst_st_comp (Pulse.Syntax.Base.st_comp_of_comp c) nts)
))) (decreases nts) [SMTPat (Pulse.Checker.Prover.Substs.nt_subst_comp c nts)] | FStar.Pervasives.Lemma | [
"",
"lemma"
] | [] | [
"Pulse.Syntax.Base.comp",
"Pulse.Checker.Prover.Substs.nt_substs",
"Pulse.Syntax.Base.var",
"Pulse.Syntax.Base.term",
"Prims.list",
"Pulse.Syntax.Naming.subst_elt",
"Pulse.Checker.Prover.Substs.nt_subst_comp_commutes",
"Pulse.Checker.Prover.Substs.nt_subst_comp",
"Prims.Cons",
"Pulse.Syntax.Naming.NT",
"Prims.Nil",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.l_and",
"Prims.l_imp",
"Prims.b2t",
"Pulse.Syntax.Base.uu___is_C_Tot",
"Prims.eq2",
"Pulse.Syntax.Base.C_Tot",
"Pulse.Checker.Prover.Substs.nt_subst_term",
"Pulse.Syntax.Base.comp_res",
"Pulse.Syntax.Base.uu___is_C_ST",
"Pulse.Syntax.Base.C_ST",
"Pulse.Checker.Prover.Substs.nt_subst_st_comp",
"Pulse.Syntax.Base.st_comp_of_comp",
"Pulse.Syntax.Base.uu___is_C_STAtomic",
"Pulse.Syntax.Base.C_STAtomic",
"Pulse.Syntax.Base.comp_inames",
"Pulse.Syntax.Base.__proj__C_STAtomic__item__obs",
"Pulse.Syntax.Base.uu___is_C_STGhost",
"Pulse.Syntax.Base.C_STGhost",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat"
] | [
"recursion"
] | false | false | true | false | false | let rec nt_subst_comp_commutes (c: comp) (nts: nt_substs)
: Lemma
(ensures
(let r = nt_subst_comp c nts in
(C_Tot? c ==> r == C_Tot (nt_subst_term (comp_res c) nts)) /\
(C_ST? c ==> r == C_ST (nt_subst_st_comp (st_comp_of_comp c) nts)) /\
(C_STAtomic? c ==>
r ==
C_STAtomic (nt_subst_term (comp_inames c) nts)
(C_STAtomic?.obs c)
(nt_subst_st_comp (st_comp_of_comp c) nts)) /\
(C_STGhost? c ==> r == C_STGhost (nt_subst_st_comp (st_comp_of_comp c) nts))))
(decreases nts)
[SMTPat (nt_subst_comp c nts)] =
| match nts with
| [] -> ()
| NT x e :: nts_tl -> nt_subst_comp_commutes (nt_subst_comp c [NT x e]) nts_tl | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.bind_map_get_from_name | val bind_map_get_from_name (#a: Type) (m: bind_map a) (name: string) : Tac (option (bv & a)) | val bind_map_get_from_name (#a: Type) (m: bind_map a) (name: string) : Tac (option (bv & a)) | let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 88,
"end_line": 210,
"start_col": 0,
"start_line": 204
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | m: FStar.InteractiveHelpers.Base.bind_map a -> name: Prims.string
-> FStar.Tactics.Effect.Tac (FStar.Pervasives.Native.option (FStar.Stubs.Reflection.Types.bv * a)) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.InteractiveHelpers.Base.bind_map",
"Prims.string",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.tuple2",
"FStar.Stubs.Reflection.Types.bv",
"FStar.Pervasives.Native.option",
"Prims.list",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.bool",
"FStar.InteractiveHelpers.Base.bind_map_get_from_name",
"Prims.op_Equality",
"FStar.Tactics.Unseal.unseal",
"FStar.Stubs.Reflection.V1.Data.__proj__Mkbv_view__item__bv_ppname",
"FStar.Stubs.Reflection.V1.Data.bv_view",
"Prims.precedes",
"FStar.Stubs.Reflection.V1.Builtins.inspect_bv"
] | [
"recursion"
] | false | true | false | false | false | let rec bind_map_get_from_name (#a: Type) (m: bind_map a) (name: string) : Tac (option (bv & a)) =
| match m with
| [] -> None
| (b', x) :: m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.apply_subst | val apply_subst : e: FStar.Stubs.Reflection.Types.env ->
t: FStar.Stubs.Reflection.Types.term ->
subst:
Prims.list ((FStar.Stubs.Reflection.Types.bv * FStar.Stubs.Reflection.Types.typ) *
FStar.Stubs.Reflection.Types.term)
-> FStar.Tactics.Effect.Tac FStar.Stubs.Reflection.Types.term | let apply_subst = norm_apply_subst | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 34,
"end_line": 584,
"start_col": 0,
"start_line": 584
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In the worst case, the performance is quadratic in the number of binders.
* TODO: fix that, it actually probably happens for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name
let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0
let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder =
let bv = fresh_bv e basename in
mk_binder bv ty
let genv_push_fresh_binder (ge : genv) (basename : string) (ty : typ) : Tac (genv & binder) =
let b = fresh_binder ge.env basename ty in
(* TODO: we can have a shortcircuit push (which performs less checks) *)
let ge' = genv_push_binder ge b true None in
ge', b
// TODO: actually we should use push_fresh_bv more
let push_fresh_binder (e : env) (basename : string) (ty : typ) : Tac (env & binder) =
let b = fresh_binder e basename ty in
let e' = push_binder e b in
e', b
let genv_push_fresh_bv (ge : genv) (basename : string) (ty : typ) : Tac (genv & bv) =
let ge', b = genv_push_fresh_binder ge basename ty in
ge', bv_of_binder b
val push_fresh_var : env -> string -> typ -> Tac (term & binder & env)
let push_fresh_var e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, e1
val genv_push_fresh_var : genv -> string -> typ -> Tac (term & binder & genv)
let genv_push_fresh_var ge0 basename ty =
let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, ge1
val push_two_fresh_vars : env -> string -> typ -> Tac (term & binder & term & binder & env)
let push_two_fresh_vars e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let e2, b2 = push_fresh_binder e1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, e2
val genv_push_two_fresh_vars : genv -> string -> typ -> Tac (term & binder & term & binder & genv)
let genv_push_two_fresh_vars ge0 basename ty =
let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let ge2, b2 = genv_push_fresh_binder ge1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, ge2
(*** Substitutions *)
/// Substitutions
/// Custom substitutions using the normalizer. This is the easiest and safest
/// way to perform a substitution: if you want to substitute [v] with [t] in [exp],
/// just normalize [(fun v -> exp) t]. Note that this may be computationally expensive.
val norm_apply_subst : env -> term -> list ((bv & typ) & term) -> Tac term
val norm_apply_subst_in_comp : env -> comp -> list ((bv & typ) & term) -> Tac comp
let norm_apply_subst e t subst =
let bl, vl = unzip subst in
let bl = List.Tot.map (fun (bv,ty) -> mk_binder bv ty) bl in
let t1 = mk_abs bl t in
let t2 = mk_e_app t1 vl in
norm_term_env e [] t2
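(* Worked example (a sketch with illustrative names): if [subst] maps a bv [x]
 * of type [int] to [1] and a bv [y] of type [int] to [2], then applying it to
 * the term [x + y] builds the redex [(fun (x:int) (y:int) -> x + y) 1 2] and
 * normalizes it in [e], yielding [1 + 2]. *)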
let norm_apply_subst_in_comp e c subst =
let subst = (fun x -> norm_apply_subst e x subst) in
let subst_in_aqualv a : Tac aqualv =
match a with
| Q_Implicit
| Q_Explicit -> a
| Q_Meta t -> Q_Meta (subst t)
in
match inspect_comp c with
| C_Total ret ->
let ret = subst ret in
pack_comp (C_Total ret)
| C_GTotal ret ->
let ret = subst ret in
pack_comp (C_GTotal ret)
| C_Lemma pre post patterns ->
let pre = subst pre in
let post = subst post in
let patterns = subst patterns in
pack_comp (C_Lemma pre post patterns)
| C_Eff us eff_name result eff_args decrs ->
let result = subst result in
let eff_args = map (fun (x, a) -> (subst x, subst_in_aqualv a)) eff_args in
let decrs = map subst decrs in
pack_comp (C_Eff us eff_name result eff_args decrs)
/// As substitution with normalization is very expensive, we implemented another
/// technique which works by exploring terms. This is super fast, but the terms
/// seem not to be reconstructed in the same way, which has a big impact on pretty printing.
/// For example, terms like [A /\ B] get printed as [Prims.l_and A B].
val deep_apply_subst : env -> term -> list (bv & term) -> Tac term
// Whenever we encounter a construction which introduces a binder, we need to apply
// the substitution in the binder type. Note that this gives a new binder, with
// which we need to replace the old one in what follows.
// Also note that it should be possible to rewrite [deep_apply_subst] in terms of [visit_tm],
// but [deep_apply_subst] seems to be a bit more precise with regard to type replacements (not
// sure it is really important, though).
val deep_apply_subst_in_bv : env -> bv -> list (bv & term) -> Tac (bv & list (bv & term))
val deep_apply_subst_in_binder : env -> binder -> list (bv & term) -> Tac (binder & list (bv & term))
val deep_apply_subst_in_comp : env -> comp -> list (bv & term) -> Tac comp
val deep_apply_subst_in_pattern : env -> pattern -> list (bv & term) -> Tac (pattern & list (bv & term))
let rec deep_apply_subst e t subst =
match inspect t with
| Tv_Var b ->
begin match bind_map_get subst b with
| None -> t
| Some t' -> t'
end
| Tv_BVar b ->
(* Note: Tv_BVar shouldn't happen *)
begin match bind_map_get subst b with
| None -> t
| Some t' -> t'
end
| Tv_FVar _ -> t
| Tv_App hd (a,qual) ->
let hd = deep_apply_subst e hd subst in
let a = deep_apply_subst e a subst in
pack (Tv_App hd (a, qual))
| Tv_Abs br body ->
let body = deep_apply_subst e body subst in
pack (Tv_Abs br body)
| Tv_Arrow br c ->
let br, subst = deep_apply_subst_in_binder e br subst in
let c = deep_apply_subst_in_comp e c subst in
pack (Tv_Arrow br c)
| Tv_Type _ -> t
| Tv_Refine bv sort ref ->
let sort = deep_apply_subst e sort subst in
let bv, subst = deep_apply_subst_in_bv e bv subst in
let ref = deep_apply_subst e ref subst in
pack (Tv_Refine bv sort ref)
| Tv_Const _ -> t
| Tv_Uvar _ _ -> t
| Tv_Let recf attrs bv ty def body ->
    (* No need to substitute in the attributes, which we filter out for safety anyway *)
let ty = deep_apply_subst e ty subst in
let def = deep_apply_subst e def subst in
let bv, subst = deep_apply_subst_in_bv e bv subst in
let body = deep_apply_subst e body subst in
pack (Tv_Let recf [] bv ty def body)
| Tv_Match scrutinee ret_opt branches -> (* TODO: type of pattern variables *)
let scrutinee = deep_apply_subst e scrutinee subst in
let ret_opt = map_opt (fun (b, asc) ->
let b, subst = deep_apply_subst_in_binder e b subst in
let asc =
match asc with
| Inl t, tacopt, use_eq ->
Inl (deep_apply_subst e t subst),
map_opt (fun tac -> deep_apply_subst e tac subst) tacopt,
use_eq
| Inr c, tacopt, use_eq ->
Inr (deep_apply_subst_in_comp e c subst),
map_opt (fun tac -> deep_apply_subst e tac subst) tacopt,
use_eq in
b, asc) ret_opt in
    (* For the branches: exploring the patterns mostly serves to shift the
     * substitution with the pattern-bound variables *)
let deep_apply_subst_in_branch branch =
let pat, tm = branch in
let pat, subst = deep_apply_subst_in_pattern e pat subst in
let tm = deep_apply_subst e tm subst in
pat, tm
in
let branches = map deep_apply_subst_in_branch branches in
pack (Tv_Match scrutinee ret_opt branches)
| Tv_AscribedT exp ty tac use_eq ->
let exp = deep_apply_subst e exp subst in
let ty = deep_apply_subst e ty subst in
    (* no need to apply it to the tactic, which we filter out for safety anyway *)
pack (Tv_AscribedT exp ty None use_eq)
| Tv_AscribedC exp c tac use_eq ->
let exp = deep_apply_subst e exp subst in
let c = deep_apply_subst_in_comp e c subst in
    (* no need to apply it to the tactic, which we filter out for safety anyway *)
pack (Tv_AscribedC exp c None use_eq)
| _ ->
(* Unknown *)
t
and deep_apply_subst_in_bv e bv subst =
  (* No substitution needs to happen in the variable itself
     (a bv no longer carries a sort), but we do shift the substitution. *)
bv, (bv, pack (Tv_Var bv))::subst
(*
* AR: TODO: should apply subst in attrs?
*)
and deep_apply_subst_in_binder e br subst =
let open inspect_binder br <: binder_view in
let binder_sort = deep_apply_subst e binder_sort subst in
let binder_bv, subst = deep_apply_subst_in_bv e binder_bv subst in
pack_binder {
binder_bv=binder_bv;
binder_qual=binder_qual;
binder_attrs=binder_attrs;
binder_sort=binder_sort;
}, subst
and deep_apply_subst_in_comp e c subst =
let subst = (fun x -> deep_apply_subst e x subst) in
let subst_in_aqualv a : Tac aqualv =
match a with
| Q_Implicit
| Q_Explicit -> a
| Q_Meta t -> Q_Meta (subst t)
in
match inspect_comp c with
| C_Total ret ->
let ret = subst ret in
pack_comp (C_Total ret)
| C_GTotal ret ->
let ret = subst ret in
pack_comp (C_GTotal ret)
| C_Lemma pre post patterns ->
let pre = subst pre in
let post = subst post in
let patterns = subst patterns in
pack_comp (C_Lemma pre post patterns)
| C_Eff us eff_name result eff_args decrs ->
let result = subst result in
let eff_args = map (fun (x, a) -> (subst x, subst_in_aqualv a)) eff_args in
let decrs = map subst decrs in
pack_comp (C_Eff us eff_name result eff_args decrs)
and deep_apply_subst_in_pattern e pat subst =
match pat with
| Pat_Constant _ -> pat, subst
| Pat_Cons fv us patterns ->
    (* The types of the variables in the patterns should be independent of each
     * other: we use fold_right only to incrementally update the substitution *)
let patterns, subst =
fold_right (fun (pat, b) (pats, subst) ->
let pat, subst = deep_apply_subst_in_pattern e pat subst in
((pat, b) :: pats, subst)) patterns ([], subst)
in
Pat_Cons fv us patterns, subst
| Pat_Var bv st ->
let st = Sealed.seal (deep_apply_subst e (unseal st) subst) in
let bv, subst = deep_apply_subst_in_bv e bv subst in
Pat_Var bv st, subst
| Pat_Dot_Term eopt ->
Pat_Dot_Term (map_opt (fun t -> deep_apply_subst e t subst) eopt), subst
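(* Note on the shifting performed above (an informal sketch): whenever the
 * traversal crosses a binding occurrence (the bound variable of a [Tv_Let] or
 * [Tv_Refine], a binder, or a pattern variable), the substitution is extended
 * with the identity pair [(bv, Tv_Var bv)]. Since lookups return the most
 * recent entry, occurrences below that binder are mapped back to the binder
 * itself instead of being rewritten by an outer substitution entry. *)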
/// The substitution functions actually used in the rest of the meta F* functions.
/// For now, we use normalization because even though it is sometimes slow it
/// gives prettier terms, and readability is the priority. In order to mitigate
/// the performance issue, we try to minimize the number of calls to those functions,
/// by doing lazy instantiations for example (rather than incrementally apply
/// substitutions in a term, accumulate the substitutions and perform them all at once). | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: FStar.Stubs.Reflection.Types.env ->
t: FStar.Stubs.Reflection.Types.term ->
subst:
Prims.list ((FStar.Stubs.Reflection.Types.bv * FStar.Stubs.Reflection.Types.typ) *
FStar.Stubs.Reflection.Types.term)
-> FStar.Tactics.Effect.Tac FStar.Stubs.Reflection.Types.term | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.InteractiveHelpers.Base.norm_apply_subst"
] | [] | false | true | false | false | false | let apply_subst =
| norm_apply_subst | false |
|
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.genv_push_bv | val genv_push_bv (ge: genv) (b: bv) (sort: typ) (abs: bool) (t: option term) : Tac genv | val genv_push_bv (ge: genv) (b: bv) (sort: typ) (abs: bool) (t: option term) : Tac genv | let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars' | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 25,
"end_line": 279,
"start_col": 0,
"start_line": 272
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
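(* Illustrative example (a sketch with assumed terms): [mk_app_norm e f [x]]
 * where [f] is [fun v -> v + 1] and [x] is [2] first builds the application
 * [(fun v -> v + 1) 2], then normalizes it in [e] so that the abstraction
 * disappears, leaving [2 + 1]. *)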
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
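(* For example, a bv named "x" with index 42 is printed as [x (%42)], which
 * makes it easy to tell apart shadowed variables sharing the same name. *)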
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
  | Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
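(* Illustrative example: an ascribed subterm such as [(x <: nat)] is rewritten
 * to [x]; since [visit_tm] traverses the whole term, nested ascriptions are
 * removed as well. *)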
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
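(* Illustrative note: [bind_map_push m b x] simply conses [(b, x)] onto the map;
 * [bind_map_get] compares variables with [compare_bv], so two variables sharing
 * a name but introduced separately (hence with different indices) are kept
 * distinct, while [bind_map_get_from_name] looks bindings up by their printed name. *)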
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
ge: FStar.InteractiveHelpers.Base.genv ->
b: FStar.Stubs.Reflection.Types.bv ->
sort: FStar.Stubs.Reflection.Types.typ ->
abs: Prims.bool ->
t: FStar.Pervasives.Native.option FStar.Stubs.Reflection.Types.term
-> FStar.Tactics.Effect.Tac FStar.InteractiveHelpers.Base.genv | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.Stubs.Reflection.Types.bv",
"FStar.Stubs.Reflection.Types.typ",
"Prims.bool",
"FStar.Pervasives.Native.option",
"FStar.Stubs.Reflection.Types.term",
"FStar.InteractiveHelpers.Base.mk_genv",
"Prims.list",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.tuple3",
"FStar.InteractiveHelpers.Base.bind_map_push",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__bmap",
"FStar.Pervasives.Native.Mktuple3",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.Pervasives.Native.__proj__Some__item__v",
"FStar.Stubs.Tactics.V1.Builtins.pack",
"FStar.Stubs.Reflection.V1.Data.Tv_Var",
"FStar.Stubs.Reflection.Types.env",
"FStar.Stubs.Reflection.V1.Builtins.push_binder",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__env",
"Prims.Cons",
"FStar.Pervasives.Native.fst",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__svars",
"FStar.InteractiveHelpers.Base.genv_get_from_name",
"Prims.string",
"FStar.Tactics.V1.Derived.name_of_bv",
"FStar.Stubs.Reflection.Types.binder",
"FStar.Reflection.V1.Derived.mk_binder"
] | [] | false | true | false | false | false | let genv_push_bv (ge: genv) (b: bv) (sort: typ) (abs: bool) (t: option term) : Tac genv =
| let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars' | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.genv_get_from_name | val genv_get_from_name (ge: genv) (name: string) : Tac (option ((bv & typ) & (bool & term))) | val genv_get_from_name (ge: genv) (name: string) : Tac (option ((bv & typ) & (bool & term))) | let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x)) | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 56,
"end_line": 268,
"start_col": 0,
"start_line": 264
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
  | Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ge: FStar.InteractiveHelpers.Base.genv -> name: Prims.string
-> FStar.Tactics.Effect.Tac
(FStar.Pervasives.Native.option ((FStar.Stubs.Reflection.Types.bv *
FStar.Stubs.Reflection.Types.typ) *
(Prims.bool * FStar.Stubs.Reflection.Types.term))) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"Prims.string",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.tuple2",
"FStar.Stubs.Reflection.Types.bv",
"FStar.Stubs.Reflection.Types.typ",
"Prims.bool",
"FStar.Stubs.Reflection.Types.term",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple3",
"FStar.InteractiveHelpers.Base.bind_map_get_from_name",
"FStar.InteractiveHelpers.Base.__proj__Mkgenv__item__bmap"
] | [] | false | true | false | false | false | let genv_get_from_name (ge: genv) (name: string) : Tac (option ((bv & typ) & (bool & term))) =
| match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x)) | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.bv_is_abstract | val bv_is_abstract : genv -> bv -> Tot bool | val bv_is_abstract : genv -> bv -> Tot bool | let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 27,
"end_line": 301,
"start_col": 0,
"start_line": 298
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, normalize the result to make sure
/// all the abstractions are simplified
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
  | Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to terms (pretty) printing.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
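(* Illustrative example (a sketch): if the context already contains a variable
 * [x] and a new binder also named [x] is pushed with [genv_push_bv], the older
 * [x] is recorded in [ge.svars]; [bv_is_shadowed ge old_x] then returns true,
 * while the newly pushed [x] is not considered shadowed. *)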
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | ge: FStar.InteractiveHelpers.Base.genv -> bv: FStar.Stubs.Reflection.Types.bv -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.Stubs.Reflection.Types.bv",
"FStar.InteractiveHelpers.Base.genv_get",
"FStar.Stubs.Reflection.Types.typ",
"Prims.bool",
"FStar.Stubs.Reflection.Types.term"
] | [] | false | false | false | true | false | let bv_is_abstract ge bv =
| match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs | false |
Test.Vectors.Chacha20Poly1305.fst | Test.Vectors.Chacha20Poly1305.input8 | val input8:(b: B.buffer UInt8.t {B.length b = 513 /\ B.recallable b /\ B.disjoint b aad8}) | val input8:(b: B.buffer UInt8.t {B.length b = 513 /\ B.recallable b /\ B.disjoint b aad8}) | let input8: (b: B.buffer UInt8.t { B.length b = 513 /\ B.recallable b /\ B.disjoint b aad8 }) =
B.recall aad8;[@inline_let] let l = [ 0xe6uy; 0xc3uy; 0xdbuy; 0x63uy; 0x55uy; 0x15uy; 0xe3uy; 0x5buy; 0xb7uy; 0x4buy; 0x27uy; 0x8buy; 0x5auy; 0xdduy; 0xc2uy; 0xe8uy; 0x3auy; 0x6buy; 0xd7uy; 0x81uy; 0x96uy; 0x35uy; 0x97uy; 0xcauy; 0xd7uy; 0x68uy; 0xe8uy; 0xefuy; 0xceuy; 0xabuy; 0xdauy; 0x09uy; 0x6euy; 0xd6uy; 0x8euy; 0xcbuy; 0x55uy; 0xb5uy; 0xe1uy; 0xe5uy; 0x57uy; 0xfduy; 0xc4uy; 0xe3uy; 0xe0uy; 0x18uy; 0x4fuy; 0x85uy; 0xf5uy; 0x3fuy; 0x7euy; 0x4buy; 0x88uy; 0xc9uy; 0x52uy; 0x44uy; 0x0fuy; 0xeauy; 0xafuy; 0x1fuy; 0x71uy; 0x48uy; 0x9fuy; 0x97uy; 0x6duy; 0xb9uy; 0x6fuy; 0x00uy; 0xa6uy; 0xdeuy; 0x2buy; 0x77uy; 0x8buy; 0x15uy; 0xaduy; 0x10uy; 0xa0uy; 0x2buy; 0x7buy; 0x41uy; 0x90uy; 0x03uy; 0x2duy; 0x69uy; 0xaeuy; 0xccuy; 0x77uy; 0x7cuy; 0xa5uy; 0x9duy; 0x29uy; 0x22uy; 0xc2uy; 0xeauy; 0xb4uy; 0x00uy; 0x1auy; 0xd2uy; 0x7auy; 0x98uy; 0x8auy; 0xf9uy; 0xf7uy; 0x82uy; 0xb0uy; 0xabuy; 0xd8uy; 0xa6uy; 0x94uy; 0x8duy; 0x58uy; 0x2fuy; 0x01uy; 0x9euy; 0x00uy; 0x20uy; 0xfcuy; 0x49uy; 0xdcuy; 0x0euy; 0x03uy; 0xe8uy; 0x45uy; 0x10uy; 0xd6uy; 0xa8uy; 0xdauy; 0x55uy; 0x10uy; 0x9auy; 0xdfuy; 0x67uy; 0x22uy; 0x8buy; 0x43uy; 0xabuy; 0x00uy; 0xbbuy; 0x02uy; 0xc8uy; 0xdduy; 0x7buy; 0x97uy; 0x17uy; 0xd7uy; 0x1duy; 0x9euy; 0x02uy; 0x5euy; 0x48uy; 0xdeuy; 0x8euy; 0xcfuy; 0x99uy; 0x07uy; 0x95uy; 0x92uy; 0x3cuy; 0x5fuy; 0x9fuy; 0xc5uy; 0x8auy; 0xc0uy; 0x23uy; 0xaauy; 0xd5uy; 0x8cuy; 0x82uy; 0x6euy; 0x16uy; 0x92uy; 0xb1uy; 0x12uy; 0x17uy; 0x07uy; 0xc3uy; 0xfbuy; 0x36uy; 0xf5uy; 0x6cuy; 0x35uy; 0xd6uy; 0x06uy; 0x1fuy; 0x9fuy; 0xa7uy; 0x94uy; 0xa2uy; 0x38uy; 0x63uy; 0x9cuy; 0xb0uy; 0x71uy; 0xb3uy; 0xa5uy; 0xd2uy; 0xd8uy; 0xbauy; 0x9fuy; 0x08uy; 0x01uy; 0xb3uy; 0xffuy; 0x04uy; 0x97uy; 0x73uy; 0x45uy; 0x1buy; 0xd5uy; 0xa9uy; 0x9cuy; 0x80uy; 0xafuy; 0x04uy; 0x9auy; 0x85uy; 0xdbuy; 0x32uy; 0x5buy; 0x5duy; 0x1auy; 0xc1uy; 0x36uy; 0x28uy; 0x10uy; 0x79uy; 0xf1uy; 0x3cuy; 0xbfuy; 0x1auy; 0x41uy; 0x5cuy; 0x4euy; 0xdfuy; 0xb2uy; 0x7cuy; 0x79uy; 0x3buy; 0x7auy; 0x62uy; 0x3duy; 0x4buy; 0xc9uy; 0x9buy; 0x2auy; 0x2euy; 0x7cuy; 0xa2uy; 0xb1uy; 0x11uy; 0x98uy; 0xa7uy; 0x34uy; 0x1auy; 0x00uy; 0xf3uy; 0xd1uy; 0xbcuy; 0x18uy; 0x22uy; 0xbauy; 0x02uy; 0x56uy; 0x62uy; 0x31uy; 0x10uy; 0x11uy; 0x6duy; 0xe0uy; 0x54uy; 0x9duy; 0x40uy; 0x1fuy; 0x26uy; 0x80uy; 0x41uy; 0xcauy; 0x3fuy; 0x68uy; 0x0fuy; 0x32uy; 0x1duy; 0x0auy; 0x8euy; 0x79uy; 0xd8uy; 0xa4uy; 0x1buy; 0x29uy; 0x1cuy; 0x90uy; 0x8euy; 0xc5uy; 0xe3uy; 0xb4uy; 0x91uy; 0x37uy; 0x9auy; 0x97uy; 0x86uy; 0x99uy; 0xd5uy; 0x09uy; 0xc5uy; 0xbbuy; 0xa3uy; 0x3fuy; 0x21uy; 0x29uy; 0x82uy; 0x14uy; 0x5cuy; 0xabuy; 0x25uy; 0xfbuy; 0xf2uy; 0x4fuy; 0x58uy; 0x26uy; 0xd4uy; 0x83uy; 0xaauy; 0x66uy; 0x89uy; 0x67uy; 0x7euy; 0xc0uy; 0x49uy; 0xe1uy; 0x11uy; 0x10uy; 0x7fuy; 0x7auy; 0xdauy; 0x29uy; 0x04uy; 0xffuy; 0xf0uy; 0xcbuy; 0x09uy; 0x7cuy; 0x9duy; 0xfauy; 0x03uy; 0x6fuy; 0x81uy; 0x09uy; 0x31uy; 0x60uy; 0xfbuy; 0x08uy; 0xfauy; 0x74uy; 0xd3uy; 0x64uy; 0x44uy; 0x7cuy; 0x55uy; 0x85uy; 0xecuy; 0x9cuy; 0x6euy; 0x25uy; 0xb7uy; 0x6cuy; 0xc5uy; 0x37uy; 0xb6uy; 0x83uy; 0x87uy; 0x72uy; 0x95uy; 0x8buy; 0x9duy; 0xe1uy; 0x69uy; 0x5cuy; 0x31uy; 0x95uy; 0x42uy; 0xa6uy; 0x2cuy; 0xd1uy; 0x36uy; 0x47uy; 0x1fuy; 0xecuy; 0x54uy; 0xabuy; 0xa2uy; 0x1cuy; 0xd8uy; 0x00uy; 0xccuy; 0xbcuy; 0x0duy; 0x65uy; 0xe2uy; 0x67uy; 0xbfuy; 0xbcuy; 0xeauy; 0xeeuy; 0x9euy; 0xe4uy; 0x36uy; 0x95uy; 0xbeuy; 0x73uy; 0xd9uy; 0xa6uy; 0xd9uy; 0x0fuy; 0xa0uy; 0xccuy; 0x82uy; 0x76uy; 0x26uy; 0xaduy; 0x5buy; 0x58uy; 0x6cuy; 0x4euy; 0xabuy; 0x29uy; 0x64uy; 0xd3uy; 0xd9uy; 0xa9uy; 0x08uy; 0x8cuy; 0x1duy; 0xa1uy; 0x4fuy; 0x80uy; 0xd8uy; 0x3fuy; 0x94uy; 0xfbuy; 
0xd3uy; 0x7buy; 0xfcuy; 0xd1uy; 0x2buy; 0xc3uy; 0x21uy; 0xebuy; 0xe5uy; 0x1cuy; 0x84uy; 0x23uy; 0x7fuy; 0x4buy; 0xfauy; 0xdbuy; 0x34uy; 0x18uy; 0xa2uy; 0xc2uy; 0xe5uy; 0x13uy; 0xfeuy; 0x6cuy; 0x49uy; 0x81uy; 0xd2uy; 0x73uy; 0xe7uy; 0xe2uy; 0xd7uy; 0xe4uy; 0x4fuy; 0x4buy; 0x08uy; 0x6euy; 0xb1uy; 0x12uy; 0x22uy; 0x10uy; 0x9duy; 0xacuy; 0x51uy; 0x1euy; 0x17uy; 0xd9uy; 0x8auy; 0x0buy; 0x42uy; 0x88uy; 0x16uy; 0x81uy; 0x37uy; 0x7cuy; 0x6auy; 0xf7uy; 0xefuy; 0x2duy; 0xe3uy; 0xd9uy; 0xf8uy; 0x5fuy; 0xe0uy; 0x53uy; 0x27uy; 0x74uy; 0xb9uy; 0xe2uy; 0xd6uy; 0x1cuy; 0x80uy; 0x2cuy; 0x52uy; 0x65uy; ] in
assert_norm (List.Tot.length l = 513);
B.gcmalloc_of_list HyperStack.root l | {
"file_name": "providers/test/vectors/Test.Vectors.Chacha20Poly1305.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 38,
"end_line": 354,
"start_col": 0,
"start_line": 351
} | module Test.Vectors.Chacha20Poly1305
module B = LowStar.Buffer
#set-options "--max_fuel 0 --max_ifuel 0"
let key0: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x1cuy; 0x92uy; 0x40uy; 0xa5uy; 0xebuy; 0x55uy; 0xd3uy; 0x8auy; 0xf3uy; 0x33uy; 0x88uy; 0x86uy; 0x04uy; 0xf6uy; 0xb5uy; 0xf0uy; 0x47uy; 0x39uy; 0x17uy; 0xc1uy; 0x40uy; 0x2buy; 0x80uy; 0x09uy; 0x9duy; 0xcauy; 0x5cuy; 0xbcuy; 0x20uy; 0x70uy; 0x75uy; 0xc0uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key0_len: (x:UInt32.t { UInt32.v x = B.length key0 }) =
32ul
let nonce0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x01uy; 0x02uy; 0x03uy; 0x04uy; 0x05uy; 0x06uy; 0x07uy; 0x08uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce0_len: (x:UInt32.t { UInt32.v x = B.length nonce0 }) =
12ul
let aad0: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0xf3uy; 0x33uy; 0x88uy; 0x86uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x4euy; 0x91uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad0_len: (x:UInt32.t { UInt32.v x = B.length aad0 }) =
12ul
let input0: (b: B.buffer UInt8.t { B.length b = 265 /\ B.recallable b /\ B.disjoint b aad0 }) =
B.recall aad0;[@inline_let] let l = [ 0x49uy; 0x6euy; 0x74uy; 0x65uy; 0x72uy; 0x6euy; 0x65uy; 0x74uy; 0x2duy; 0x44uy; 0x72uy; 0x61uy; 0x66uy; 0x74uy; 0x73uy; 0x20uy; 0x61uy; 0x72uy; 0x65uy; 0x20uy; 0x64uy; 0x72uy; 0x61uy; 0x66uy; 0x74uy; 0x20uy; 0x64uy; 0x6fuy; 0x63uy; 0x75uy; 0x6duy; 0x65uy; 0x6euy; 0x74uy; 0x73uy; 0x20uy; 0x76uy; 0x61uy; 0x6cuy; 0x69uy; 0x64uy; 0x20uy; 0x66uy; 0x6fuy; 0x72uy; 0x20uy; 0x61uy; 0x20uy; 0x6duy; 0x61uy; 0x78uy; 0x69uy; 0x6duy; 0x75uy; 0x6duy; 0x20uy; 0x6fuy; 0x66uy; 0x20uy; 0x73uy; 0x69uy; 0x78uy; 0x20uy; 0x6duy; 0x6fuy; 0x6euy; 0x74uy; 0x68uy; 0x73uy; 0x20uy; 0x61uy; 0x6euy; 0x64uy; 0x20uy; 0x6duy; 0x61uy; 0x79uy; 0x20uy; 0x62uy; 0x65uy; 0x20uy; 0x75uy; 0x70uy; 0x64uy; 0x61uy; 0x74uy; 0x65uy; 0x64uy; 0x2cuy; 0x20uy; 0x72uy; 0x65uy; 0x70uy; 0x6cuy; 0x61uy; 0x63uy; 0x65uy; 0x64uy; 0x2cuy; 0x20uy; 0x6fuy; 0x72uy; 0x20uy; 0x6fuy; 0x62uy; 0x73uy; 0x6fuy; 0x6cuy; 0x65uy; 0x74uy; 0x65uy; 0x64uy; 0x20uy; 0x62uy; 0x79uy; 0x20uy; 0x6fuy; 0x74uy; 0x68uy; 0x65uy; 0x72uy; 0x20uy; 0x64uy; 0x6fuy; 0x63uy; 0x75uy; 0x6duy; 0x65uy; 0x6euy; 0x74uy; 0x73uy; 0x20uy; 0x61uy; 0x74uy; 0x20uy; 0x61uy; 0x6euy; 0x79uy; 0x20uy; 0x74uy; 0x69uy; 0x6duy; 0x65uy; 0x2euy; 0x20uy; 0x49uy; 0x74uy; 0x20uy; 0x69uy; 0x73uy; 0x20uy; 0x69uy; 0x6euy; 0x61uy; 0x70uy; 0x70uy; 0x72uy; 0x6fuy; 0x70uy; 0x72uy; 0x69uy; 0x61uy; 0x74uy; 0x65uy; 0x20uy; 0x74uy; 0x6fuy; 0x20uy; 0x75uy; 0x73uy; 0x65uy; 0x20uy; 0x49uy; 0x6euy; 0x74uy; 0x65uy; 0x72uy; 0x6euy; 0x65uy; 0x74uy; 0x2duy; 0x44uy; 0x72uy; 0x61uy; 0x66uy; 0x74uy; 0x73uy; 0x20uy; 0x61uy; 0x73uy; 0x20uy; 0x72uy; 0x65uy; 0x66uy; 0x65uy; 0x72uy; 0x65uy; 0x6euy; 0x63uy; 0x65uy; 0x20uy; 0x6duy; 0x61uy; 0x74uy; 0x65uy; 0x72uy; 0x69uy; 0x61uy; 0x6cuy; 0x20uy; 0x6fuy; 0x72uy; 0x20uy; 0x74uy; 0x6fuy; 0x20uy; 0x63uy; 0x69uy; 0x74uy; 0x65uy; 0x20uy; 0x74uy; 0x68uy; 0x65uy; 0x6duy; 0x20uy; 0x6fuy; 0x74uy; 0x68uy; 0x65uy; 0x72uy; 0x20uy; 0x74uy; 0x68uy; 0x61uy; 0x6euy; 0x20uy; 0x61uy; 0x73uy; 0x20uy; 0x2fuy; 0xe2uy; 0x80uy; 0x9cuy; 0x77uy; 0x6fuy; 0x72uy; 0x6buy; 0x20uy; 0x69uy; 0x6euy; 0x20uy; 0x70uy; 0x72uy; 0x6fuy; 0x67uy; 0x72uy; 0x65uy; 0x73uy; 0x73uy; 0x2euy; 0x2fuy; 0xe2uy; 0x80uy; 0x9duy; ] in
assert_norm (List.Tot.length l = 265);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input0_len: (x:UInt32.t { UInt32.v x = B.length input0 }) =
265ul
let output0: (b: B.buffer UInt8.t { B.length b = 281 /\ B.recallable b }) =
[@inline_let] let l = [ 0x64uy; 0xa0uy; 0x86uy; 0x15uy; 0x75uy; 0x86uy; 0x1auy; 0xf4uy; 0x60uy; 0xf0uy; 0x62uy; 0xc7uy; 0x9buy; 0xe6uy; 0x43uy; 0xbduy; 0x5euy; 0x80uy; 0x5cuy; 0xfduy; 0x34uy; 0x5cuy; 0xf3uy; 0x89uy; 0xf1uy; 0x08uy; 0x67uy; 0x0auy; 0xc7uy; 0x6cuy; 0x8cuy; 0xb2uy; 0x4cuy; 0x6cuy; 0xfcuy; 0x18uy; 0x75uy; 0x5duy; 0x43uy; 0xeeuy; 0xa0uy; 0x9euy; 0xe9uy; 0x4euy; 0x38uy; 0x2duy; 0x26uy; 0xb0uy; 0xbduy; 0xb7uy; 0xb7uy; 0x3cuy; 0x32uy; 0x1buy; 0x01uy; 0x00uy; 0xd4uy; 0xf0uy; 0x3buy; 0x7fuy; 0x35uy; 0x58uy; 0x94uy; 0xcfuy; 0x33uy; 0x2fuy; 0x83uy; 0x0euy; 0x71uy; 0x0buy; 0x97uy; 0xceuy; 0x98uy; 0xc8uy; 0xa8uy; 0x4auy; 0xbduy; 0x0buy; 0x94uy; 0x81uy; 0x14uy; 0xaduy; 0x17uy; 0x6euy; 0x00uy; 0x8duy; 0x33uy; 0xbduy; 0x60uy; 0xf9uy; 0x82uy; 0xb1uy; 0xffuy; 0x37uy; 0xc8uy; 0x55uy; 0x97uy; 0x97uy; 0xa0uy; 0x6euy; 0xf4uy; 0xf0uy; 0xefuy; 0x61uy; 0xc1uy; 0x86uy; 0x32uy; 0x4euy; 0x2buy; 0x35uy; 0x06uy; 0x38uy; 0x36uy; 0x06uy; 0x90uy; 0x7buy; 0x6auy; 0x7cuy; 0x02uy; 0xb0uy; 0xf9uy; 0xf6uy; 0x15uy; 0x7buy; 0x53uy; 0xc8uy; 0x67uy; 0xe4uy; 0xb9uy; 0x16uy; 0x6cuy; 0x76uy; 0x7buy; 0x80uy; 0x4duy; 0x46uy; 0xa5uy; 0x9buy; 0x52uy; 0x16uy; 0xcduy; 0xe7uy; 0xa4uy; 0xe9uy; 0x90uy; 0x40uy; 0xc5uy; 0xa4uy; 0x04uy; 0x33uy; 0x22uy; 0x5euy; 0xe2uy; 0x82uy; 0xa1uy; 0xb0uy; 0xa0uy; 0x6cuy; 0x52uy; 0x3euy; 0xafuy; 0x45uy; 0x34uy; 0xd7uy; 0xf8uy; 0x3fuy; 0xa1uy; 0x15uy; 0x5buy; 0x00uy; 0x47uy; 0x71uy; 0x8cuy; 0xbcuy; 0x54uy; 0x6auy; 0x0duy; 0x07uy; 0x2buy; 0x04uy; 0xb3uy; 0x56uy; 0x4euy; 0xeauy; 0x1buy; 0x42uy; 0x22uy; 0x73uy; 0xf5uy; 0x48uy; 0x27uy; 0x1auy; 0x0buy; 0xb2uy; 0x31uy; 0x60uy; 0x53uy; 0xfauy; 0x76uy; 0x99uy; 0x19uy; 0x55uy; 0xebuy; 0xd6uy; 0x31uy; 0x59uy; 0x43uy; 0x4euy; 0xceuy; 0xbbuy; 0x4euy; 0x46uy; 0x6duy; 0xaeuy; 0x5auy; 0x10uy; 0x73uy; 0xa6uy; 0x72uy; 0x76uy; 0x27uy; 0x09uy; 0x7auy; 0x10uy; 0x49uy; 0xe6uy; 0x17uy; 0xd9uy; 0x1duy; 0x36uy; 0x10uy; 0x94uy; 0xfauy; 0x68uy; 0xf0uy; 0xffuy; 0x77uy; 0x98uy; 0x71uy; 0x30uy; 0x30uy; 0x5buy; 0xeauy; 0xbauy; 0x2euy; 0xdauy; 0x04uy; 0xdfuy; 0x99uy; 0x7buy; 0x71uy; 0x4duy; 0x6cuy; 0x6fuy; 0x2cuy; 0x29uy; 0xa6uy; 0xaduy; 0x5cuy; 0xb4uy; 0x02uy; 0x2buy; 0x02uy; 0x70uy; 0x9buy; 0xeeuy; 0xaduy; 0x9duy; 0x67uy; 0x89uy; 0x0cuy; 0xbbuy; 0x22uy; 0x39uy; 0x23uy; 0x36uy; 0xfeuy; 0xa1uy; 0x85uy; 0x1fuy; 0x38uy; ] in
assert_norm (List.Tot.length l = 281);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output0_len: (x:UInt32.t { UInt32.v x = B.length output0 }) =
281ul
let key1: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4cuy; 0xf5uy; 0x96uy; 0x83uy; 0x38uy; 0xe6uy; 0xaeuy; 0x7fuy; 0x2duy; 0x29uy; 0x25uy; 0x76uy; 0xd5uy; 0x75uy; 0x27uy; 0x86uy; 0x91uy; 0x9auy; 0x27uy; 0x7auy; 0xfbuy; 0x46uy; 0xc5uy; 0xefuy; 0x94uy; 0x81uy; 0x79uy; 0x57uy; 0x14uy; 0x59uy; 0x40uy; 0x68uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key1_len: (x:UInt32.t { UInt32.v x = B.length key1 }) =
32ul
let nonce1: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0xcauy; 0xbfuy; 0x33uy; 0x71uy; 0x32uy; 0x45uy; 0x77uy; 0x8euy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce1_len: (x:UInt32.t { UInt32.v x = B.length nonce1 }) =
12ul
let aad1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad1_len: (x:UInt32.t { UInt32.v x = B.length aad1 }) =
0ul
let input1: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad1 }) =
B.recall aad1;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input1_len: (x:UInt32.t { UInt32.v x = B.length input1 }) =
0ul
let output1: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xeauy; 0xe0uy; 0x1euy; 0x9euy; 0x2cuy; 0x91uy; 0xaauy; 0xe1uy; 0xdbuy; 0x5duy; 0x99uy; 0x3fuy; 0x8auy; 0xf7uy; 0x69uy; 0x92uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output1_len: (x:UInt32.t { UInt32.v x = B.length output1 }) =
16ul
let key2: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x2duy; 0xb0uy; 0x5duy; 0x40uy; 0xc8uy; 0xeduy; 0x44uy; 0x88uy; 0x34uy; 0xd1uy; 0x13uy; 0xafuy; 0x57uy; 0xa1uy; 0xebuy; 0x3auy; 0x2auy; 0x80uy; 0x51uy; 0x36uy; 0xecuy; 0x5buy; 0xbcuy; 0x08uy; 0x93uy; 0x84uy; 0x21uy; 0xb5uy; 0x13uy; 0x88uy; 0x3cuy; 0x0duy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key2_len: (x:UInt32.t { UInt32.v x = B.length key2 }) =
32ul
let nonce2: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x3duy; 0x86uy; 0xb5uy; 0x6buy; 0xc8uy; 0xa3uy; 0x1fuy; 0x1duy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce2_len: (x:UInt32.t { UInt32.v x = B.length nonce2 }) =
12ul
let aad2: (b: B.buffer UInt8.t { B.length b = 8 /\ B.recallable b }) =
[@inline_let] let l = [ 0x33uy; 0x10uy; 0x41uy; 0x12uy; 0x1fuy; 0xf3uy; 0xd2uy; 0x6buy; ] in
assert_norm (List.Tot.length l = 8);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad2_len: (x:UInt32.t { UInt32.v x = B.length aad2 }) =
8ul
let input2: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b /\ B.disjoint b aad2 }) =
B.recall aad2;[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input2_len: (x:UInt32.t { UInt32.v x = B.length input2 }) =
0ul
let output2: (b: B.buffer UInt8.t { B.length b = 16 /\ B.recallable b }) =
[@inline_let] let l = [ 0xdduy; 0x6buy; 0x3buy; 0x82uy; 0xceuy; 0x5auy; 0xbduy; 0xd6uy; 0xa9uy; 0x35uy; 0x83uy; 0xd8uy; 0x8cuy; 0x3duy; 0x85uy; 0x77uy; ] in
assert_norm (List.Tot.length l = 16);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output2_len: (x:UInt32.t { UInt32.v x = B.length output2 }) =
16ul
let key3: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x4buy; 0x28uy; 0x4buy; 0xa3uy; 0x7buy; 0xbeuy; 0xe9uy; 0xf8uy; 0x31uy; 0x80uy; 0x82uy; 0xd7uy; 0xd8uy; 0xe8uy; 0xb5uy; 0xa1uy; 0xe2uy; 0x18uy; 0x18uy; 0x8auy; 0x9cuy; 0xfauy; 0xa3uy; 0x3duy; 0x25uy; 0x71uy; 0x3euy; 0x40uy; 0xbcuy; 0x54uy; 0x7auy; 0x3euy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key3_len: (x:UInt32.t { UInt32.v x = B.length key3 }) =
32ul
let nonce3: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0xd2uy; 0x32uy; 0x1fuy; 0x29uy; 0x28uy; 0xc6uy; 0xc4uy; 0xc4uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce3_len: (x:UInt32.t { UInt32.v x = B.length nonce3 }) =
12ul
let aad3: (b: B.buffer UInt8.t { B.length b = 8 /\ B.recallable b }) =
[@inline_let] let l = [ 0x6auy; 0xe2uy; 0xaduy; 0x3fuy; 0x88uy; 0x39uy; 0x5auy; 0x40uy; ] in
assert_norm (List.Tot.length l = 8);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad3_len: (x:UInt32.t { UInt32.v x = B.length aad3 }) =
8ul
let input3: (b: B.buffer UInt8.t { B.length b = 1 /\ B.recallable b /\ B.disjoint b aad3 }) =
B.recall aad3;[@inline_let] let l = [ 0xa4uy; ] in
assert_norm (List.Tot.length l = 1);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input3_len: (x:UInt32.t { UInt32.v x = B.length input3 }) =
1ul
let output3: (b: B.buffer UInt8.t { B.length b = 17 /\ B.recallable b }) =
[@inline_let] let l = [ 0xb7uy; 0x1buy; 0xb0uy; 0x73uy; 0x59uy; 0xb0uy; 0x84uy; 0xb2uy; 0x6duy; 0x8euy; 0xabuy; 0x94uy; 0x31uy; 0xa1uy; 0xaeuy; 0xacuy; 0x89uy; ] in
assert_norm (List.Tot.length l = 17);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output3_len: (x:UInt32.t { UInt32.v x = B.length output3 }) =
17ul
let key4: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x66uy; 0xcauy; 0x9cuy; 0x23uy; 0x2auy; 0x4buy; 0x4buy; 0x31uy; 0x0euy; 0x92uy; 0x89uy; 0x8buy; 0xf4uy; 0x93uy; 0xc7uy; 0x87uy; 0x98uy; 0xa3uy; 0xd8uy; 0x39uy; 0xf8uy; 0xf4uy; 0xa7uy; 0x01uy; 0xc0uy; 0x2euy; 0x0auy; 0xa6uy; 0x7euy; 0x5auy; 0x78uy; 0x87uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key4_len: (x:UInt32.t { UInt32.v x = B.length key4 }) =
32ul
let nonce4: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x20uy; 0x1cuy; 0xaauy; 0x5fuy; 0x9cuy; 0xbfuy; 0x92uy; 0x30uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce4_len: (x:UInt32.t { UInt32.v x = B.length nonce4 }) =
12ul
let aad4: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad4_len: (x:UInt32.t { UInt32.v x = B.length aad4 }) =
0ul
let input4: (b: B.buffer UInt8.t { B.length b = 1 /\ B.recallable b /\ B.disjoint b aad4 }) =
B.recall aad4;[@inline_let] let l = [ 0x2duy; ] in
assert_norm (List.Tot.length l = 1);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input4_len: (x:UInt32.t { UInt32.v x = B.length input4 }) =
1ul
let output4: (b: B.buffer UInt8.t { B.length b = 17 /\ B.recallable b }) =
[@inline_let] let l = [ 0xbfuy; 0xe1uy; 0x5buy; 0x0buy; 0xdbuy; 0x6buy; 0xf5uy; 0x5euy; 0x6cuy; 0x5duy; 0x84uy; 0x44uy; 0x39uy; 0x81uy; 0xc1uy; 0x9cuy; 0xacuy; ] in
assert_norm (List.Tot.length l = 17);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output4_len: (x:UInt32.t { UInt32.v x = B.length output4 }) =
17ul
let key5: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x68uy; 0x7buy; 0x8duy; 0x8euy; 0xe3uy; 0xc4uy; 0xdduy; 0xaeuy; 0xdfuy; 0x72uy; 0x7fuy; 0x53uy; 0x72uy; 0x25uy; 0x1euy; 0x78uy; 0x91uy; 0xcbuy; 0x69uy; 0x76uy; 0x1fuy; 0x49uy; 0x93uy; 0xf9uy; 0x6fuy; 0x21uy; 0xccuy; 0x39uy; 0x9cuy; 0xaduy; 0xb1uy; 0x01uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key5_len: (x:UInt32.t { UInt32.v x = B.length key5 }) =
32ul
let nonce5: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0xdfuy; 0x51uy; 0x84uy; 0x82uy; 0x42uy; 0x0cuy; 0x75uy; 0x9cuy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce5_len: (x:UInt32.t { UInt32.v x = B.length nonce5 }) =
12ul
let aad5: (b: B.buffer UInt8.t { B.length b = 7 /\ B.recallable b }) =
[@inline_let] let l = [ 0x70uy; 0xd3uy; 0x33uy; 0xf3uy; 0x8buy; 0x18uy; 0x0buy; ] in
assert_norm (List.Tot.length l = 7);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad5_len: (x:UInt32.t { UInt32.v x = B.length aad5 }) =
7ul
let input5: (b: B.buffer UInt8.t { B.length b = 129 /\ B.recallable b /\ B.disjoint b aad5 }) =
B.recall aad5;[@inline_let] let l = [ 0x33uy; 0x2fuy; 0x94uy; 0xc1uy; 0xa4uy; 0xefuy; 0xccuy; 0x2auy; 0x5buy; 0xa6uy; 0xe5uy; 0x8fuy; 0x1duy; 0x40uy; 0xf0uy; 0x92uy; 0x3cuy; 0xd9uy; 0x24uy; 0x11uy; 0xa9uy; 0x71uy; 0xf9uy; 0x37uy; 0x14uy; 0x99uy; 0xfauy; 0xbeuy; 0xe6uy; 0x80uy; 0xdeuy; 0x50uy; 0xc9uy; 0x96uy; 0xd4uy; 0xb0uy; 0xecuy; 0x9euy; 0x17uy; 0xecuy; 0xd2uy; 0x5euy; 0x72uy; 0x99uy; 0xfcuy; 0x0auy; 0xe1uy; 0xcbuy; 0x48uy; 0xd2uy; 0x85uy; 0xdduy; 0x2fuy; 0x90uy; 0xe0uy; 0x66uy; 0x3buy; 0xe6uy; 0x20uy; 0x74uy; 0xbeuy; 0x23uy; 0x8fuy; 0xcbuy; 0xb4uy; 0xe4uy; 0xdauy; 0x48uy; 0x40uy; 0xa6uy; 0xd1uy; 0x1buy; 0xc7uy; 0x42uy; 0xceuy; 0x2fuy; 0x0cuy; 0xa6uy; 0x85uy; 0x6euy; 0x87uy; 0x37uy; 0x03uy; 0xb1uy; 0x7cuy; 0x25uy; 0x96uy; 0xa3uy; 0x05uy; 0xd8uy; 0xb0uy; 0xf4uy; 0xeduy; 0xeauy; 0xc2uy; 0xf0uy; 0x31uy; 0x98uy; 0x6cuy; 0xd1uy; 0x14uy; 0x25uy; 0xc0uy; 0xcbuy; 0x01uy; 0x74uy; 0xd0uy; 0x82uy; 0xf4uy; 0x36uy; 0xf5uy; 0x41uy; 0xd5uy; 0xdcuy; 0xcauy; 0xc5uy; 0xbbuy; 0x98uy; 0xfeuy; 0xfcuy; 0x69uy; 0x21uy; 0x70uy; 0xd8uy; 0xa4uy; 0x4buy; 0xc8uy; 0xdeuy; 0x8fuy; ] in
assert_norm (List.Tot.length l = 129);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input5_len: (x:UInt32.t { UInt32.v x = B.length input5 }) =
129ul
let output5: (b: B.buffer UInt8.t { B.length b = 145 /\ B.recallable b }) =
[@inline_let] let l = [ 0x8buy; 0x06uy; 0xd3uy; 0x31uy; 0xb0uy; 0x93uy; 0x45uy; 0xb1uy; 0x75uy; 0x6euy; 0x26uy; 0xf9uy; 0x67uy; 0xbcuy; 0x90uy; 0x15uy; 0x81uy; 0x2cuy; 0xb5uy; 0xf0uy; 0xc6uy; 0x2buy; 0xc7uy; 0x8cuy; 0x56uy; 0xd1uy; 0xbfuy; 0x69uy; 0x6cuy; 0x07uy; 0xa0uy; 0xdauy; 0x65uy; 0x27uy; 0xc9uy; 0x90uy; 0x3duy; 0xefuy; 0x4buy; 0x11uy; 0x0fuy; 0x19uy; 0x07uy; 0xfduy; 0x29uy; 0x92uy; 0xd9uy; 0xc8uy; 0xf7uy; 0x99uy; 0x2euy; 0x4auy; 0xd0uy; 0xb8uy; 0x2cuy; 0xdcuy; 0x93uy; 0xf5uy; 0x9euy; 0x33uy; 0x78uy; 0xd1uy; 0x37uy; 0xc3uy; 0x66uy; 0xd7uy; 0x5euy; 0xbcuy; 0x44uy; 0xbfuy; 0x53uy; 0xa5uy; 0xbcuy; 0xc4uy; 0xcbuy; 0x7buy; 0x3auy; 0x8euy; 0x7fuy; 0x02uy; 0xbduy; 0xbbuy; 0xe7uy; 0xcauy; 0xa6uy; 0x6cuy; 0x6buy; 0x93uy; 0x21uy; 0x93uy; 0x10uy; 0x61uy; 0xe7uy; 0x69uy; 0xd0uy; 0x78uy; 0xf3uy; 0x07uy; 0x5auy; 0x1auy; 0x8fuy; 0x73uy; 0xaauy; 0xb1uy; 0x4euy; 0xd3uy; 0xdauy; 0x4fuy; 0xf3uy; 0x32uy; 0xe1uy; 0x66uy; 0x3euy; 0x6cuy; 0xc6uy; 0x13uy; 0xbauy; 0x06uy; 0x5buy; 0xfcuy; 0x6auy; 0xe5uy; 0x6fuy; 0x60uy; 0xfbuy; 0x07uy; 0x40uy; 0xb0uy; 0x8cuy; 0x9duy; 0x84uy; 0x43uy; 0x6buy; 0xc1uy; 0xf7uy; 0x8duy; 0x8duy; 0x31uy; 0xf7uy; 0x7auy; 0x39uy; 0x4duy; 0x8fuy; 0x9auy; 0xebuy; ] in
assert_norm (List.Tot.length l = 145);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output5_len: (x:UInt32.t { UInt32.v x = B.length output5 }) =
145ul
let key6: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0x8duy; 0xb8uy; 0x91uy; 0x48uy; 0xf0uy; 0xe7uy; 0x0auy; 0xbduy; 0xf9uy; 0x3fuy; 0xcduy; 0xd9uy; 0xa0uy; 0x1euy; 0x42uy; 0x4cuy; 0xe7uy; 0xdeuy; 0x25uy; 0x3duy; 0xa3uy; 0xd7uy; 0x05uy; 0x80uy; 0x8duy; 0xf2uy; 0x82uy; 0xacuy; 0x44uy; 0x16uy; 0x51uy; 0x01uy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key6_len: (x:UInt32.t { UInt32.v x = B.length key6 }) =
32ul
let nonce6: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0xdeuy; 0x7buy; 0xefuy; 0xc3uy; 0x65uy; 0x1buy; 0x68uy; 0xb0uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce6_len: (x:UInt32.t { UInt32.v x = B.length nonce6 }) =
12ul
let aad6: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad6_len: (x:UInt32.t { UInt32.v x = B.length aad6 }) =
0ul
let input6: (b: B.buffer UInt8.t { B.length b = 256 /\ B.recallable b /\ B.disjoint b aad6 }) =
B.recall aad6;[@inline_let] let l = [ 0x9buy; 0x18uy; 0xdbuy; 0xdduy; 0x9auy; 0x0fuy; 0x3euy; 0xa5uy; 0x15uy; 0x17uy; 0xdeuy; 0xdfuy; 0x08uy; 0x9duy; 0x65uy; 0x0auy; 0x67uy; 0x30uy; 0x12uy; 0xe2uy; 0x34uy; 0x77uy; 0x4buy; 0xc1uy; 0xd9uy; 0xc6uy; 0x1fuy; 0xabuy; 0xc6uy; 0x18uy; 0x50uy; 0x17uy; 0xa7uy; 0x9duy; 0x3cuy; 0xa6uy; 0xc5uy; 0x35uy; 0x8cuy; 0x1cuy; 0xc0uy; 0xa1uy; 0x7cuy; 0x9fuy; 0x03uy; 0x89uy; 0xcauy; 0xe1uy; 0xe6uy; 0xe9uy; 0xd4uy; 0xd3uy; 0x88uy; 0xdbuy; 0xb4uy; 0x51uy; 0x9duy; 0xecuy; 0xb4uy; 0xfcuy; 0x52uy; 0xeeuy; 0x6duy; 0xf1uy; 0x75uy; 0x42uy; 0xc6uy; 0xfduy; 0xbduy; 0x7auy; 0x8euy; 0x86uy; 0xfcuy; 0x44uy; 0xb3uy; 0x4fuy; 0xf3uy; 0xeauy; 0x67uy; 0x5auy; 0x41uy; 0x13uy; 0xbauy; 0xb0uy; 0xdcuy; 0xe1uy; 0xd3uy; 0x2auy; 0x7cuy; 0x22uy; 0xb3uy; 0xcauy; 0xacuy; 0x6auy; 0x37uy; 0x98uy; 0x3euy; 0x1duy; 0x40uy; 0x97uy; 0xf7uy; 0x9buy; 0x1duy; 0x36uy; 0x6buy; 0xb3uy; 0x28uy; 0xbduy; 0x60uy; 0x82uy; 0x47uy; 0x34uy; 0xaauy; 0x2fuy; 0x7duy; 0xe9uy; 0xa8uy; 0x70uy; 0x81uy; 0x57uy; 0xd4uy; 0xb9uy; 0x77uy; 0x0auy; 0x9duy; 0x29uy; 0xa7uy; 0x84uy; 0x52uy; 0x4fuy; 0xc2uy; 0x4auy; 0x40uy; 0x3buy; 0x3cuy; 0xd4uy; 0xc9uy; 0x2auy; 0xdbuy; 0x4auy; 0x53uy; 0xc4uy; 0xbeuy; 0x80uy; 0xe9uy; 0x51uy; 0x7fuy; 0x8fuy; 0xc7uy; 0xa2uy; 0xceuy; 0x82uy; 0x5cuy; 0x91uy; 0x1euy; 0x74uy; 0xd9uy; 0xd0uy; 0xbduy; 0xd5uy; 0xf3uy; 0xfduy; 0xdauy; 0x4duy; 0x25uy; 0xb4uy; 0xbbuy; 0x2duy; 0xacuy; 0x2fuy; 0x3duy; 0x71uy; 0x85uy; 0x7buy; 0xcfuy; 0x3cuy; 0x7buy; 0x3euy; 0x0euy; 0x22uy; 0x78uy; 0x0cuy; 0x29uy; 0xbfuy; 0xe4uy; 0xf4uy; 0x57uy; 0xb3uy; 0xcbuy; 0x49uy; 0xa0uy; 0xfcuy; 0x1euy; 0x05uy; 0x4euy; 0x16uy; 0xbcuy; 0xd5uy; 0xa8uy; 0xa3uy; 0xeeuy; 0x05uy; 0x35uy; 0xc6uy; 0x7cuy; 0xabuy; 0x60uy; 0x14uy; 0x55uy; 0x1auy; 0x8euy; 0xc5uy; 0x88uy; 0x5duy; 0xd5uy; 0x81uy; 0xc2uy; 0x81uy; 0xa5uy; 0xc4uy; 0x60uy; 0xdbuy; 0xafuy; 0x77uy; 0x91uy; 0xe1uy; 0xceuy; 0xa2uy; 0x7euy; 0x7fuy; 0x42uy; 0xe3uy; 0xb0uy; 0x13uy; 0x1cuy; 0x1fuy; 0x25uy; 0x60uy; 0x21uy; 0xe2uy; 0x40uy; 0x5fuy; 0x99uy; 0xb7uy; 0x73uy; 0xecuy; 0x9buy; 0x2buy; 0xf0uy; 0x65uy; 0x11uy; 0xc8uy; 0xd0uy; 0x0auy; 0x9fuy; 0xd3uy; ] in
assert_norm (List.Tot.length l = 256);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input6_len: (x:UInt32.t { UInt32.v x = B.length input6 }) =
256ul
let output6: (b: B.buffer UInt8.t { B.length b = 272 /\ B.recallable b }) =
[@inline_let] let l = [ 0x85uy; 0x04uy; 0xc2uy; 0xeduy; 0x8duy; 0xfduy; 0x97uy; 0x5cuy; 0xd2uy; 0xb7uy; 0xe2uy; 0xc1uy; 0x6buy; 0xa3uy; 0xbauy; 0xf8uy; 0xc9uy; 0x50uy; 0xc3uy; 0xc6uy; 0xa5uy; 0xe3uy; 0xa4uy; 0x7cuy; 0xc3uy; 0x23uy; 0x49uy; 0x5euy; 0xa9uy; 0xb9uy; 0x32uy; 0xebuy; 0x8auy; 0x7cuy; 0xcauy; 0xe5uy; 0xecuy; 0xfbuy; 0x7cuy; 0xc0uy; 0xcbuy; 0x7duy; 0xdcuy; 0x2cuy; 0x9duy; 0x92uy; 0x55uy; 0x21uy; 0x0auy; 0xc8uy; 0x43uy; 0x63uy; 0x59uy; 0x0auy; 0x31uy; 0x70uy; 0x82uy; 0x67uy; 0x41uy; 0x03uy; 0xf8uy; 0xdfuy; 0xf2uy; 0xacuy; 0xa7uy; 0x02uy; 0xd4uy; 0xd5uy; 0x8auy; 0x2duy; 0xc8uy; 0x99uy; 0x19uy; 0x66uy; 0xd0uy; 0xf6uy; 0x88uy; 0x2cuy; 0x77uy; 0xd9uy; 0xd4uy; 0x0duy; 0x6cuy; 0xbduy; 0x98uy; 0xdeuy; 0xe7uy; 0x7fuy; 0xaduy; 0x7euy; 0x8auy; 0xfbuy; 0xe9uy; 0x4buy; 0xe5uy; 0xf7uy; 0xe5uy; 0x50uy; 0xa0uy; 0x90uy; 0x3fuy; 0xd6uy; 0x22uy; 0x53uy; 0xe3uy; 0xfeuy; 0x1buy; 0xccuy; 0x79uy; 0x3buy; 0xecuy; 0x12uy; 0x47uy; 0x52uy; 0xa7uy; 0xd6uy; 0x04uy; 0xe3uy; 0x52uy; 0xe6uy; 0x93uy; 0x90uy; 0x91uy; 0x32uy; 0x73uy; 0x79uy; 0xb8uy; 0xd0uy; 0x31uy; 0xdeuy; 0x1fuy; 0x9fuy; 0x2fuy; 0x05uy; 0x38uy; 0x54uy; 0x2fuy; 0x35uy; 0x04uy; 0x39uy; 0xe0uy; 0xa7uy; 0xbauy; 0xc6uy; 0x52uy; 0xf6uy; 0x37uy; 0x65uy; 0x4cuy; 0x07uy; 0xa9uy; 0x7euy; 0xb3uy; 0x21uy; 0x6fuy; 0x74uy; 0x8cuy; 0xc9uy; 0xdeuy; 0xdbuy; 0x65uy; 0x1buy; 0x9buy; 0xaauy; 0x60uy; 0xb1uy; 0x03uy; 0x30uy; 0x6buy; 0xb2uy; 0x03uy; 0xc4uy; 0x1cuy; 0x04uy; 0xf8uy; 0x0fuy; 0x64uy; 0xafuy; 0x46uy; 0xe4uy; 0x65uy; 0x99uy; 0x49uy; 0xe2uy; 0xeauy; 0xceuy; 0x78uy; 0x00uy; 0xd8uy; 0x8buy; 0xd5uy; 0x2euy; 0xcfuy; 0xfcuy; 0x40uy; 0x49uy; 0xe8uy; 0x58uy; 0xdcuy; 0x34uy; 0x9cuy; 0x8cuy; 0x61uy; 0xbfuy; 0x0auy; 0x8euy; 0xecuy; 0x39uy; 0xa9uy; 0x30uy; 0x05uy; 0x5auy; 0xd2uy; 0x56uy; 0x01uy; 0xc7uy; 0xdauy; 0x8fuy; 0x4euy; 0xbbuy; 0x43uy; 0xa3uy; 0x3auy; 0xf9uy; 0x15uy; 0x2auy; 0xd0uy; 0xa0uy; 0x7auy; 0x87uy; 0x34uy; 0x82uy; 0xfeuy; 0x8auy; 0xd1uy; 0x2duy; 0x5euy; 0xc7uy; 0xbfuy; 0x04uy; 0x53uy; 0x5fuy; 0x3buy; 0x36uy; 0xd4uy; 0x25uy; 0x5cuy; 0x34uy; 0x7auy; 0x8duy; 0xd5uy; 0x05uy; 0xceuy; 0x72uy; 0xcauy; 0xefuy; 0x7auy; 0x4buy; 0xbcuy; 0xb0uy; 0x10uy; 0x5cuy; 0x96uy; 0x42uy; 0x3auy; 0x00uy; 0x98uy; 0xcduy; 0x15uy; 0xe8uy; 0xb7uy; 0x53uy; ] in
assert_norm (List.Tot.length l = 272);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output6_len: (x:UInt32.t { UInt32.v x = B.length output6 }) =
272ul
let key7: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xf2uy; 0xaauy; 0x4fuy; 0x99uy; 0xfduy; 0x3euy; 0xa8uy; 0x53uy; 0xc1uy; 0x44uy; 0xe9uy; 0x81uy; 0x18uy; 0xdcuy; 0xf5uy; 0xf0uy; 0x3euy; 0x44uy; 0x15uy; 0x59uy; 0xe0uy; 0xc5uy; 0x44uy; 0x86uy; 0xc3uy; 0x91uy; 0xa8uy; 0x75uy; 0xc0uy; 0x12uy; 0x46uy; 0xbauy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key7_len: (x:UInt32.t { UInt32.v x = B.length key7 }) =
32ul
let nonce7: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0x0euy; 0x0duy; 0x57uy; 0xbbuy; 0x7buy; 0x40uy; 0x54uy; 0x02uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce7_len: (x:UInt32.t { UInt32.v x = B.length nonce7 }) =
12ul
let aad7: (b: B.buffer UInt8.t { B.length b = 0 /\ B.recallable b }) =
[@inline_let] let l = [ ] in
assert_norm (List.Tot.length l = 0);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad7_len: (x:UInt32.t { UInt32.v x = B.length aad7 }) =
0ul
let input7: (b: B.buffer UInt8.t { B.length b = 512 /\ B.recallable b /\ B.disjoint b aad7 }) =
B.recall aad7;[@inline_let] let l = [ 0xc3uy; 0x09uy; 0x94uy; 0x62uy; 0xe6uy; 0x46uy; 0x2euy; 0x10uy; 0xbeuy; 0x00uy; 0xe4uy; 0xfcuy; 0xf3uy; 0x40uy; 0xa3uy; 0xe2uy; 0x0fuy; 0xc2uy; 0x8buy; 0x28uy; 0xdcuy; 0xbauy; 0xb4uy; 0x3cuy; 0xe4uy; 0x21uy; 0x58uy; 0x61uy; 0xcduy; 0x8buy; 0xcduy; 0xfbuy; 0xacuy; 0x94uy; 0xa1uy; 0x45uy; 0xf5uy; 0x1cuy; 0xe1uy; 0x12uy; 0xe0uy; 0x3buy; 0x67uy; 0x21uy; 0x54uy; 0x5euy; 0x8cuy; 0xaauy; 0xcfuy; 0xdbuy; 0xb4uy; 0x51uy; 0xd4uy; 0x13uy; 0xdauy; 0xe6uy; 0x83uy; 0x89uy; 0xb6uy; 0x92uy; 0xe9uy; 0x21uy; 0x76uy; 0xa4uy; 0x93uy; 0x7duy; 0x0euy; 0xfduy; 0x96uy; 0x36uy; 0x03uy; 0x91uy; 0x43uy; 0x5cuy; 0x92uy; 0x49uy; 0x62uy; 0x61uy; 0x7buy; 0xebuy; 0x43uy; 0x89uy; 0xb8uy; 0x12uy; 0x20uy; 0x43uy; 0xd4uy; 0x47uy; 0x06uy; 0x84uy; 0xeeuy; 0x47uy; 0xe9uy; 0x8auy; 0x73uy; 0x15uy; 0x0fuy; 0x72uy; 0xcfuy; 0xeduy; 0xceuy; 0x96uy; 0xb2uy; 0x7fuy; 0x21uy; 0x45uy; 0x76uy; 0xebuy; 0x26uy; 0x28uy; 0x83uy; 0x6auy; 0xaduy; 0xaauy; 0xa6uy; 0x81uy; 0xd8uy; 0x55uy; 0xb1uy; 0xa3uy; 0x85uy; 0xb3uy; 0x0cuy; 0xdfuy; 0xf1uy; 0x69uy; 0x2duy; 0x97uy; 0x05uy; 0x2auy; 0xbcuy; 0x7cuy; 0x7buy; 0x25uy; 0xf8uy; 0x80uy; 0x9duy; 0x39uy; 0x25uy; 0xf3uy; 0x62uy; 0xf0uy; 0x66uy; 0x5euy; 0xf4uy; 0xa0uy; 0xcfuy; 0xd8uy; 0xfduy; 0x4fuy; 0xb1uy; 0x1fuy; 0x60uy; 0x3auy; 0x08uy; 0x47uy; 0xafuy; 0xe1uy; 0xf6uy; 0x10uy; 0x77uy; 0x09uy; 0xa7uy; 0x27uy; 0x8fuy; 0x9auy; 0x97uy; 0x5auy; 0x26uy; 0xfauy; 0xfeuy; 0x41uy; 0x32uy; 0x83uy; 0x10uy; 0xe0uy; 0x1duy; 0xbfuy; 0x64uy; 0x0duy; 0xf4uy; 0x1cuy; 0x32uy; 0x35uy; 0xe5uy; 0x1buy; 0x36uy; 0xefuy; 0xd4uy; 0x4auy; 0x93uy; 0x4duy; 0x00uy; 0x7cuy; 0xecuy; 0x02uy; 0x07uy; 0x8buy; 0x5duy; 0x7duy; 0x1buy; 0x0euy; 0xd1uy; 0xa6uy; 0xa5uy; 0x5duy; 0x7duy; 0x57uy; 0x88uy; 0xa8uy; 0xccuy; 0x81uy; 0xb4uy; 0x86uy; 0x4euy; 0xb4uy; 0x40uy; 0xe9uy; 0x1duy; 0xc3uy; 0xb1uy; 0x24uy; 0x3euy; 0x7fuy; 0xccuy; 0x8auy; 0x24uy; 0x9buy; 0xdfuy; 0x6duy; 0xf0uy; 0x39uy; 0x69uy; 0x3euy; 0x4cuy; 0xc0uy; 0x96uy; 0xe4uy; 0x13uy; 0xdauy; 0x90uy; 0xdauy; 0xf4uy; 0x95uy; 0x66uy; 0x8buy; 0x17uy; 0x17uy; 0xfeuy; 0x39uy; 0x43uy; 0x25uy; 0xaauy; 0xdauy; 0xa0uy; 0x43uy; 0x3cuy; 0xb1uy; 0x41uy; 0x02uy; 0xa3uy; 0xf0uy; 0xa7uy; 0x19uy; 0x59uy; 0xbcuy; 0x1duy; 0x7duy; 0x6cuy; 0x6duy; 0x91uy; 0x09uy; 0x5cuy; 0xb7uy; 0x5buy; 0x01uy; 0xd1uy; 0x6fuy; 0x17uy; 0x21uy; 0x97uy; 0xbfuy; 0x89uy; 0x71uy; 0xa5uy; 0xb0uy; 0x6euy; 0x07uy; 0x45uy; 0xfduy; 0x9duy; 0xeauy; 0x07uy; 0xf6uy; 0x7auy; 0x9fuy; 0x10uy; 0x18uy; 0x22uy; 0x30uy; 0x73uy; 0xacuy; 0xd4uy; 0x6buy; 0x72uy; 0x44uy; 0xeduy; 0xd9uy; 0x19uy; 0x9buy; 0x2duy; 0x4auy; 0x41uy; 0xdduy; 0xd1uy; 0x85uy; 0x5euy; 0x37uy; 0x19uy; 0xeduy; 0xd2uy; 0x15uy; 0x8fuy; 0x5euy; 0x91uy; 0xdbuy; 0x33uy; 0xf2uy; 0xe4uy; 0xdbuy; 0xffuy; 0x98uy; 0xfbuy; 0xa3uy; 0xb5uy; 0xcauy; 0x21uy; 0x69uy; 0x08uy; 0xe7uy; 0x8auy; 0xdfuy; 0x90uy; 0xffuy; 0x3euy; 0xe9uy; 0x20uy; 0x86uy; 0x3cuy; 0xe9uy; 0xfcuy; 0x0buy; 0xfeuy; 0x5cuy; 0x61uy; 0xaauy; 0x13uy; 0x92uy; 0x7fuy; 0x7buy; 0xecuy; 0xe0uy; 0x6duy; 0xa8uy; 0x23uy; 0x22uy; 0xf6uy; 0x6buy; 0x77uy; 0xc4uy; 0xfeuy; 0x40uy; 0x07uy; 0x3buy; 0xb6uy; 0xf6uy; 0x8euy; 0x5fuy; 0xd4uy; 0xb9uy; 0xb7uy; 0x0fuy; 0x21uy; 0x04uy; 0xefuy; 0x83uy; 0x63uy; 0x91uy; 0x69uy; 0x40uy; 0xa3uy; 0x48uy; 0x5cuy; 0xd2uy; 0x60uy; 0xf9uy; 0x4fuy; 0x6cuy; 0x47uy; 0x8buy; 0x3buy; 0xb1uy; 0x9fuy; 0x8euy; 0xeeuy; 0x16uy; 0x8auy; 0x13uy; 0xfcuy; 0x46uy; 0x17uy; 0xc3uy; 0xc3uy; 0x32uy; 0x56uy; 0xf8uy; 0x3cuy; 0x85uy; 0x3auy; 0xb6uy; 0x3euy; 0xaauy; 0x89uy; 0x4fuy; 0xb3uy; 0xdfuy; 0x38uy; 0xfduy; 0xf1uy; 0xe4uy; 0x3auy; 0xc0uy; 0xe6uy; 0x58uy; 0xb5uy; 0x8fuy; 0xc5uy; 0x29uy; 0xa2uy; 
0x92uy; 0x4auy; 0xb6uy; 0xa0uy; 0x34uy; 0x7fuy; 0xabuy; 0xb5uy; 0x8auy; 0x90uy; 0xa1uy; 0xdbuy; 0x4duy; 0xcauy; 0xb6uy; 0x2cuy; 0x41uy; 0x3cuy; 0xf7uy; 0x2buy; 0x21uy; 0xc3uy; 0xfduy; 0xf4uy; 0x17uy; 0x5cuy; 0xb5uy; 0x33uy; 0x17uy; 0x68uy; 0x2buy; 0x08uy; 0x30uy; 0xf3uy; 0xf7uy; 0x30uy; 0x3cuy; 0x96uy; 0xe6uy; 0x6auy; 0x20uy; 0x97uy; 0xe7uy; 0x4duy; 0x10uy; 0x5fuy; 0x47uy; 0x5fuy; 0x49uy; 0x96uy; 0x09uy; 0xf0uy; 0x27uy; 0x91uy; 0xc8uy; 0xf8uy; 0x5auy; 0x2euy; 0x79uy; 0xb5uy; 0xe2uy; 0xb8uy; 0xe8uy; 0xb9uy; 0x7buy; 0xd5uy; 0x10uy; 0xcbuy; 0xffuy; 0x5duy; 0x14uy; 0x73uy; 0xf3uy; ] in
assert_norm (List.Tot.length l = 512);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let input7_len: (x:UInt32.t { UInt32.v x = B.length input7 }) =
512ul
let output7: (b: B.buffer UInt8.t { B.length b = 528 /\ B.recallable b }) =
[@inline_let] let l = [ 0x14uy; 0xf6uy; 0x41uy; 0x37uy; 0xa6uy; 0xd4uy; 0x27uy; 0xcduy; 0xdbuy; 0x06uy; 0x3euy; 0x9auy; 0x4euy; 0xabuy; 0xd5uy; 0xb1uy; 0x1euy; 0x6buy; 0xd2uy; 0xbcuy; 0x11uy; 0xf4uy; 0x28uy; 0x93uy; 0x63uy; 0x54uy; 0xefuy; 0xbbuy; 0x5euy; 0x1duy; 0x3auy; 0x1duy; 0x37uy; 0x3cuy; 0x0auy; 0x6cuy; 0x1euy; 0xc2uy; 0xd1uy; 0x2cuy; 0xb5uy; 0xa3uy; 0xb5uy; 0x7buy; 0xb8uy; 0x8fuy; 0x25uy; 0xa6uy; 0x1buy; 0x61uy; 0x1cuy; 0xecuy; 0x28uy; 0x58uy; 0x26uy; 0xa4uy; 0xa8uy; 0x33uy; 0x28uy; 0x25uy; 0x5cuy; 0x45uy; 0x05uy; 0xe5uy; 0x6cuy; 0x99uy; 0xe5uy; 0x45uy; 0xc4uy; 0xa2uy; 0x03uy; 0x84uy; 0x03uy; 0x73uy; 0x1euy; 0x8cuy; 0x49uy; 0xacuy; 0x20uy; 0xdduy; 0x8duy; 0xb3uy; 0xc4uy; 0xf5uy; 0xe7uy; 0x4fuy; 0xf1uy; 0xeduy; 0xa1uy; 0x98uy; 0xdeuy; 0xa4uy; 0x96uy; 0xdduy; 0x2fuy; 0xabuy; 0xabuy; 0x97uy; 0xcfuy; 0x3euy; 0xd2uy; 0x9euy; 0xb8uy; 0x13uy; 0x07uy; 0x28uy; 0x29uy; 0x19uy; 0xafuy; 0xfduy; 0xf2uy; 0x49uy; 0x43uy; 0xeauy; 0x49uy; 0x26uy; 0x91uy; 0xc1uy; 0x07uy; 0xd6uy; 0xbbuy; 0x81uy; 0x75uy; 0x35uy; 0x0duy; 0x24uy; 0x7fuy; 0xc8uy; 0xdauy; 0xd4uy; 0xb7uy; 0xebuy; 0xe8uy; 0x5cuy; 0x09uy; 0xa2uy; 0x2fuy; 0xdcuy; 0x28uy; 0x7duy; 0x3auy; 0x03uy; 0xfauy; 0x94uy; 0xb5uy; 0x1duy; 0x17uy; 0x99uy; 0x36uy; 0xc3uy; 0x1cuy; 0x18uy; 0x34uy; 0xe3uy; 0x9fuy; 0xf5uy; 0x55uy; 0x7cuy; 0xb0uy; 0x60uy; 0x9duy; 0xffuy; 0xacuy; 0xd4uy; 0x61uy; 0xf2uy; 0xaduy; 0xf8uy; 0xceuy; 0xc7uy; 0xbeuy; 0x5cuy; 0xd2uy; 0x95uy; 0xa8uy; 0x4buy; 0x77uy; 0x13uy; 0x19uy; 0x59uy; 0x26uy; 0xc9uy; 0xb7uy; 0x8fuy; 0x6auy; 0xcbuy; 0x2duy; 0x37uy; 0x91uy; 0xeauy; 0x92uy; 0x9cuy; 0x94uy; 0x5buy; 0xdauy; 0x0buy; 0xceuy; 0xfeuy; 0x30uy; 0x20uy; 0xf8uy; 0x51uy; 0xaduy; 0xf2uy; 0xbeuy; 0xe7uy; 0xc7uy; 0xffuy; 0xb3uy; 0x33uy; 0x91uy; 0x6auy; 0xc9uy; 0x1auy; 0x41uy; 0xc9uy; 0x0fuy; 0xf3uy; 0x10uy; 0x0euy; 0xfduy; 0x53uy; 0xffuy; 0x6cuy; 0x16uy; 0x52uy; 0xd9uy; 0xf3uy; 0xf7uy; 0x98uy; 0x2euy; 0xc9uy; 0x07uy; 0x31uy; 0x2cuy; 0x0cuy; 0x72uy; 0xd7uy; 0xc5uy; 0xc6uy; 0x08uy; 0x2auy; 0x7buy; 0xdauy; 0xbduy; 0x7euy; 0x02uy; 0xeauy; 0x1auy; 0xbbuy; 0xf2uy; 0x04uy; 0x27uy; 0x61uy; 0x28uy; 0x8euy; 0xf5uy; 0x04uy; 0x03uy; 0x1fuy; 0x4cuy; 0x07uy; 0x55uy; 0x82uy; 0xecuy; 0x1euy; 0xd7uy; 0x8buy; 0x2fuy; 0x65uy; 0x56uy; 0xd1uy; 0xd9uy; 0x1euy; 0x3cuy; 0xe9uy; 0x1fuy; 0x5euy; 0x98uy; 0x70uy; 0x38uy; 0x4auy; 0x8cuy; 0x49uy; 0xc5uy; 0x43uy; 0xa0uy; 0xa1uy; 0x8buy; 0x74uy; 0x9duy; 0x4cuy; 0x62uy; 0x0duy; 0x10uy; 0x0cuy; 0xf4uy; 0x6cuy; 0x8fuy; 0xe0uy; 0xaauy; 0x9auy; 0x8duy; 0xb7uy; 0xe0uy; 0xbeuy; 0x4cuy; 0x87uy; 0xf1uy; 0x98uy; 0x2fuy; 0xccuy; 0xeduy; 0xc0uy; 0x52uy; 0x29uy; 0xdcuy; 0x83uy; 0xf8uy; 0xfcuy; 0x2cuy; 0x0euy; 0xa8uy; 0x51uy; 0x4duy; 0x80uy; 0x0duy; 0xa3uy; 0xfeuy; 0xd8uy; 0x37uy; 0xe7uy; 0x41uy; 0x24uy; 0xfcuy; 0xfbuy; 0x75uy; 0xe3uy; 0x71uy; 0x7buy; 0x57uy; 0x45uy; 0xf5uy; 0x97uy; 0x73uy; 0x65uy; 0x63uy; 0x14uy; 0x74uy; 0xb8uy; 0x82uy; 0x9fuy; 0xf8uy; 0x60uy; 0x2fuy; 0x8auy; 0xf2uy; 0x4euy; 0xf1uy; 0x39uy; 0xdauy; 0x33uy; 0x91uy; 0xf8uy; 0x36uy; 0xe0uy; 0x8duy; 0x3fuy; 0x1fuy; 0x3buy; 0x56uy; 0xdcuy; 0xa0uy; 0x8fuy; 0x3cuy; 0x9duy; 0x71uy; 0x52uy; 0xa7uy; 0xb8uy; 0xc0uy; 0xa5uy; 0xc6uy; 0xa2uy; 0x73uy; 0xdauy; 0xf4uy; 0x4buy; 0x74uy; 0x5buy; 0x00uy; 0x3duy; 0x99uy; 0xd7uy; 0x96uy; 0xbauy; 0xe6uy; 0xe1uy; 0xa6uy; 0x96uy; 0x38uy; 0xaduy; 0xb3uy; 0xc0uy; 0xd2uy; 0xbauy; 0x91uy; 0x6buy; 0xf9uy; 0x19uy; 0xdduy; 0x3buy; 0xbeuy; 0xbeuy; 0x9cuy; 0x20uy; 0x50uy; 0xbauy; 0xa1uy; 0xd0uy; 0xceuy; 0x11uy; 0xbduy; 0x95uy; 0xd8uy; 0xd1uy; 0xdduy; 0x33uy; 0x85uy; 0x74uy; 0xdcuy; 0xdbuy; 0x66uy; 0x76uy; 0x44uy; 0xdcuy; 0x03uy; 0x74uy; 0x48uy; 0x35uy; 0x98uy; 
0xb1uy; 0x18uy; 0x47uy; 0x94uy; 0x7duy; 0xffuy; 0x62uy; 0xe4uy; 0x58uy; 0x78uy; 0xabuy; 0xeduy; 0x95uy; 0x36uy; 0xd9uy; 0x84uy; 0x91uy; 0x82uy; 0x64uy; 0x41uy; 0xbbuy; 0x58uy; 0xe6uy; 0x1cuy; 0x20uy; 0x6duy; 0x15uy; 0x6buy; 0x13uy; 0x96uy; 0xe8uy; 0x35uy; 0x7fuy; 0xdcuy; 0x40uy; 0x2cuy; 0xe9uy; 0xbcuy; 0x8auy; 0x4fuy; 0x92uy; 0xecuy; 0x06uy; 0x2duy; 0x50uy; 0xdfuy; 0x93uy; 0x5duy; 0x65uy; 0x5auy; 0xa8uy; 0xfcuy; 0x20uy; 0x50uy; 0x14uy; 0xa9uy; 0x8auy; 0x7euy; 0x1duy; 0x08uy; 0x1fuy; 0xe2uy; 0x99uy; 0xd0uy; 0xbeuy; 0xfbuy; 0x3auy; 0x21uy; 0x9duy; 0xaduy; 0x86uy; 0x54uy; 0xfduy; 0x0duy; 0x98uy; 0x1cuy; 0x5auy; 0x6fuy; 0x1fuy; 0x9auy; 0x40uy; 0xcduy; 0xa2uy; 0xffuy; 0x6auy; 0xf1uy; 0x54uy; ] in
assert_norm (List.Tot.length l = 528);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let output7_len: (x:UInt32.t { UInt32.v x = B.length output7 }) =
528ul
let key8: (b: B.buffer UInt8.t { B.length b = 32 /\ B.recallable b }) =
[@inline_let] let l = [ 0xeauy; 0xbcuy; 0x56uy; 0x99uy; 0xe3uy; 0x50uy; 0xffuy; 0xc5uy; 0xccuy; 0x1auy; 0xd7uy; 0xc1uy; 0x57uy; 0x72uy; 0xeauy; 0x86uy; 0x5buy; 0x89uy; 0x88uy; 0x61uy; 0x3duy; 0x2fuy; 0x9buy; 0xb2uy; 0xe7uy; 0x9cuy; 0xecuy; 0x74uy; 0x6euy; 0x3euy; 0xf4uy; 0x3buy; ] in
assert_norm (List.Tot.length l = 32);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let key8_len: (x:UInt32.t { UInt32.v x = B.length key8 }) =
32ul
let nonce8: (b: B.buffer UInt8.t { B.length b = 12 /\ B.recallable b }) =
[@inline_let] let l = [ 0x00uy; 0x00uy; 0x00uy; 0x00uy; 0xefuy; 0x2duy; 0x63uy; 0xeeuy; 0x6buy; 0x80uy; 0x8buy; 0x78uy; ] in
assert_norm (List.Tot.length l = 12);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let nonce8_len: (x:UInt32.t { UInt32.v x = B.length nonce8 }) =
12ul
let aad8: (b: B.buffer UInt8.t { B.length b = 9 /\ B.recallable b }) =
[@inline_let] let l = [ 0x5auy; 0x27uy; 0xffuy; 0xebuy; 0xdfuy; 0x84uy; 0xb2uy; 0x9euy; 0xefuy; ] in
assert_norm (List.Tot.length l = 9);
B.gcmalloc_of_list HyperStack.root l
inline_for_extraction let aad8_len: (x:UInt32.t { UInt32.v x = B.length aad8 }) =
9ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Test.Vectors.Chacha20Poly1305.fst"
} | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test.Vectors",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | b:
LowStar.Buffer.buffer FStar.UInt8.t
{ LowStar.Monotonic.Buffer.length b = 513 /\ LowStar.Monotonic.Buffer.recallable b /\
LowStar.Monotonic.Buffer.disjoint b Test.Vectors.Chacha20Poly1305.aad8 } | Prims.Tot | [
"total"
] | [] | [
"LowStar.Buffer.gcmalloc_of_list",
"FStar.UInt8.t",
"FStar.Monotonic.HyperHeap.root",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.Pervasives.normalize_term",
"FStar.List.Tot.Base.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"LowStar.Buffer.buffer",
"LowStar.Monotonic.Buffer.disjoint",
"Test.Vectors.Chacha20Poly1305.aad8",
"Prims.list",
"Prims.Cons",
"FStar.UInt8.__uint_to_t",
"Prims.Nil",
"LowStar.Monotonic.Buffer.recall"
] | [] | false | false | false | false | false | let input8:(b: B.buffer UInt8.t {B.length b = 513 /\ B.recallable b /\ B.disjoint b aad8}) =
| B.recall aad8;
[@@ inline_let ]let l =
[
0xe6uy; 0xc3uy; 0xdbuy; 0x63uy; 0x55uy; 0x15uy; 0xe3uy; 0x5buy; 0xb7uy; 0x4buy; 0x27uy; 0x8buy;
0x5auy; 0xdduy; 0xc2uy; 0xe8uy; 0x3auy; 0x6buy; 0xd7uy; 0x81uy; 0x96uy; 0x35uy; 0x97uy; 0xcauy;
0xd7uy; 0x68uy; 0xe8uy; 0xefuy; 0xceuy; 0xabuy; 0xdauy; 0x09uy; 0x6euy; 0xd6uy; 0x8euy; 0xcbuy;
0x55uy; 0xb5uy; 0xe1uy; 0xe5uy; 0x57uy; 0xfduy; 0xc4uy; 0xe3uy; 0xe0uy; 0x18uy; 0x4fuy; 0x85uy;
0xf5uy; 0x3fuy; 0x7euy; 0x4buy; 0x88uy; 0xc9uy; 0x52uy; 0x44uy; 0x0fuy; 0xeauy; 0xafuy; 0x1fuy;
0x71uy; 0x48uy; 0x9fuy; 0x97uy; 0x6duy; 0xb9uy; 0x6fuy; 0x00uy; 0xa6uy; 0xdeuy; 0x2buy; 0x77uy;
0x8buy; 0x15uy; 0xaduy; 0x10uy; 0xa0uy; 0x2buy; 0x7buy; 0x41uy; 0x90uy; 0x03uy; 0x2duy; 0x69uy;
0xaeuy; 0xccuy; 0x77uy; 0x7cuy; 0xa5uy; 0x9duy; 0x29uy; 0x22uy; 0xc2uy; 0xeauy; 0xb4uy; 0x00uy;
0x1auy; 0xd2uy; 0x7auy; 0x98uy; 0x8auy; 0xf9uy; 0xf7uy; 0x82uy; 0xb0uy; 0xabuy; 0xd8uy; 0xa6uy;
0x94uy; 0x8duy; 0x58uy; 0x2fuy; 0x01uy; 0x9euy; 0x00uy; 0x20uy; 0xfcuy; 0x49uy; 0xdcuy; 0x0euy;
0x03uy; 0xe8uy; 0x45uy; 0x10uy; 0xd6uy; 0xa8uy; 0xdauy; 0x55uy; 0x10uy; 0x9auy; 0xdfuy; 0x67uy;
0x22uy; 0x8buy; 0x43uy; 0xabuy; 0x00uy; 0xbbuy; 0x02uy; 0xc8uy; 0xdduy; 0x7buy; 0x97uy; 0x17uy;
0xd7uy; 0x1duy; 0x9euy; 0x02uy; 0x5euy; 0x48uy; 0xdeuy; 0x8euy; 0xcfuy; 0x99uy; 0x07uy; 0x95uy;
0x92uy; 0x3cuy; 0x5fuy; 0x9fuy; 0xc5uy; 0x8auy; 0xc0uy; 0x23uy; 0xaauy; 0xd5uy; 0x8cuy; 0x82uy;
0x6euy; 0x16uy; 0x92uy; 0xb1uy; 0x12uy; 0x17uy; 0x07uy; 0xc3uy; 0xfbuy; 0x36uy; 0xf5uy; 0x6cuy;
0x35uy; 0xd6uy; 0x06uy; 0x1fuy; 0x9fuy; 0xa7uy; 0x94uy; 0xa2uy; 0x38uy; 0x63uy; 0x9cuy; 0xb0uy;
0x71uy; 0xb3uy; 0xa5uy; 0xd2uy; 0xd8uy; 0xbauy; 0x9fuy; 0x08uy; 0x01uy; 0xb3uy; 0xffuy; 0x04uy;
0x97uy; 0x73uy; 0x45uy; 0x1buy; 0xd5uy; 0xa9uy; 0x9cuy; 0x80uy; 0xafuy; 0x04uy; 0x9auy; 0x85uy;
0xdbuy; 0x32uy; 0x5buy; 0x5duy; 0x1auy; 0xc1uy; 0x36uy; 0x28uy; 0x10uy; 0x79uy; 0xf1uy; 0x3cuy;
0xbfuy; 0x1auy; 0x41uy; 0x5cuy; 0x4euy; 0xdfuy; 0xb2uy; 0x7cuy; 0x79uy; 0x3buy; 0x7auy; 0x62uy;
0x3duy; 0x4buy; 0xc9uy; 0x9buy; 0x2auy; 0x2euy; 0x7cuy; 0xa2uy; 0xb1uy; 0x11uy; 0x98uy; 0xa7uy;
0x34uy; 0x1auy; 0x00uy; 0xf3uy; 0xd1uy; 0xbcuy; 0x18uy; 0x22uy; 0xbauy; 0x02uy; 0x56uy; 0x62uy;
0x31uy; 0x10uy; 0x11uy; 0x6duy; 0xe0uy; 0x54uy; 0x9duy; 0x40uy; 0x1fuy; 0x26uy; 0x80uy; 0x41uy;
0xcauy; 0x3fuy; 0x68uy; 0x0fuy; 0x32uy; 0x1duy; 0x0auy; 0x8euy; 0x79uy; 0xd8uy; 0xa4uy; 0x1buy;
0x29uy; 0x1cuy; 0x90uy; 0x8euy; 0xc5uy; 0xe3uy; 0xb4uy; 0x91uy; 0x37uy; 0x9auy; 0x97uy; 0x86uy;
0x99uy; 0xd5uy; 0x09uy; 0xc5uy; 0xbbuy; 0xa3uy; 0x3fuy; 0x21uy; 0x29uy; 0x82uy; 0x14uy; 0x5cuy;
0xabuy; 0x25uy; 0xfbuy; 0xf2uy; 0x4fuy; 0x58uy; 0x26uy; 0xd4uy; 0x83uy; 0xaauy; 0x66uy; 0x89uy;
0x67uy; 0x7euy; 0xc0uy; 0x49uy; 0xe1uy; 0x11uy; 0x10uy; 0x7fuy; 0x7auy; 0xdauy; 0x29uy; 0x04uy;
0xffuy; 0xf0uy; 0xcbuy; 0x09uy; 0x7cuy; 0x9duy; 0xfauy; 0x03uy; 0x6fuy; 0x81uy; 0x09uy; 0x31uy;
0x60uy; 0xfbuy; 0x08uy; 0xfauy; 0x74uy; 0xd3uy; 0x64uy; 0x44uy; 0x7cuy; 0x55uy; 0x85uy; 0xecuy;
0x9cuy; 0x6euy; 0x25uy; 0xb7uy; 0x6cuy; 0xc5uy; 0x37uy; 0xb6uy; 0x83uy; 0x87uy; 0x72uy; 0x95uy;
0x8buy; 0x9duy; 0xe1uy; 0x69uy; 0x5cuy; 0x31uy; 0x95uy; 0x42uy; 0xa6uy; 0x2cuy; 0xd1uy; 0x36uy;
0x47uy; 0x1fuy; 0xecuy; 0x54uy; 0xabuy; 0xa2uy; 0x1cuy; 0xd8uy; 0x00uy; 0xccuy; 0xbcuy; 0x0duy;
0x65uy; 0xe2uy; 0x67uy; 0xbfuy; 0xbcuy; 0xeauy; 0xeeuy; 0x9euy; 0xe4uy; 0x36uy; 0x95uy; 0xbeuy;
0x73uy; 0xd9uy; 0xa6uy; 0xd9uy; 0x0fuy; 0xa0uy; 0xccuy; 0x82uy; 0x76uy; 0x26uy; 0xaduy; 0x5buy;
0x58uy; 0x6cuy; 0x4euy; 0xabuy; 0x29uy; 0x64uy; 0xd3uy; 0xd9uy; 0xa9uy; 0x08uy; 0x8cuy; 0x1duy;
0xa1uy; 0x4fuy; 0x80uy; 0xd8uy; 0x3fuy; 0x94uy; 0xfbuy; 0xd3uy; 0x7buy; 0xfcuy; 0xd1uy; 0x2buy;
0xc3uy; 0x21uy; 0xebuy; 0xe5uy; 0x1cuy; 0x84uy; 0x23uy; 0x7fuy; 0x4buy; 0xfauy; 0xdbuy; 0x34uy;
0x18uy; 0xa2uy; 0xc2uy; 0xe5uy; 0x13uy; 0xfeuy; 0x6cuy; 0x49uy; 0x81uy; 0xd2uy; 0x73uy; 0xe7uy;
0xe2uy; 0xd7uy; 0xe4uy; 0x4fuy; 0x4buy; 0x08uy; 0x6euy; 0xb1uy; 0x12uy; 0x22uy; 0x10uy; 0x9duy;
0xacuy; 0x51uy; 0x1euy; 0x17uy; 0xd9uy; 0x8auy; 0x0buy; 0x42uy; 0x88uy; 0x16uy; 0x81uy; 0x37uy;
0x7cuy; 0x6auy; 0xf7uy; 0xefuy; 0x2duy; 0xe3uy; 0xd9uy; 0xf8uy; 0x5fuy; 0xe0uy; 0x53uy; 0x27uy;
0x74uy; 0xb9uy; 0xe2uy; 0xd6uy; 0x1cuy; 0x80uy; 0x2cuy; 0x52uy; 0x65uy
]
in
assert_norm (List.Tot.length l = 513);
B.gcmalloc_of_list HyperStack.root l | false |
FStar.InteractiveHelpers.Base.fst | FStar.InteractiveHelpers.Base.opt_apply_subst | val opt_apply_subst : env -> option term -> list ((bv & typ) & term) -> Tac (option term) | val opt_apply_subst : env -> option term -> list ((bv & typ) & term) -> Tac (option term) | let opt_apply_subst e opt_t subst =
match opt_t with
| None -> None
| Some t -> Some (apply_subst e t subst) | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.Base.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 42,
"end_line": 591,
"start_col": 0,
"start_line": 588
} | module FStar.InteractiveHelpers.Base
open FStar.List
open FStar.Tactics
open FStar.Mul
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Utilities *)
val bv_eq : bv -> bv -> Tot bool
let bv_eq (bv1 bv2 : bv) =
let bvv1 = inspect_bv bv1 in
let bvv2 = inspect_bv bv2 in
(* We don't check for type equality: the fact that no two different binders
* have the same name and index is an invariant which must be enforced -
* and actually we could limit ourselves to checking the index *)
bvv1.bv_index = bvv2.bv_index
val fv_eq : fv -> fv -> Tot bool
let fv_eq fv1 fv2 =
let n1 = inspect_fv fv1 in
let n2 = inspect_fv fv2 in
n1 = n2
// TODO: use everywhere
val fv_eq_name : fv -> name -> Tot bool
let fv_eq_name fv n =
let fvn = inspect_fv fv in
fvn = n
// TODO: use more
val opt_apply (#a #b : Type) (f : a -> Tot b) (x : option a) : Tot (option b)
let opt_apply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val opt_tapply (#a #b : Type) (f : a -> Tac b) (x : option a) : Tac (option b)
let opt_tapply #a #b f x =
match x with
| None -> None
| Some x' -> Some (f x')
val option_to_string : (#a : Type) -> (a -> Tac string) -> option a -> Tac string
let option_to_string #a f x =
match x with
| None -> "None"
| Some x' -> "Some (" ^ f x' ^ ")"
let opt_cons (#a : Type) (opt_x : option a) (ls : list a) : Tot (list a) =
match opt_x with
| Some x -> x :: ls
| None -> ls
val list_to_string : #a : Type -> (a -> Tac string) -> list a -> Tac string
let list_to_string #a f ls =
(Tactics.Util.fold_left (fun s x -> s ^ " (" ^ f x ^ ");") "[" ls) ^ "]"
/// Apply a term to a list of parameters, and normalize the result to make sure
/// all the abstractions are simplified.
val mk_app_norm : env -> term -> list term -> Tac term
let mk_app_norm e t params =
let t1 = mk_e_app t params in
let t2 = norm_term_env e [] t1 in
t2
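(* Illustration (minimal sketch): [mk_app_norm] is typically used to apply a lambda to
 * concrete arguments and let the normalizer reduce the redex. For instance, applying
 * [`(fun (x:int) -> x + 1)] to [`(2)] should normalize to the literal [3]. The wrapper
 * below is only an illustrative helper built on that assumption. *)
let mk_app_norm_example (e : env) : Tac term =
  mk_app_norm e (`(fun (x:int) -> x + 1)) [(`(2))]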
val opt_mk_app_norm : env -> option term -> list term -> Tac (option term)
let opt_mk_app_norm e opt_t params =
opt_tapply (fun t -> mk_app_norm e t params) opt_t
// TODO: remove
let rec unzip #a #b (l : list (a & b)) : Tot (list a & list b) =
match l with
| [] -> ([],[])
| (hd1,hd2)::tl ->
let (tl1,tl2) = unzip tl in
(hd1::tl1,hd2::tl2)
/// Alternative ``bv_to_string`` function where we print the index of the bv.
/// It can be very useful for debugging.
let abv_to_string bv : Tac string =
let bvv = inspect_bv bv in
name_of_bv bv ^ " (%" ^ string_of_int (bvv.bv_index) ^ ")"
let print_binder_info (full : bool) (b : binder) : Tac unit =
let open inspect_binder b <: binder_view in
let qual_str = match binder_qual with
| Q_Implicit -> "Implicit"
| Q_Explicit -> "Explicit"
| Q_Meta t -> "Meta: " ^ term_to_string t
in
let bview = inspect_bv binder_bv in
if full then
print (
"> print_binder_info:" ^
"\n- name: " ^ (name_of_binder b) ^
"\n- as string: " ^ (binder_to_string b) ^
"\n- aqual: " ^ qual_str ^
"\n- ppname: " ^ name_of_bv binder_bv ^
"\n- index: " ^ string_of_int bview.bv_index ^
"\n- sort: " ^ term_to_string binder_sort
)
else print (binder_to_string b)
let print_binders_info (full : bool) (e:env) : Tac unit =
iter (print_binder_info full) (binders_of_env e)
let acomp_to_string (c:comp) : Tac string =
match inspect_comp c with
| C_Total ret ->
"C_Total (" ^ term_to_string ret ^ ")"
| C_GTotal ret ->
"C_GTotal (" ^ term_to_string ret ^ ")"
| C_Lemma pre post patterns ->
"C_Lemma (" ^ term_to_string pre ^ ") (" ^ term_to_string post ^ ")"
| C_Eff us eff_name result eff_args _ ->
let eff_arg_to_string (a : term) : Tac string =
" (" ^ term_to_string a ^ ")"
in
let args_str = map (fun (x, y) -> eff_arg_to_string x) eff_args in
let args_str = List.Tot.fold_left (fun x y -> x ^ y) "" args_str in
"C_Eff (" ^ flatten_name eff_name ^ ") (" ^ term_to_string result ^ ")" ^ args_str
exception MetaAnalysis of string
let mfail str =
raise (MetaAnalysis str)
(*** Debugging *)
/// Some debugging facilities
val print_dbg : bool -> string -> Tac unit
let print_dbg debug s =
if debug then print s
/// Return the qualifier of a term as a string
val term_view_construct (t : term_view) : Tac string
let term_view_construct (t : term_view) : Tac string =
match t with
| Tv_Var _ -> "Tv_Var"
| Tv_BVar _ -> "Tv_BVar"
| Tv_FVar _ -> "Tv_FVar"
| Tv_App _ _ -> "Tv_App"
| Tv_Abs _ _ -> "Tv_Abs"
| Tv_Arrow _ _ -> "Tv_Arrow"
| Tv_Type _ -> "Tv_Type"
| Tv_Refine _ _ _ -> "Tv_Refine"
| Tv_Const _ -> "Tv_Const"
| Tv_Uvar _ _ -> "Tv_Uvar"
| Tv_Let _ _ _ _ _ _ -> "Tv_Let"
| Tv_Match _ _ _ -> "Tv_Match"
| Tv_AscribedT _ _ _ _ -> "Tv_AscribedT"
| Tv_AscribedC _ _ _ _ -> "Tv_AscribedC"
| _ -> "Tv_Unknown"
val term_construct (t : term) : Tac string
let term_construct (t : term) : Tac string =
term_view_construct (inspect t)
(*** Pretty printing *)
/// There are many issues linked to (pretty) printing terms.
/// The first issue is that when parsing terms, F* automatically inserts
/// ascriptions, which then clutter the terms printed to the user. The current
/// workaround is to filter those ascriptions in the terms before exploiting them.
/// TODO: this actually doesn't work for some unknown reason: some terms like [a /\ b]
/// become [l_and a b]...
val filter_ascriptions : bool -> term -> Tac term
let filter_ascriptions dbg t =
print_dbg dbg ("[> filter_ascriptions: " ^ term_view_construct t ^ ": " ^ term_to_string t );
visit_tm (fun t ->
match inspect t with
| Tv_AscribedT e _ _ _
| Tv_AscribedC e _ _ _ -> e
| _ -> t) t
/// Our prettification function. Apply it to all the terms which might be printed
/// back to the user. Note that the time at which the function is applied is
/// important: we can't apply it on all the assertions we export to the user, just
/// before exporting, because we may have inserted ascriptions on purpose, which
/// would then be filtered away.
val prettify_term : bool -> term -> Tac term
let prettify_term dbg t = filter_ascriptions dbg t
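(* Illustration (minimal sketch): a term carrying an ascription, e.g. one built as
 * [pack (Tv_AscribedT x_tm (`nat) None false)], is mapped back to [x_tm] by
 * [filter_ascriptions], so the output shown to the user reads [x] rather than
 * [x <: nat]. The helper below is only an illustrative wrapper. *)
let strip_one_ascription_example (x_tm : term) : Tac term =
  filter_ascriptions false (pack (Tv_AscribedT x_tm (`nat) None false))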
(*** Environments *)
/// We need a way to handle environments with variable bindings
/// and name shadowing, to properly display the context to the user.
/// A map linking variables to terms. For now we use a list to define it, because
/// there shouldn't be too many bindings.
type bind_map (a : Type) = list (bv & a)
let bind_map_push (#a:Type) (m:bind_map a) (b:bv) (x:a) = (b,x)::m
let rec bind_map_get (#a:Type) (m:bind_map a) (b:bv) : Tot (option a) =
match m with
| [] -> None
| (b', x)::m' ->
if compare_bv b b' = Order.Eq then Some x else bind_map_get m' b
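(* Illustration (minimal sketch): a [bind_map] is a plain association list, so it can be
 * built with [bind_map_push] and queried with [bind_map_get]; lookups compare binders
 * by their unique index rather than by name, so shadowed names are not an issue here. *)
let bind_map_example (b1 b2 : bv) : Tot (option string) =
  let m = bind_map_push (bind_map_push [] b1 "foo") b2 "bar" in
  bind_map_get m b1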
let rec bind_map_get_from_name (#a:Type) (m:bind_map a) (name:string) :
Tac (option (bv & a)) =
match m with
| [] -> None
| (b', x)::m' ->
let b'v = inspect_bv b' in
if unseal b'v.bv_ppname = name then Some (b', x) else bind_map_get_from_name m' name
noeq type genv =
{
env : env;
(* Whenever we evaluate a let binding, we keep track of the relation between
* the binder and its definition.
* The boolean indicates whether or not the variable is considered abstract. We
* often need to introduce variables which don't appear in the user context, for
* example when we need to deal with a postcondition for Stack or ST, which handles
* the previous and new memory states, and which may not be available in the user
* context, or where we don't always know which variable to use.
* In this case, whenever we output the term, we write its content as an
* abstraction applied to those missing parameters. For instance, in the
* case of the assertion introduced for a post-condition:
* [> assert((fun h1 h2 -> post) h1 h2);
* Besides the informative goal, the user can replace those parameters (h1
* and h2 above) by the proper ones then normalize the assertion by using
* the appropriate command to get a valid assertion. *)
bmap : bind_map (typ & bool & term);
(* Whenever we introduce a new variable, we check whether it shadows another
* variable because it has the same name, and put it in the below
* list. Of course, for the F* internals such shadowing is not an issue, because
* the index of every variable should be different, but this is very important
* when generating code for the user *)
svars : list (bv & typ);
}
let get_env (e:genv) : env = e.env
let get_bind_map (e:genv) : bind_map (typ & bool & term) = e.bmap
let mk_genv env bmap svars : genv = Mkgenv env bmap svars
let mk_init_genv env : genv = mk_genv env [] []
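(* Illustration (minimal sketch, assuming the [top_env] tactic primitive is in scope):
 * a generalized environment usually starts from the ambient environment and is then
 * extended with [genv_push_bv] and friends as new variables are introduced. *)
let mk_init_genv_example () : Tac genv =
  mk_init_genv (top_env ())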
val genv_to_string : genv -> Tac string
let genv_to_string ge =
let binder_to_string (b : binder) : Tac string =
abv_to_string (bv_of_binder b) ^ "\n"
in
let binders_str = map binder_to_string (binders_of_env ge.env) in
let bmap_elem_to_string (e : bv & (typ & bool & term)) : Tac string =
let bv, (_sort, abs, t) = e in
"(" ^ abv_to_string bv ^" -> (" ^
string_of_bool abs ^ ", " ^ term_to_string t ^ "))\n"
in
let bmap_str = map bmap_elem_to_string ge.bmap in
let svars_str = map (fun (bv, _) -> abv_to_string bv ^ "\n") ge.svars in
let flatten = List.Tot.fold_left (fun x y -> x ^ y) "" in
"> env:\n" ^ flatten binders_str ^
"> bmap:\n" ^ flatten bmap_str ^
"> svars:\n" ^ flatten svars_str
let genv_get (ge:genv) (b:bv) : Tot (option (typ & bool & term)) =
bind_map_get ge.bmap b
let genv_get_from_name (ge:genv) (name:string) : Tac (option ((bv & typ) & (bool & term))) =
(* tweak return a bit to include sort *)
match bind_map_get_from_name ge.bmap name with
| None -> None
| Some (bv, (sort, b, x)) -> Some ((bv, sort), (b, x))
/// Push a binder to a ``genv``. Optionally takes a ``term`` which provides the
/// term the binder is bound to (in a `let _ = _ in` construct for example).
let genv_push_bv (ge:genv) (b:bv) (sort:typ) (abs:bool) (t:option term) : Tac genv =
let br = mk_binder b sort in
let sv = genv_get_from_name ge (name_of_bv b) in
let svars' = if Some? sv then fst (Some?.v sv) :: ge.svars else ge.svars in
let e' = push_binder ge.env br in
let tm = if Some? t then Some?.v t else pack (Tv_Var b) in
let bmap' = bind_map_push ge.bmap b (sort, abs, tm) in
mk_genv e' bmap' svars'
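(* Illustration (minimal sketch): pushing the variable introduced by [let x = 3 in ...]
 * records both its type and its definition in the map, while a variable with no known
 * definition would be pushed with [abs] set to [true] and [None] as the optional term. *)
let genv_push_let_example (ge : genv) (x : bv) : Tac genv =
  genv_push_bv ge x (`int) false (Some (`(3)))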
let genv_push_binder (ge:genv) (b:binder) (abs:bool) (t:option term) : Tac genv =
genv_push_bv ge (bv_of_binder b) (binder_sort b) abs t
/// Check if a binder is shadowed by another more recent binder
let bv_is_shadowed (ge : genv) (bv : bv) : Tot bool =
List.Tot.existsb (fun (b,_) -> bv_eq bv b) ge.svars
let binder_is_shadowed (ge : genv) (b : binder) : Tot bool =
bv_is_shadowed ge (bv_of_binder b)
let find_shadowed_bvs (ge : genv) (bl : list bv) : Tot (list (bv & bool)) =
List.Tot.map (fun b -> b, bv_is_shadowed ge b) bl
let find_shadowed_binders (ge : genv) (bl : list binder) : Tot (list (binder & bool)) =
List.Tot.map (fun b -> b, binder_is_shadowed ge b) bl
val bv_is_abstract : genv -> bv -> Tot bool
let bv_is_abstract ge bv =
match genv_get ge bv with
| None -> false
| Some (_, abs, _) -> abs
val binder_is_abstract : genv -> binder -> Tot bool
let binder_is_abstract ge b =
bv_is_abstract ge (bv_of_binder b)
val genv_abstract_bvs : genv -> Tot (list (bv & typ))
let genv_abstract_bvs ge =
List.Tot.concatMap
(fun (bv, (ty, abs, _)) -> if abs then [bv,ty] else []) ge.bmap
/// Versions of ``fresh_bv`` and ``fresh_binder`` inspired by the standard library
/// We make sure the name is fresh because we need to be able to generate valid code
/// (it is thus not enough to have a fresh integer).
let rec _fresh_bv binder_names basename i : Tac bv =
let name = basename ^ string_of_int i in
(* In worst case the performance is quadratic in the number of binders.
* TODO: fix that, it actually probably happens for anonymous variables ('_') *)
if List.mem name binder_names then _fresh_bv binder_names basename (i+1)
else fresh_bv_named name
let fresh_bv (e : env) (basename : string) : Tac bv =
let binders = binders_of_env e in
let binder_names = Tactics.map name_of_binder binders in
_fresh_bv binder_names basename 0
let fresh_binder (e : env) (basename : string) (ty : typ) : Tac binder =
let bv = fresh_bv e basename in
mk_binder bv ty
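(* Illustration (minimal sketch): if the environment already contains binders named "x0"
 * and "x1", the generated name is "x2": the counter is bumped until the name no longer
 * clashes, so code printed back to the user stays well-formed. *)
let fresh_int_binder_example (e : env) : Tac binder =
  fresh_binder e "x" (`int)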
let genv_push_fresh_binder (ge : genv) (basename : string) (ty : typ) : Tac (genv & binder) =
let b = fresh_binder ge.env basename ty in
(* TODO: we can have a shortcircuit push (which performs less checks) *)
let ge' = genv_push_binder ge b true None in
ge', b
// TODO: actually we should use push_fresh_bv more
let push_fresh_binder (e : env) (basename : string) (ty : typ) : Tac (env & binder) =
let b = fresh_binder e basename ty in
let e' = push_binder e b in
e', b
let genv_push_fresh_bv (ge : genv) (basename : string) (ty : typ) : Tac (genv & bv) =
let ge', b = genv_push_fresh_binder ge basename ty in
ge', bv_of_binder b
val push_fresh_var : env -> string -> typ -> Tac (term & binder & env)
let push_fresh_var e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, e1
val genv_push_fresh_var : genv -> string -> typ -> Tac (term & binder & genv)
let genv_push_fresh_var ge0 basename ty =
let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
v1, b1, ge1
val push_two_fresh_vars : env -> string -> typ -> Tac (term & binder & term & binder & env)
let push_two_fresh_vars e0 basename ty =
let e1, b1 = push_fresh_binder e0 basename ty in
let e2, b2 = push_fresh_binder e1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, e2
val genv_push_two_fresh_vars : genv -> string -> typ -> Tac (term & binder & term & binder & genv)
let genv_push_two_fresh_vars ge0 basename ty =
let ge1, b1 = genv_push_fresh_binder ge0 basename ty in
let ge2, b2 = genv_push_fresh_binder ge1 basename ty in
let v1 = pack (Tv_Var (bv_of_binder b1)) in
let v2 = pack (Tv_Var (bv_of_binder b2)) in
v1, b1, v2, b2, ge2
(*** Substitutions *)
/// Substitutions
/// Custom substitutions using the normalizer. This is the easiest and safest
/// way to perform a substitution: if you want to substitute [v] with [t] in [exp],
/// just normalize [(fun v -> exp) t]. Note that this may be computationally expensive.
val norm_apply_subst : env -> term -> list ((bv & typ) & term) -> Tac term
val norm_apply_subst_in_comp : env -> comp -> list ((bv & typ) & term) -> Tac comp
let norm_apply_subst e t subst =
let bl, vl = unzip subst in
let bl = List.Tot.map (fun (bv,ty) -> mk_binder bv ty) bl in
let t1 = mk_abs bl t in
let t2 = mk_e_app t1 vl in
norm_term_env e [] t2
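(* Illustration (minimal sketch): to replace [x] by [1] in [x + y], the function builds
 * the redex [(fun x -> x + y) 1] and lets the normalizer reduce it to [1 + y]. The
 * helper below merely specializes the general version to a single binding. *)
let norm_apply_subst_one (e : env) (t : term) (x : bv) (ty : typ) (v : term) : Tac term =
  norm_apply_subst e t [((x, ty), v)]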
let norm_apply_subst_in_comp e c subst =
let subst = (fun x -> norm_apply_subst e x subst) in
let subst_in_aqualv a : Tac aqualv =
match a with
| Q_Implicit
| Q_Explicit -> a
| Q_Meta t -> Q_Meta (subst t)
in
match inspect_comp c with
| C_Total ret ->
let ret = subst ret in
pack_comp (C_Total ret)
| C_GTotal ret ->
let ret = subst ret in
pack_comp (C_GTotal ret)
| C_Lemma pre post patterns ->
let pre = subst pre in
let post = subst post in
let patterns = subst patterns in
pack_comp (C_Lemma pre post patterns)
| C_Eff us eff_name result eff_args decrs ->
let result = subst result in
let eff_args = map (fun (x, a) -> (subst x, subst_in_aqualv a)) eff_args in
let decrs = map subst decrs in
pack_comp (C_Eff us eff_name result eff_args decrs)
/// As substitution with normalization is very expensive, we implemented another
/// technique which works by exploring terms. This is super fast, but the terms
/// seem not to be reconstructed in the same way, which has a big impact on pretty printing.
/// For example, terms like [A /\ B] get printed as [Prims.l_and A B].
val deep_apply_subst : env -> term -> list (bv & term) -> Tac term
// Whenever we encounter a construction which introduces a binder, we need to apply
// the substitution in the binder type. Note that this gives a new binder, with
// which we need to replace the old one in what follows.
// Also note that it should be possible to rewrite [deep_apply_subst] in terms of [visit_tm],
// but [deep_apply_subst] seems to be a bit more precise with regard to type replacements (not
// sure it is really important, though).
val deep_apply_subst_in_bv : env -> bv -> list (bv & term) -> Tac (bv & list (bv & term))
val deep_apply_subst_in_binder : env -> binder -> list (bv & term) -> Tac (binder & list (bv & term))
val deep_apply_subst_in_comp : env -> comp -> list (bv & term) -> Tac comp
val deep_apply_subst_in_pattern : env -> pattern -> list (bv & term) -> Tac (pattern & list (bv & term))
let rec deep_apply_subst e t subst =
match inspect t with
| Tv_Var b ->
begin match bind_map_get subst b with
| None -> t
| Some t' -> t'
end
| Tv_BVar b ->
(* Note: Tv_BVar shouldn't happen *)
begin match bind_map_get subst b with
| None -> t
| Some t' -> t'
end
| Tv_FVar _ -> t
| Tv_App hd (a,qual) ->
let hd = deep_apply_subst e hd subst in
let a = deep_apply_subst e a subst in
pack (Tv_App hd (a, qual))
| Tv_Abs br body ->
let body = deep_apply_subst e body subst in
pack (Tv_Abs br body)
| Tv_Arrow br c ->
let br, subst = deep_apply_subst_in_binder e br subst in
let c = deep_apply_subst_in_comp e c subst in
pack (Tv_Arrow br c)
| Tv_Type _ -> t
| Tv_Refine bv sort ref ->
let sort = deep_apply_subst e sort subst in
let bv, subst = deep_apply_subst_in_bv e bv subst in
let ref = deep_apply_subst e ref subst in
pack (Tv_Refine bv sort ref)
| Tv_Const _ -> t
| Tv_Uvar _ _ -> t
| Tv_Let recf attrs bv ty def body ->
(* No need to substitute in the attributes - that we filter for safety *)
let ty = deep_apply_subst e ty subst in
let def = deep_apply_subst e def subst in
let bv, subst = deep_apply_subst_in_bv e bv subst in
let body = deep_apply_subst e body subst in
pack (Tv_Let recf [] bv ty def body)
| Tv_Match scrutinee ret_opt branches -> (* TODO: type of pattern variables *)
let scrutinee = deep_apply_subst e scrutinee subst in
let ret_opt = map_opt (fun (b, asc) ->
let b, subst = deep_apply_subst_in_binder e b subst in
let asc =
match asc with
| Inl t, tacopt, use_eq ->
Inl (deep_apply_subst e t subst),
map_opt (fun tac -> deep_apply_subst e tac subst) tacopt,
use_eq
| Inr c, tacopt, use_eq ->
Inr (deep_apply_subst_in_comp e c subst),
map_opt (fun tac -> deep_apply_subst e tac subst) tacopt,
use_eq in
b, asc) ret_opt in
(* For the branches: we don't need to explore the patterns *)
let deep_apply_subst_in_branch branch =
let pat, tm = branch in
let pat, subst = deep_apply_subst_in_pattern e pat subst in
let tm = deep_apply_subst e tm subst in
pat, tm
in
let branches = map deep_apply_subst_in_branch branches in
pack (Tv_Match scrutinee ret_opt branches)
| Tv_AscribedT exp ty tac use_eq ->
let exp = deep_apply_subst e exp subst in
let ty = deep_apply_subst e ty subst in
    (* no need to apply it to the tactic, which we filter out for safety *)
pack (Tv_AscribedT exp ty None use_eq)
| Tv_AscribedC exp c tac use_eq ->
let exp = deep_apply_subst e exp subst in
let c = deep_apply_subst_in_comp e c subst in
    (* no need to apply it to the tactic, which we filter out for safety *)
pack (Tv_AscribedC exp c None use_eq)
| _ ->
(* Unknown *)
t
and deep_apply_subst_in_bv e bv subst =
  (* No substitution needs to happen for variables (there is no longer a sort),
     but we still shift the substitution. *)
bv, (bv, pack (Tv_Var bv))::subst
(*
* AR: TODO: should apply subst in attrs?
*)
and deep_apply_subst_in_binder e br subst =
let open inspect_binder br <: binder_view in
let binder_sort = deep_apply_subst e binder_sort subst in
let binder_bv, subst = deep_apply_subst_in_bv e binder_bv subst in
pack_binder {
binder_bv=binder_bv;
binder_qual=binder_qual;
binder_attrs=binder_attrs;
binder_sort=binder_sort;
}, subst
and deep_apply_subst_in_comp e c subst =
let subst = (fun x -> deep_apply_subst e x subst) in
let subst_in_aqualv a : Tac aqualv =
match a with
| Q_Implicit
| Q_Explicit -> a
| Q_Meta t -> Q_Meta (subst t)
in
match inspect_comp c with
| C_Total ret ->
let ret = subst ret in
pack_comp (C_Total ret)
| C_GTotal ret ->
let ret = subst ret in
pack_comp (C_GTotal ret)
| C_Lemma pre post patterns ->
let pre = subst pre in
let post = subst post in
let patterns = subst patterns in
pack_comp (C_Lemma pre post patterns)
| C_Eff us eff_name result eff_args decrs ->
let result = subst result in
let eff_args = map (fun (x, a) -> (subst x, subst_in_aqualv a)) eff_args in
let decrs = map subst decrs in
pack_comp (C_Eff us eff_name result eff_args decrs)
and deep_apply_subst_in_pattern e pat subst =
match pat with
| Pat_Constant _ -> pat, subst
| Pat_Cons fv us patterns ->
    (* The types of the variables in the patterns should be independent of each
     * other: the fold (a fold_right here) is only used to incrementally update the substitution *)
let patterns, subst =
fold_right (fun (pat, b) (pats, subst) ->
let pat, subst = deep_apply_subst_in_pattern e pat subst in
((pat, b) :: pats, subst)) patterns ([], subst)
in
Pat_Cons fv us patterns, subst
| Pat_Var bv st ->
let st = Sealed.seal (deep_apply_subst e (unseal st) subst) in
let bv, subst = deep_apply_subst_in_bv e bv subst in
Pat_Var bv st, subst
| Pat_Dot_Term eopt ->
Pat_Dot_Term (map_opt (fun t -> deep_apply_subst e t subst) eopt), subst
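(* Illustrative usage sketch only - not part of the original module. It just
   shows the shape of a call to [deep_apply_subst]; the names [e0], [x_bv],
   [v0] and [t0] are hypothetical placeholders for an environment, the variable
   to substitute, the replacement term and the target term. *)
let deep_apply_subst_usage_sketch (e0 : env) (x_bv : bv) (v0 : term) (t0 : term) : Tac term =
  deep_apply_subst e0 t0 [(x_bv, v0)]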
/// The substitution functions actually used in the rest of the meta F* functions.
/// For now, we use normalization because even though it is sometimes slow it
/// gives prettier terms, and readability is the priority. In order to mitigate
/// the performance issue, we try to minimize the number of calls to those functions,
/// by doing lazy instantiations for example (rather than incrementally applying
/// substitutions in a term, we accumulate the substitutions and perform them all at once).
/// TODO: would it be good to have a native substitution function in F*?
let apply_subst = norm_apply_subst
let apply_subst_in_comp = norm_apply_subst_in_comp | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Util.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Sealed.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Order.fst.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.Base.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
e: FStar.Stubs.Reflection.Types.env ->
opt_t: FStar.Pervasives.Native.option FStar.Stubs.Reflection.Types.term ->
subst:
Prims.list ((FStar.Stubs.Reflection.Types.bv * FStar.Stubs.Reflection.Types.typ) *
FStar.Stubs.Reflection.Types.term)
-> FStar.Tactics.Effect.Tac (FStar.Pervasives.Native.option FStar.Stubs.Reflection.Types.term) | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Stubs.Reflection.Types.env",
"FStar.Pervasives.Native.option",
"FStar.Stubs.Reflection.Types.term",
"Prims.list",
"FStar.Pervasives.Native.tuple2",
"FStar.Stubs.Reflection.Types.bv",
"FStar.Stubs.Reflection.Types.typ",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.Some",
"FStar.InteractiveHelpers.Base.apply_subst"
] | [] | false | true | false | false | false | let opt_apply_subst e opt_t subst =
| match opt_t with
| None -> None
| Some t -> Some (apply_subst e t subst) | false |
Spec.Frodo.Sample.fst | Spec.Frodo.Sample.frodo_sample_f | val frodo_sample_f:
a:frodo_alg
-> t:uint16
-> i:size_nat{i < cdf_table_len a}
-> res:nat{res = 0 \/ res = 1} | val frodo_sample_f:
a:frodo_alg
-> t:uint16
-> i:size_nat{i < cdf_table_len a}
-> res:nat{res = 0 \/ res = 1} | let frodo_sample_f a t i =
if v t > v (cdf_table a).[i] then 1 else 0 | {
"file_name": "specs/frodo/Spec.Frodo.Sample.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 44,
"end_line": 26,
"start_col": 0,
"start_line": 25
} | module Spec.Frodo.Sample
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
open Lib.ByteSequence
open Spec.Matrix
open Spec.Frodo.Lemmas
open Spec.Frodo.Params
module LSeq = Lib.Sequence
module Matrix = Spec.Matrix
module Loops = Lib.LoopCombinators
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
val frodo_sample_f:
a:frodo_alg
-> t:uint16
-> i:size_nat{i < cdf_table_len a}
-> res:nat{res = 0 \/ res = 1} | {
"checked_file": "/",
"dependencies": [
"Spec.Matrix.fst.checked",
"Spec.Frodo.Params.fst.checked",
"Spec.Frodo.Lemmas.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lib.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Frodo.Sample.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Spec.Matrix",
"short_module": "Matrix"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Spec.Frodo.Params",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Frodo.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Matrix",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteSequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Frodo",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Frodo",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Frodo.Params.frodo_alg ->
t: Lib.IntTypes.uint16 ->
i: Lib.IntTypes.size_nat{i < Spec.Frodo.Params.cdf_table_len a}
-> res: Prims.nat{res = 0 \/ res = 1} | Prims.Tot | [
"total"
] | [] | [
"Spec.Frodo.Params.frodo_alg",
"Lib.IntTypes.uint16",
"Lib.IntTypes.size_nat",
"Prims.b2t",
"Prims.op_LessThan",
"Spec.Frodo.Params.cdf_table_len",
"Prims.op_GreaterThan",
"Lib.IntTypes.v",
"Lib.IntTypes.U16",
"Lib.IntTypes.SEC",
"Lib.Sequence.op_String_Access",
"Spec.Frodo.Params.cdf_table",
"Prims.bool",
"Prims.nat",
"Prims.l_or",
"Prims.op_Equality",
"Prims.int"
] | [] | false | false | false | false | false | let frodo_sample_f a t i =
| if v t > v (cdf_table a).[ i ] then 1 else 0 | false |
Spec.Frodo.Sample.fst | Spec.Frodo.Sample.frodo_sample_res | val frodo_sample_res:
a:frodo_alg
-> sign:uint16{v sign <= 1}
-> e:nat{e < cdf_table_len a}
-> uint16 | val frodo_sample_res:
a:frodo_alg
-> sign:uint16{v sign <= 1}
-> e:nat{e < cdf_table_len a}
-> uint16 | let frodo_sample_res a r0 e =
let open FStar.Math.Lib in
let e = (powx (-1) (v r0)) * e in
assert_norm (powx (-1) 1 == -1);
assert_norm (powx (-1) 0 == 1);
assert (-cdf_table_len a < e /\ e < cdf_table_len a);
u16 (e % modulus U16) | {
"file_name": "specs/frodo/Spec.Frodo.Sample.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 23,
"end_line": 53,
"start_col": 0,
"start_line": 47
} | module Spec.Frodo.Sample
open FStar.Mul
open Lib.IntTypes
open Lib.Sequence
open Lib.ByteSequence
open Spec.Matrix
open Spec.Frodo.Lemmas
open Spec.Frodo.Params
module LSeq = Lib.Sequence
module Matrix = Spec.Matrix
module Loops = Lib.LoopCombinators
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
val frodo_sample_f:
a:frodo_alg
-> t:uint16
-> i:size_nat{i < cdf_table_len a}
-> res:nat{res = 0 \/ res = 1}
let frodo_sample_f a t i =
if v t > v (cdf_table a).[i] then 1 else 0
val frodo_sample_fc:
a:frodo_alg
-> t:uint16
-> i:size_nat{i <= cdf_table_len a}
-> GTot (res:nat{0 <= res /\ res <= i})
(decreases i)
let rec frodo_sample_fc a t i =
if i = 0 then 0
else frodo_sample_f a t (i - 1) + frodo_sample_fc a t (i - 1)
val frodo_sample_res:
a:frodo_alg
-> sign:uint16{v sign <= 1}
-> e:nat{e < cdf_table_len a}
-> uint16 | {
"checked_file": "/",
"dependencies": [
"Spec.Matrix.fst.checked",
"Spec.Frodo.Params.fst.checked",
"Spec.Frodo.Lemmas.fst.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lib.fst.checked"
],
"interface_file": false,
"source_file": "Spec.Frodo.Sample.fst"
} | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Spec.Matrix",
"short_module": "Matrix"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Spec.Frodo.Params",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Frodo.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Matrix",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteSequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Frodo",
"short_module": null
},
{
"abbrev": false,
"full_module": "Spec.Frodo",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: Spec.Frodo.Params.frodo_alg ->
sign: Lib.IntTypes.uint16{Lib.IntTypes.v sign <= 1} ->
e: Prims.nat{e < Spec.Frodo.Params.cdf_table_len a}
-> Lib.IntTypes.uint16 | Prims.Tot | [
"total"
] | [] | [
"Spec.Frodo.Params.frodo_alg",
"Lib.IntTypes.uint16",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Lib.IntTypes.v",
"Lib.IntTypes.U16",
"Lib.IntTypes.SEC",
"Prims.nat",
"Prims.op_LessThan",
"Spec.Frodo.Params.cdf_table_len",
"Lib.IntTypes.u16",
"Prims.op_Modulus",
"Lib.IntTypes.modulus",
"Prims.unit",
"Prims._assert",
"Prims.l_and",
"Prims.op_Minus",
"FStar.Pervasives.assert_norm",
"Prims.eq2",
"Prims.int",
"FStar.Math.Lib.powx",
"FStar.Mul.op_Star"
] | [] | false | false | false | false | false | let frodo_sample_res a r0 e =
| let open FStar.Math.Lib in
let e = (powx (- 1) (v r0)) * e in
assert_norm (powx (- 1) 1 == - 1);
assert_norm (powx (- 1) 0 == 1);
assert (- cdf_table_len a < e /\ e < cdf_table_len a);
u16 (e % modulus U16) | false |
MerkleTree.Low.fst | MerkleTree.Low.construct_rhs | val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j)) | val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j)) | let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 5,
"end_line": 1509,
"start_col": 0,
"start_line": 1354
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
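// Illustrative pure sketch, not part of this module (the name is hypothetical):
// conceptually, for the simple case i = 0, level `lv` holds an unpaired
// "rightmost" node exactly when the node count at that level is odd; these are
// the levels whose hashes `rhs` caches so that paths and the root can be
// computed without recomputing them.
noextract
let rec rightmost_levels_sketch (lv:nat{lv <= 32}) (j:nat)
  : Tot (list nat) (decreases (32 - lv)) =
  if lv = 32 || j = 0 then []
  else if j % 2 = 1 then lv :: rightmost_levels_sketch (lv + 1) (j / 2)
  else rightmost_levels_sketch (lv + 1) (j / 2)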
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
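// Worked example, illustrative only (not part of this module): with i = 1 and
// j = 5 we have offset_of 1 = 0, so level 0 must hold 5 - 0 = 5 hashes; the
// recursion then forces sizes 2, 1, 0, 0, ... at the higher levels since
// (i, j) becomes (0, 2), (0, 1), (0, 0), ... A hypothetical pure helper for
// the per-level count `j - offset_of i`:
noextract
let level_size_sketch (i:nat) (j:nat{i <= j}) : nat =
  j - (if i % 2 = 0 then i else i - 1)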
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//      BEFORE INSERTION          AFTER INSERTION
// lv
// 0    h0 h1 h2          ====>   h0 h1 h2 h3
// 1    h01                       h01 h23
// 2                              h03
//
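// Illustrative pure sketch, not part of this module (the name is hypothetical):
// the accumulator is combined with an existing hash once per consecutive odd
// value of j while halving, i.e. once per trailing one bit of j. For j = 3 as
// in the diagram above, the combination happens while processing levels 0 and
// 1, and level 2 just receives the final accumulator.
noextract
let rec insert_compressions_sketch (lv:nat{lv <= 32}) (j:nat)
  : Tot nat (decreases (32 - lv)) =
  if lv = 32 || j % 2 = 0 then 0
  else 1 + insert_compressions_sketch (lv + 1) (j / 2)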
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
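// A hedged usage sketch (not part of this module; `r`, `nrid`, `leaf` and
// `tmp` are hypothetical names): because `mt_insert` consumes `v` as its
// accumulator, a caller that wants to keep the original hash should insert a
// copy instead, e.g.
//   let nrid = HST.new_region r in
//   let tmp = rg_alloc (hreg hsz) nrid in
//   Cpy?.copy (hcpy hsz) hsz leaf tmp;  // `leaf` itself stays untouched
//   mt_insert hsz mt tmp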
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
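// For intuition (a hedged reading of the postcondition above, not an extra
// guarantee): `create_empty_mt` yields a tree with `i = 0ul` and `j = 0ul`,
// and `mt_insert` bumps `j` by one, so the returned tree lifts to
// `MTH.mt_create (U32.v hsz) hash_spec (Rgl?.r_repr (hreg hsz) h0 init)`,
// i.e. a one-leaf tree with `i = 0ul` and `j = 1ul`.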
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
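// A worked unfolding for intuition (hedged; it follows directly from the
// definition above): for a three-element sequence `hs` with `i = 0` and `j = 3`,
//   lift_path_ h hs 0 3
//   == S.snoc (lift_path_ h hs 0 2) (Rgl?.r_repr (hreg hsz) h (S.index hs 2))
//   == S.snoc (S.snoc (S.snoc S.empty
//        (Rgl?.r_repr (hreg hsz) h (S.index hs 0)))
//        (Rgl?.r_repr (hreg hsz) h (S.index hs 1)))
//        (Rgl?.r_repr (hreg hsz) h (S.index hs 2))
// i.e. the low-level hashes are lifted left-to-right into the high-level path.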
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
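// A hedged lifecycle sketch (not part of this module; `mtr`, `r` and the
// path-filling step are assumptions): a path is typically allocated once,
// reused across queries by clearing it, and freed at the end.
//   let p = init_path hsz mtr r in   // fresh, empty path
//   ... fill `p` via a path-retrieval operation defined later ...
//   clear_path mtr p;                // drop the hashes, keep the allocation
//   free_path p                      // release the vector and the pointer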
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
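// A worked example for intuition (hedged; it mirrors the branch structure of
// the implementation, assuming the initial call has `actd = false`): for a
// tree with `i = 0ul` and `j = 5ul`,
// - level 0: j = 5 is odd and actd is false, so acc := hs[0][4];
// - level 1: j = 2 is even, so nothing is recorded at this level;
// - level 2: j = 1 is odd and actd is now true, so rhs[2] := acc and
//   acc := hash(hs[2][0], acc);
// - level 3: j = 0, the recursion stops and acc holds the Merkle root.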
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 1,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 250,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
lv: LowStar.Vector.uint32_t{lv <= MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
rhs:
MerkleTree.Low.Datastructures.hash_vec
{LowStar.Vector.size_of rhs = MerkleTree.Low.merkle_tree_size_lg} ->
i: MerkleTree.Low.index_t ->
j: MerkleTree.Low.index_t{i <= j && FStar.UInt32.v j < Prims.pow2 (32 - FStar.UInt32.v lv)} ->
acc: MerkleTree.Low.Datastructures.hash ->
actd: Prims.bool ->
hash_fun: MerkleTree.Low.Hashfunctions.hash_fun_t
-> FStar.HyperStack.ST.ST Prims.unit | FStar.HyperStack.ST.ST | [
""
] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Ghost.erased",
"MerkleTree.Spec.hash_fun_t",
"FStar.UInt32.v",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.index_t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"Prims.pow2",
"FStar.Integers.op_Subtraction",
"Prims.bool",
"MerkleTree.Low.Hashfunctions.hash_fun_t",
"FStar.UInt32.t",
"FStar.UInt32.__uint_to_t",
"Prims._assert",
"Prims.eq2",
"FStar.Pervasives.Native.tuple2",
"MerkleTree.New.High.hashes",
"Prims.int",
"FStar.Seq.Base.length",
"MerkleTree.New.High.hash",
"MerkleTree.New.High.construct_rhs",
"FStar.Ghost.reveal",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"LowStar.Regional.regional",
"MerkleTree.Low.Datastructures.hvvreg",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Pervasives.Native.Mktuple2",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"MerkleTree.New.High.hs_wf_elts",
"LowStar.RVector.as_seq",
"MerkleTree.Low.mt_safe_elts_spec",
"MerkleTree.Low.mt_safe_elts",
"LowStar.RVector.rv_inv",
"FStar.Integers.op_Percent",
"MerkleTree.New.High.construct_rhs_even",
"MerkleTree.Low.construct_rhs",
"FStar.Integers.op_Plus",
"FStar.Integers.op_Slash",
"MerkleTree.Low.mt_safe_elts_rec",
"FStar.Math.Lemmas.pow2_double_mult",
"MerkleTree.New.High.construct_rhs_odd",
"Spec.Hash.Definitions.bytes",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"Prims.op_GreaterThan",
"Lib.IntTypes.uint8",
"FStar.Seq.Base.index",
"LowStar.Regional.__proj__Rgl__item__repr",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.upd",
"FStar.Seq.Base.seq",
"MerkleTree.Low.Datastructures.hash_vv_as_seq_get_index",
"LowStar.RVector.as_seq_preserved",
"LowStar.Monotonic.Buffer.loc_region_only",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Buffer.trivial_preorder",
"LowStar.RVector.rv_inv_preserved",
"MerkleTree.Low.mt_safe_elts_preserved",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Vector.index",
"LowStar.Vector.vector",
"MerkleTree.Low.Datastructures.hash_vv_rv_inv_r_inv",
"MerkleTree.Low.mt_safe_elts_head",
"LowStar.Vector.frameOf",
"LowStar.Vector.loc_vector_within_included",
"LowStar.Vector.get",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"LowStar.RVector.assign_copy",
"MerkleTree.Low.Datastructures.hcpy",
"LowStar.RVector.__proj__Cpy__item__copy",
"MerkleTree.Low.Datastructures.hash_vv_rv_inv_disjoint",
"MerkleTree.Low.offset_of"
] | [
"recursion"
] | false | true | false | false | false | let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
| let hh0 = HST.get () in
if j = 0ul
then
(assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz) (U32.v lv) (RV.as_seq hh0 hs) (U32.v i) (U32.v j));
let hh1 = HST.get () in
assert (MTH.construct_rhs #(U32.v hsz)
#(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i)
(U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc)))
else
let ofs = offset_of i in
(if j % 2ul = 0ul
then
(Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz)
#hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i)
(U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd;
assert (MTH.construct_rhs #(U32.v hsz)
#hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i)
(U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc)))
else
(if actd
then
(RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
Rgl?.r_sep (hreg hsz) acc (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved hs (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.as_seq_preserved hs (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved (V.get hh0 hs lv)
(B.loc_all_regions_from false (V.frameOf rhs))
hh0
hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv) (Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
mt_safe_elts_preserved lv hs i j (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)))
else
(mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv)) (U32.v j - 1 - U32.v ofs)));
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv) (Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv)) (U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz)
#hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2)
(U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc)
true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz)
#hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i)
(U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd;
assert (MTH.construct_rhs #(U32.v hsz)
#hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i)
(U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc)))) | false |