effect (stringclasses, 48 values) | original_source_type (stringlengths, 0-23k) | opens_and_abbrevs (listlengths, 2-92) | isa_cross_project_example (bool, 1 class) | source_definition (stringlengths, 9-57.9k) | partial_definition (stringlengths, 7-23.3k) | is_div (bool, 2 classes) | is_type (null) | is_proof (bool, 2 classes) | completed_definiton (stringlengths, 1-250k) | dependencies (dict) | effect_flags (sequencelengths, 0-2) | ideal_premises (sequencelengths, 0-236) | mutual_with (sequencelengths, 0-11) | file_context (stringlengths, 0-407k) | interleaved (bool, 1 class) | is_simply_typed (bool, 2 classes) | file_name (stringlengths, 5-48) | vconfig (dict) | is_simple_lemma (null) | source_type (stringlengths, 10-23k) | proof_features (sequencelengths, 0-1) | name (stringlengths, 8-95) | source (dict) | verbose_type (stringlengths, 1-7.42k) | source_range (dict) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v | let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) = | false | null | false | forall (ty: Type u#a) (v: ty). {:pattern index (singleton v) 0} index (singleton v) 0 == v | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"FStar.Sequence.Base.singleton_length_one_fact",
"Prims.l_Forall",
"Prims.eq2",
"FStar.Sequence.Base.index",
"FStar.Sequence.Base.singleton",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val index_into_singleton_fact : _: Prims.squash FStar.Sequence.Base.singleton_length_one_fact -> Prims.logical | [] | FStar.Sequence.Base.index_into_singleton_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | _: Prims.squash FStar.Sequence.Base.singleton_length_one_fact -> Prims.logical | {
"end_col": 31,
"end_line": 211,
"start_col": 2,
"start_line": 210
} |
|
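The row above records `index_into_singleton_fact` together with the prefix of FStar.Sequence.Base.fsti that precedes it (its `file_context` column). As a small, purely illustrative sketch of the interface those context lines declare, the client module below (its name and contents are invented for this note and are not part of any dataset row) builds sequences with the `$::` (build) and `$+` (append) aliases; it relies only on the `val` declarations shown in the context, not on any `_fact` predicate.

module SequenceBaseDemo
open FStar.Sequence.Base

// Dafny's Seq#Build(Seq#Build(Empty, 1), 2), written with the $:: alias for build.
let s12 : seq int = (empty $:: 1) $:: 2

// Dafny's Seq#Append(s12, Seq#Singleton(3)), written with the $+ alias for append.
let s123 : seq int = s12 $+ singleton 3
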
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n | let take_length_fact = | false | null | false | forall (ty: Type u#a) (s: seq ty) (n: nat). {:pattern length (take s n)}
n <= length s ==> length (take s n) = n | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Sequence.Base.length",
"Prims.op_Equality",
"FStar.Sequence.Base.take"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val take_length_fact : Prims.logical | [] | FStar.Sequence.Base.take_length_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.logical | {
"end_col": 43,
"end_line": 350,
"start_col": 2,
"start_line": 349
} |
|
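The row above records `take_length_fact`. The `_fact` definitions are plain propositions, so a client only benefits from one once it is available as a hypothesis; this excerpt does not show how the library arranges that, and the `private` bindings cannot be named from outside the module. The sketch below (an illustration, not library code) therefore restates the fact locally, specialized to `seq int`, and shows how its `{:pattern length (take s n)}` trigger lets the SMT solver discharge a length goal about `take`.

module SequenceTakeDemo
open FStar.Sequence.Base

// Local restatement of take_length_fact, specialized to int sequences.
let take_length_fact' =
  forall (s: seq int) (n: nat).{:pattern length (take s n)}
    n <= length s ==> length (take s n) = n

// With the fact as a hypothesis, its pattern fires on `length (take s n)` in the
// goal and the solver concludes immediately.
let take_length_demo (s: seq int) (n: nat{n <= length s})
  : Lemma (requires take_length_fact')
          (ensures length (take s n) = n)
  = ()
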
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0)) | let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) = | false | null | false | forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)}).
{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n) /\
(length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0)) | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"FStar.Sequence.Base.append_sums_lengths_fact",
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Sequence.Base.length",
"FStar.Sequence.Base.append",
"Prims.l_and",
"Prims.l_imp",
"Prims.eq2",
"FStar.Sequence.Base.index",
"Prims.op_LessThanOrEqual",
"Prims.op_Subtraction",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0)))); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val index_after_append_fact : _: Prims.squash FStar.Sequence.Base.append_sums_lengths_fact -> Prims.logical | [] | FStar.Sequence.Base.index_after_append_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | _: Prims.squash FStar.Sequence.Base.append_sums_lengths_fact -> Prims.logical | {
"end_col": 78,
"end_line": 223,
"start_col": 2,
"start_line": 220
} |
|
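The row above records `index_after_append_fact`. The facts are designed to compose through their SMT patterns: instantiating one fact exposes terms that trigger another. The sketch below (again illustrative, with the facts restated locally at `seq int` because the library's own bindings are private) combines `append_sums_lengths_fact` and `singleton_length_one_fact` from the context above to show that appending a singleton grows a sequence's length by exactly one.

module SequenceAppendDemo
open FStar.Sequence.Base

// Local restatements, specialized to int sequences, of two facts from the file context.
let append_sums_lengths_fact' =
  forall (s0: seq int) (s1: seq int).{:pattern length (append s0 s1)}
    length (append s0 s1) = length s0 + length s1

let singleton_length_one_fact' =
  forall (v: int).{:pattern length (singleton v)} length (singleton v) = 1

// The goal term `length (append s (singleton x))` triggers the append fact, which in
// turn exposes `length (singleton x)` and triggers the singleton fact.
let append_singleton_demo (s: seq int) (x: int)
  : Lemma (requires append_sums_lengths_fact' /\ singleton_length_one_fact')
          (ensures length (append s (singleton x)) = length s + 1)
  = ()
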
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j) | let equal_def_fact = | false | null | false | forall (ty: Type u#a) (s0: seq ty) (s1: seq ty). {:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j. {:pattern index s0 j\/index s1 j} 0 <= j && j < length s0 ==> index s0 j == index s1 j) | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.l_iff",
"FStar.Sequence.Base.equal",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"FStar.Sequence.Base.length",
"Prims.int",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual",
"Prims.op_LessThan",
"Prims.l_imp",
"Prims.op_AmpAmp",
"Prims.op_LessThanOrEqual",
"FStar.Sequence.Base.index"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j))); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val equal_def_fact : Prims.logical | [] | FStar.Sequence.Base.equal_def_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.logical | {
"end_col": 60,
"end_line": 316,
"start_col": 2,
"start_line": 312
} |
|
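The row above records `equal_def_fact`, which characterizes `equal` pointwise; together with `extensionality_fact` it lets clients promote elementwise agreement to propositional equality of sequences. A minimal illustrative sketch (same caveats as the earlier ones: local restatement at `seq int`, invented module name) derives reflexivity of `equal` from the definition alone.

module SequenceEqualDemo
open FStar.Sequence.Base

// Local restatement of equal_def_fact, specialized to int sequences.
let equal_def_fact' =
  forall (s0: seq int) (s1: seq int).{:pattern equal s0 s1}
    equal s0 s1 <==>
    length s0 == length s1 /\
    (forall j.{:pattern index s0 j \/ index s1 j}
       0 <= j && j < length s0 ==> index s0 j == index s1 j)

// Instantiating the definition at (s, s) reduces the goal to trivial pointwise equalities.
let equal_refl_demo (s: seq int)
  : Lemma (requires equal_def_fact')
          (ensures equal s s)
  = ()
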
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x) | let drop_contains_equiv_exists_fact = | false | null | false | forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty). {:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat). {:pattern index s i} n <= i && i < length s /\ index s i == x) | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Sequence.Base.length",
"Prims.l_iff",
"FStar.Sequence.Base.contains",
"FStar.Sequence.Base.drop",
"Prims.l_Exists",
"Prims.l_and",
"Prims.op_AmpAmp",
"Prims.op_LessThan",
"Prims.eq2",
"FStar.Sequence.Base.index"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
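/// The next definition is an editorial usage sketch, not part of the original
/// file; the name `example_head_of_append` is made up. Assuming the two facts
/// above as squashed hypotheses -- the same idiom `index_after_append_fact`
/// itself uses -- one would expect the SMT solver to discharge it via the
/// stated patterns, possibly with extra hints.
private let example_head_of_append
    (sq: squash (append_sums_lengths_fact u#0))
    (_ : squash (index_after_append_fact sq))
    (s: seq int{length s > 0}) (t: seq int)
  : Lemma (index (append s t) 0 == index s 0)
  = ()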
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
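/// Another editorial sketch (not part of the original file; the name is made
/// up): assuming the two update facts above as squashed hypotheses, reading
/// back the updated position should, one expects, yield the written value.
private let example_update_then_read
    (_: squash (update_maintains_length_fact u#0))
    (_: squash (update_then_index_fact u#0))
    (s: seq int) (i: nat{i < length s}) (v: int)
  : Lemma (index (update s i v) i == v)
  = ()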
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x)); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val drop_contains_equiv_exists_fact : Prims.logical | [] | FStar.Sequence.Base.drop_contains_equiv_exists_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.logical | {
"end_col": 83,
"end_line": 301,
"start_col": 2,
"start_line": 299
} |
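The record above captures `drop_contains_equiv_exists_fact`, which relates membership in `drop s n` to an index witness in `s`. The following F* sketch is not part of the dataset row: the lemma name and the `u#0` instantiation are invented, the fact is assumed as a squashed hypothesis (the idiom the file itself uses), and since the `_fact` predicates are `private` such a sketch would have to live in the same module. One would expect it to verify via the stated SMT patterns, though extra hints may be needed.

private let example_drop_contains
    (_: squash (drop_contains_equiv_exists_fact u#0))
    (s: seq int) (n: nat{n <= length s}) (x: int)
  : Lemma (requires contains (drop s n) x)
          (ensures  exists (i: nat). n <= i && i < length s /\ index s i == x)
  = ()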
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j | let index_into_take_fact (_: squash (take_length_fact u#a)) = | false | null | false | forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j\/index s j; take s n}
j < n && n <= length s ==> index (take s n) j == index s j | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"FStar.Sequence.Base.take_length_fact",
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_AmpAmp",
"Prims.op_LessThan",
"Prims.op_LessThanOrEqual",
"FStar.Sequence.Base.length",
"Prims.eq2",
"FStar.Sequence.Base.index",
"FStar.Sequence.Base.take",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j)); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val index_into_take_fact : _: Prims.squash FStar.Sequence.Base.take_length_fact -> Prims.logical | [] | FStar.Sequence.Base.index_into_take_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | _: Prims.squash FStar.Sequence.Base.take_length_fact -> Prims.logical | {
"end_col": 62,
"end_line": 364,
"start_col": 2,
"start_line": 362
} |
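The record above is about `index_into_take_fact`: within the first `n` positions, `take s n` agrees with `s`. The following is a hedged usage sketch, not part of the dataset row; the names are invented and both facts are assumed as squashed hypotheses, mirroring the file's own style, so it would have to sit in the same module as the `private` facts.

private let example_index_into_take
    (sq: squash (take_length_fact u#0))
    (_ : squash (index_into_take_fact sq))
    (s: seq int) (n: nat{n <= length s}) (j: nat{j < n})
  : Lemma (index (take s n) j == index s j)
  = ()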
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j) | let is_prefix_def_fact = | false | null | false | forall (ty: Type u#a) (s0: seq ty) (s1: seq ty). {:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1 /\
(forall (j: nat). {:pattern index s0 j\/index s1 j} j < length s0 ==> index s0 j == index s1 j) | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.l_iff",
"FStar.Sequence.Base.is_prefix",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Sequence.Base.length",
"Prims.nat",
"Prims.l_imp",
"Prims.op_LessThan",
"Prims.eq2",
"FStar.Sequence.Base.index"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j))); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val is_prefix_def_fact : Prims.logical | [] | FStar.Sequence.Base.is_prefix_def_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.logical | {
"end_col": 52,
"end_line": 341,
"start_col": 2,
"start_line": 337
} |
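The record above defines `is_prefix_def_fact`. As a small usage sketch (not part of the dataset row; the lemma name is invented and the fact is assumed as a squashed hypothesis, so it belongs in the same module as the `private` fact), the length inequality should follow directly from the definition's pattern:

private let example_prefix_length
    (_: squash (is_prefix_def_fact u#0))
    (s0: seq int) (s1: seq int)
  : Lemma (requires is_prefix s0 s1)
          (ensures  length s0 <= length s1)
  = ()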
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t | let append_then_take_or_drop_fact (_: squash (append_sums_lengths_fact u#a)) = | false | null | false | forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n\/drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"FStar.Sequence.Base.append_sums_lengths_fact",
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_Equality",
"FStar.Sequence.Base.length",
"Prims.l_and",
"Prims.eq2",
"FStar.Sequence.Base.take",
"FStar.Sequence.Base.append",
"FStar.Sequence.Base.drop",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val append_then_take_or_drop_fact : _: Prims.squash FStar.Sequence.Base.append_sums_lengths_fact -> Prims.logical | [] | FStar.Sequence.Base.append_then_take_or_drop_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | _: Prims.squash FStar.Sequence.Base.append_sums_lengths_fact -> Prims.logical | {
"end_col": 73,
"end_line": 415,
"start_col": 2,
"start_line": 413
} |
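The record above defines `append_then_take_or_drop_fact`: splitting `append s t` at `length s` recovers the two operands. Below is a hedged sketch, not part of the dataset row; the names and the `u#0` instantiation are invented, and both facts are assumed as squashed hypotheses in the file's own idiom. One would expect the stated patterns to let the SMT solver discharge it, though hints may be needed.

private let example_append_split
    (sq: squash (append_sums_lengths_fact u#0))
    (_ : squash (append_then_take_or_drop_fact sq))
    (s: seq int) (t: seq int)
  : Lemma (take (append s t) (length s) == s /\ drop (append s t) (length s) == t)
  = ()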
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n) | let update_then_index_fact = | false | null | false | forall (ty: Type u#a)
(s: seq ty)
(i: nat{i < length s})
(v: ty)
(n: nat{n < length (update s i v)}).
{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v) /\ (i <> n ==> index (update s i v) n == index s n) | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Sequence.Base.length",
"FStar.Sequence.Base.update",
"Prims.l_imp",
"Prims.l_and",
"Prims.op_Equality",
"Prims.l_or",
"Prims.eq2",
"FStar.Sequence.Base.index",
"Prims.op_disEquality"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
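/// (Editor's illustrative sketch, not part of the original file: a hypothetical
/// client lemma showing how `singleton_length_one_fact`, once assumed as a
/// `squash` hypothesis, discharges a concrete instance via its quantifier
/// pattern. The name `example_singleton_length` is invented and the sketch has
/// not been machine-checked.)
private let example_singleton_length (x: int) (_: squash (singleton_length_one_fact u#0))
  : Lemma (length (singleton x) = 1)
  = ()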
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
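/// (Editor's illustrative sketch, not part of the original file: assuming
/// `append_sums_lengths_fact`, the length of a three-way concatenation follows
/// from two instantiations of the quantifier. The name
/// `example_append_length_three` is invented and the sketch has not been
/// machine-checked.)
private let example_append_length_three (#ty: Type0) (a b c: seq ty)
    (_: squash (append_sums_lengths_fact u#0))
  : Lemma (length (append (append a b) c) = length a + length b + length c)
  = ()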
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
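/// (Editor's illustrative sketch, not part of the original file: a hypothetical
/// client of `update_maintains_length_fact`, stating the concrete instance that
/// an in-bounds update preserves the length. The name
/// `example_update_preserves_length` is invented and the sketch has not been
/// machine-checked.)
private let example_update_preserves_length (#ty: Type0) (s: seq ty)
    (i: nat{i < length s}) (v: ty) (_: squash (update_maintains_length_fact u#0))
  : Lemma (length (update s i v) = length s)
  = ()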
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n))); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val update_then_index_fact : Prims.logical | [] | FStar.Sequence.Base.update_then_index_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.logical | {
"end_col": 59,
"end_line": 246,
"start_col": 2,
"start_line": 242
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let take_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
n <= i && i < length s ==>
take (update s i v) n == take s n | let take_ignores_out_of_range_update_fact (_: squash (update_maintains_length_fact u#a)) = | false | null | false | forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat). {:pattern take (update s i v) n}
n <= i && i < length s ==> take (update s i v) n == take s n | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"FStar.Sequence.Base.update_maintains_length_fact",
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_AmpAmp",
"Prims.op_LessThanOrEqual",
"Prims.op_LessThan",
"FStar.Sequence.Base.length",
"Prims.eq2",
"FStar.Sequence.Base.take",
"FStar.Sequence.Base.update",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
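/// (Editor's illustrative sketch, not part of the original file: a hypothetical
/// consequence of `contains_iff_exists_index_fact`; the index `i` itself serves
/// as the existential witness. The name `example_index_implies_contains` is
/// invented and the sketch has not been machine-checked.)
private let example_index_implies_contains (#ty: Type0) (s: seq ty) (i: nat{i < length s})
    (_: squash (contains_iff_exists_index_fact u#0))
  : Lemma (contains s (index s i))
  = ()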
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
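/// (Editor's illustrative sketch, not part of the original file: the empty
/// sequence contains no integer, as a direct instance of
/// `empty_doesnt_contain_anything_fact`. The name `example_empty_contains_nothing`
/// is invented and the sketch has not been machine-checked.)
private let example_empty_contains_nothing (x: int)
    (_: squash (empty_doesnt_contain_anything_fact u#0))
  : Lemma (~(contains (empty #int) x))
  = ()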
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
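/// (Editor's illustrative sketch, not part of the original file: a hypothetical
/// client of `extensionality_fact`, promoting the `equal` relation to
/// definitional equality. The name `example_equal_implies_eq` is invented and
/// the sketch has not been machine-checked.)
private let example_equal_implies_eq (#ty: Type0) (a b: seq ty)
    (_: squash (extensionality_fact u#0))
  : Lemma (requires equal a b) (ensures a == b)
  = ()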
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
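/// (Editor's illustrative sketch, not part of the original file: the concrete
/// instance of `take_length_fact` a client typically needs when reasoning about
/// prefixes. The name `example_take_length` is invented and the sketch has not
/// been machine-checked.)
private let example_take_length (#ty: Type0) (s: seq ty) (n: nat{n <= length s})
    (_: squash (take_length_fact u#0))
  : Lemma (length (take s n) = n)
  = ()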
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t);
private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t
/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n <= Seq#Length(s) ==>
/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) );
private let take_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
i < n && n <= length s ==>
take (update s i v) n == update (take s n) i v
/// We represent the following Dafny axiom with `take_ignores_out_of_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// n <= i && i < Seq#Length(s) ==> Seq#Take(Seq#Update(s, i, v), n) == Seq#Take(s, n)); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val take_ignores_out_of_range_update_fact : _: Prims.squash FStar.Sequence.Base.update_maintains_length_fact -> Prims.logical | [] | FStar.Sequence.Base.take_ignores_out_of_range_update_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | _: Prims.squash FStar.Sequence.Base.update_maintains_length_fact -> Prims.logical | {
"end_col": 37,
"end_line": 439,
"start_col": 2,
"start_line": 437
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i) | let index_into_build_fact (_: squash (build_increments_length_fact u#a)) = | false | null | false | forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)}).
{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v) /\
(i <> length s ==> index (build s v) i == index s i) | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"FStar.Sequence.Base.build_increments_length_fact",
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Sequence.Base.length",
"FStar.Sequence.Base.build",
"Prims.l_and",
"Prims.l_imp",
"Prims.op_Equality",
"Prims.eq2",
"FStar.Sequence.Base.index",
"Prims.op_disEquality",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
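/// (Editor's illustrative sketch, not part of the original file: assuming
/// `build_increments_length_fact`, building twice grows the length by exactly
/// two. The name `example_build_twice` is invented and the sketch has not been
/// machine-checked.)
private let example_build_twice (#ty: Type0) (s: seq ty) (v w: ty)
    (_: squash (build_increments_length_fact u#0))
  : Lemma (length (build (build s v) w) = 2 + length s)
  = ()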
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i))); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val index_into_build_fact : _: Prims.squash FStar.Sequence.Base.build_increments_length_fact -> Prims.logical | [] | FStar.Sequence.Base.index_into_build_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | _: Prims.squash FStar.Sequence.Base.build_increments_length_fact -> Prims.logical | {
"end_col": 59,
"end_line": 194,
"start_col": 2,
"start_line": 191
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let take_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern length (take s i)}
i < length s ==> length (take s i) << length s | let take_ranks_less_fact = | false | null | false | forall (ty: Type u#a) (s: seq ty) (i: nat). {:pattern length (take s i)}
i < length s ==> length (take s i) << length s | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Sequence.Base.length",
"Prims.precedes",
"FStar.Sequence.Base.take"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
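/// (Editor's illustrative sketch, not part of the original file: the element
/// just appended by `build` is contained in the result, as an instance of
/// `build_contains_equiv_fact`. The name `example_build_contains_new_element`
/// is invented and the sketch has not been machine-checked.)
private let example_build_contains_new_element (#ty: Type0) (s: seq ty) (v: ty)
    (_: squash (build_contains_equiv_fact u#0))
  : Lemma (contains (build s v) v)
  = ()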
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t);
private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t
/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n <= Seq#Length(s) ==>
/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) );
private let take_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
i < n && n <= length s ==>
take (update s i v) n == update (take s n) i v
/// We represent the following Dafny axiom with `take_ignores_out_of_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// n <= i && i < Seq#Length(s) ==> Seq#Take(Seq#Update(s, i, v), n) == Seq#Take(s, n));
private let take_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
n <= i && i < length s ==>
take (update s i v) n == take s n
/// We represent the following Dafny axiom with `drop_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= n && n <= i && i < Seq#Length(s) ==>
/// Seq#Drop(Seq#Update(s, i, v), n) == Seq#Update(Seq#Drop(s, n), i-n, v) );
private let drop_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
n <= i && i < length s ==>
drop (update s i v) n == update (drop s n) (i - n) v
/// We represent the following Dafny axiom with `drop_ignores_out_of_range_update_fact`.
/// Jay noticed that it was unnecessarily weak, possibly due to a typo, so he reported this as
/// Dafny issue #1423 (https://github.com/dafny-lang/dafny/issues/1423) and updated it here.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n < Seq#Length(s) ==> Seq#Drop(Seq#Update(s, i, v), n) == Seq#Drop(s, n));
private let drop_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
i < n && n <= length s ==>
drop (update s i v) n == drop s n
/// We represent the following Dafny axiom with `drop_commutes_with_build_fact`.
///
/// axiom (forall<T> s: Seq T, v: T, n: int ::
/// { Seq#Drop(Seq#Build(s, v), n) }
/// 0 <= n && n <= Seq#Length(s) ==>
/// Seq#Drop(Seq#Build(s, v), n) == Seq#Build(Seq#Drop(s, n), v) );
private let drop_commutes_with_build_fact (_ : squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (n: nat).{:pattern drop (build s v) n}
n <= length s ==> drop (build s v) n == build (drop s n) v
/// We include the definition of `rank` among our facts.
private let rank_def_fact =
forall (ty: Type u#a) (v: ty).{:pattern rank v} rank v == v
/// We represent the following Dafny axiom with `element_ranks_less_fact`.
///
/// axiom (forall s: Seq Box, i: int ::
/// { DtRank($Unbox(Seq#Index(s, i)): DatatypeType) }
/// 0 <= i && i < Seq#Length(s) ==> DtRank($Unbox(Seq#Index(s, i)): DatatypeType) < Seq#Rank(s) );
private let element_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (index s i)}
i < length s ==> rank (index s i) << rank s
/// We represent the following Dafny axiom with `drop_ranks_less_fact`.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Drop(s, i)) }
/// 0 < i && i <= Seq#Length(s) ==> Seq#Rank(Seq#Drop(s, i)) < Seq#Rank(s) );
private let drop_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (drop s i)}
0 < i && i <= length s ==> rank (drop s i) << rank s
/// We represent the following Dafny axiom with
/// `take_ranks_less_fact`. However, since it isn't true in F* (which
/// has strong requirements for <<), we instead substitute length,
/// requiring decreases clauses to use length in this case.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Take(s, i)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Rank(Seq#Take(s, i)) < Seq#Rank(s) ); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val take_ranks_less_fact : Prims.logical | [] | FStar.Sequence.Base.take_ranks_less_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.logical | {
"end_col": 50,
"end_line": 514,
"start_col": 2,
"start_line": 513
} |
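The row above documents `take_ranks_less_fact`, whose comment notes that F*'s `<<` ordering is too strong for `take`, so decreases clauses fall back on `length`. A minimal, purely illustrative F* sketch of that usage pattern follows; `sum_all` is a hypothetical name, and the sketch assumes the facts defined in this file (in particular `drop_length_fact`) have been made available to the SMT solver, for example by a lemma that establishes them.

let rec sum_all (s: seq int)
  : Tot int (decreases (length s))        // termination measured by length, not rank
  = if length s = 0 then 0
    else index s 0 + sum_all (drop s 1)   // length (drop s 1) = length s - 1 given drop_length_fact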
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n) | let index_into_drop_fact (_: squash (drop_length_fact u#a)) = | false | null | false | forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat). {:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n) | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"FStar.Sequence.Base.drop_length_fact",
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThan",
"Prims.op_Subtraction",
"FStar.Sequence.Base.length",
"Prims.eq2",
"FStar.Sequence.Base.index",
"FStar.Sequence.Base.drop",
"Prims.op_Addition",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n)); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val index_into_drop_fact : _: Prims.squash FStar.Sequence.Base.drop_length_fact -> Prims.logical | [] | FStar.Sequence.Base.index_into_drop_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | _: Prims.squash FStar.Sequence.Base.drop_length_fact -> Prims.logical | {
"end_col": 62,
"end_line": 387,
"start_col": 2,
"start_line": 385
} |
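The row above documents `index_into_drop_fact`, which relates indexing into `drop s n` to indexing into `s` at offset `n`. A small illustrative instance follows; the lemma name is hypothetical, and both the statement and the trivial `()` proof assume the facts defined in this file (notably `drop_length_fact` and `index_into_drop_fact`) are in scope for the SMT solver.

let index_into_drop_example (s: seq int)
  : Lemma (requires length s >= 5)
          (ensures index (drop s 2) 1 == index s 3)   // the fact instantiated with n = 2, j = 1
  = ()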
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let drop_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (drop s i)}
0 < i && i <= length s ==> rank (drop s i) << rank s | let drop_ranks_less_fact = | false | null | false | forall (ty: Type u#a) (s: seq ty) (i: nat). {:pattern rank (drop s i)}
0 < i && i <= length s ==> rank (drop s i) << rank s | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_AmpAmp",
"Prims.op_LessThan",
"Prims.op_LessThanOrEqual",
"FStar.Sequence.Base.length",
"Prims.precedes",
"FStar.Sequence.Base.rank",
"FStar.Sequence.Base.drop"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t);
private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t
/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n <= Seq#Length(s) ==>
/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) );
private let take_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
i < n && n <= length s ==>
take (update s i v) n == update (take s n) i v
/// We represent the following Dafny axiom with `take_ignores_out_of_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// n <= i && i < Seq#Length(s) ==> Seq#Take(Seq#Update(s, i, v), n) == Seq#Take(s, n));
private let take_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
n <= i && i < length s ==>
take (update s i v) n == take s n
/// We represent the following Dafny axiom with `drop_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= n && n <= i && i < Seq#Length(s) ==>
/// Seq#Drop(Seq#Update(s, i, v), n) == Seq#Update(Seq#Drop(s, n), i-n, v) );
private let drop_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
n <= i && i < length s ==>
drop (update s i v) n == update (drop s n) (i - n) v
/// We represent the following Dafny axiom with `drop_ignores_out_of_range_update_fact`.
/// Jay noticed that it was unnecessarily weak, possibly due to a typo, so he reported this as
/// Dafny issue #1423 (https://github.com/dafny-lang/dafny/issues/1423) and updated it here.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n < Seq#Length(s) ==> Seq#Drop(Seq#Update(s, i, v), n) == Seq#Drop(s, n));
private let drop_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
i < n && n <= length s ==>
drop (update s i v) n == drop s n
/// We represent the following Dafny axiom with `drop_commutes_with_build_fact`.
///
/// axiom (forall<T> s: Seq T, v: T, n: int ::
/// { Seq#Drop(Seq#Build(s, v), n) }
/// 0 <= n && n <= Seq#Length(s) ==>
/// Seq#Drop(Seq#Build(s, v), n) == Seq#Build(Seq#Drop(s, n), v) );
private let drop_commutes_with_build_fact (_ : squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (n: nat).{:pattern drop (build s v) n}
n <= length s ==> drop (build s v) n == build (drop s n) v
/// We include the definition of `rank` among our facts.
private let rank_def_fact =
forall (ty: Type u#a) (v: ty).{:pattern rank v} rank v == v
/// We represent the following Dafny axiom with `element_ranks_less_fact`.
///
/// axiom (forall s: Seq Box, i: int ::
/// { DtRank($Unbox(Seq#Index(s, i)): DatatypeType) }
/// 0 <= i && i < Seq#Length(s) ==> DtRank($Unbox(Seq#Index(s, i)): DatatypeType) < Seq#Rank(s) );
private let element_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (index s i)}
i < length s ==> rank (index s i) << rank s
/// We represent the following Dafny axiom with `drop_ranks_less_fact`.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Drop(s, i)) }
/// 0 < i && i <= Seq#Length(s) ==> Seq#Rank(Seq#Drop(s, i)) < Seq#Rank(s) ); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val drop_ranks_less_fact : Prims.logical | [] | FStar.Sequence.Base.drop_ranks_less_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.logical | {
"end_col": 56,
"end_line": 501,
"start_col": 2,
"start_line": 500
} |
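The row above documents `drop_ranks_less_fact`, which lets a recursion that peels elements off with `drop` justify termination through `rank`. A purely illustrative sketch follows; `count_elements` is a hypothetical name, and the sketch assumes the facts defined in this file (including `rank_def_fact` and `drop_ranks_less_fact`) are available to the prover.

let rec count_elements (s: seq int)
  : Tot nat (decreases (rank s))          // rank (drop s 1) << rank s by drop_ranks_less_fact
  = if length s = 0 then 0
    else 1 + count_elements (drop s 1)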
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let drop_zero_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern drop s n}
n = 0 ==> drop s n == s | let drop_zero_fact = | false | null | false | forall (ty: Type u#a) (s: seq ty) (n: nat). {:pattern drop s n} n = 0 ==> drop s n == s | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_Equality",
"Prims.int",
"Prims.eq2",
"FStar.Sequence.Base.drop"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t);
private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t
/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n <= Seq#Length(s) ==>
/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) );
private let take_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
i < n && n <= length s ==>
take (update s i v) n == update (take s n) i v
/// We represent the following Dafny axiom with `take_ignores_out_of_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// n <= i && i < Seq#Length(s) ==> Seq#Take(Seq#Update(s, i, v), n) == Seq#Take(s, n));
private let take_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
n <= i && i < length s ==>
take (update s i v) n == take s n
/// We represent the following Dafny axiom with `drop_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= n && n <= i && i < Seq#Length(s) ==>
/// Seq#Drop(Seq#Update(s, i, v), n) == Seq#Update(Seq#Drop(s, n), i-n, v) );
private let drop_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
n <= i && i < length s ==>
drop (update s i v) n == update (drop s n) (i - n) v
/// We represent the following Dafny axiom with `drop_ignores_out_of_range_update_fact`.
/// Jay noticed that it was unnecessarily weak, possibly due to a typo, so he reported this as
/// Dafny issue #1423 (https://github.com/dafny-lang/dafny/issues/1423) and updated it here.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n < Seq#Length(s) ==> Seq#Drop(Seq#Update(s, i, v), n) == Seq#Drop(s, n));
private let drop_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
i < n && n <= length s ==>
drop (update s i v) n == drop s n
/// We represent the following Dafny axiom with `drop_commutes_with_build_fact`.
///
/// axiom (forall<T> s: Seq T, v: T, n: int ::
/// { Seq#Drop(Seq#Build(s, v), n) }
/// 0 <= n && n <= Seq#Length(s) ==>
/// Seq#Drop(Seq#Build(s, v), n) == Seq#Build(Seq#Drop(s, n), v) );
private let drop_commutes_with_build_fact (_ : squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (n: nat).{:pattern drop (build s v) n}
n <= length s ==> drop (build s v) n == build (drop s n) v
/// We include the definition of `rank` among our facts.
private let rank_def_fact =
forall (ty: Type u#a) (v: ty).{:pattern rank v} rank v == v
/// We represent the following Dafny axiom with `element_ranks_less_fact`.
///
/// axiom (forall s: Seq Box, i: int ::
/// { DtRank($Unbox(Seq#Index(s, i)): DatatypeType) }
/// 0 <= i && i < Seq#Length(s) ==> DtRank($Unbox(Seq#Index(s, i)): DatatypeType) < Seq#Rank(s) );
private let element_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (index s i)}
i < length s ==> rank (index s i) << rank s
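/// As an illustrative sketch (not part of this interface, and assuming these facts
/// have been made available to the SMT solver), `element_ranks_less_fact` together
/// with `rank_def_fact` is what lets a function recurse from a sequence-valued
/// field into one of its elements:
///
///   type tree (a: Type) = | Node : value: a -> children: seq (tree a) -> tree a
///
///   let rec leftmost_depth (#a: Type) (t: tree a) : Tot nat (decreases t) =
///     let Node _ cs = t in
///     if length cs = 0 then 1 else 1 + leftmost_depth (index cs 0)
///
/// Here `index cs 0 << cs` comes from the two facts above (since `rank` is the
/// identity), and `cs << t` from the subterm ordering; `tree` itself is only
/// accepted because `seq` is declared strictly positive in its argument.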
/// We represent the following Dafny axiom with `drop_ranks_less_fact`.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Drop(s, i)) }
/// 0 < i && i <= Seq#Length(s) ==> Seq#Rank(Seq#Drop(s, i)) < Seq#Rank(s) );
private let drop_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (drop s i)}
0 < i && i <= length s ==> rank (drop s i) << rank s
/// We represent the following Dafny axiom with
/// `take_ranks_less_fact`. However, since it isn't true in F* (which
/// has strong requirements for <<), we instead substitute length,
/// requiring decreases clauses to use length in this case.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Take(s, i)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Rank(Seq#Take(s, i)) < Seq#Rank(s) );
private let take_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern length (take s i)}
i < length s ==> length (take s i) << length s
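/// As an illustrative sketch (not part of this interface, and assuming the length
/// facts above are available to the SMT solver), a function that shrinks its
/// argument with `take` therefore states its measure as `length s` rather than `s`:
///
///   let rec steps_by_take (#ty: Type) (s: seq ty)
///     : Tot nat (decreases (length s))
///     = if length s = 0 then 0
///       else 1 + steps_by_take (take s (length s - 1))
///
/// The recursive call is justified by `take_length_fact`, which pins
/// `length (take s (length s - 1))` to `length s - 1`.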
/// We represent the following Dafny axiom with
/// `append_take_drop_ranks_less_fact`. However, since it isn't true
/// in F* (which has strong requirements for <<), we instead
/// substitute length, requiring decreases clauses to use
/// length in this case.
///
/// axiom (forall<T> s: Seq T, i: int, j: int ::
/// { Seq#Rank(Seq#Append(Seq#Take(s, i), Seq#Drop(s, j))) }
/// 0 <= i && i < j && j <= Seq#Length(s) ==>
/// Seq#Rank(Seq#Append(Seq#Take(s, i), Seq#Drop(s, j))) < Seq#Rank(s) );
private let append_take_drop_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat) (j: nat).{:pattern length (append (take s i) (drop s j))}
i < j && j <= length s ==> length (append (take s i) (drop s j)) << length s
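/// As an illustrative sketch (under the same assumptions as above), the usual
/// "delete one element and recurse" pattern is likewise measured by `length`:
///
///   let rec drain (#ty: Type) (s: seq ty) (i: nat{i < length s})
///     : Tot (seq ty) (decreases (length s))
///     = let s' = append (take s i) (drop s (i + 1)) in   (* s with element i deleted *)
///       if length s' = 0 then s' else drain s' 0
///
/// The function itself is degenerate; the point is that termination follows from
/// `length s' = length s - 1`, via the take, drop, and append length facts.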
/// We represent the following Dafny axiom with `drop_zero_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Drop(s, n) }
/// n == 0 ==> Seq#Drop(s, n) == s); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val drop_zero_fact : Prims.logical | [] | FStar.Sequence.Base.drop_zero_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.logical | {
"end_col": 27,
"end_line": 538,
"start_col": 2,
"start_line": 537
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let drop_commutes_with_build_fact (_ : squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (n: nat).{:pattern drop (build s v) n}
n <= length s ==> drop (build s v) n == build (drop s n) v | let drop_commutes_with_build_fact (_: squash (build_increments_length_fact u#a)) = | false | null | false | forall (ty: Type u#a) (s: seq ty) (v: ty) (n: nat). {:pattern drop (build s v) n}
n <= length s ==> drop (build s v) n == build (drop s n) v | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"FStar.Sequence.Base.build_increments_length_fact",
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Sequence.Base.length",
"Prims.eq2",
"FStar.Sequence.Base.drop",
"FStar.Sequence.Base.build",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Rather than representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following Dafny axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t);
private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t
/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n <= Seq#Length(s) ==>
/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) );
private let take_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
i < n && n <= length s ==>
take (update s i v) n == update (take s n) i v
/// We represent the following Dafny axiom with `take_ignores_out_of_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// n <= i && i < Seq#Length(s) ==> Seq#Take(Seq#Update(s, i, v), n) == Seq#Take(s, n));
private let take_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
n <= i && i < length s ==>
take (update s i v) n == take s n
/// We represent the following Dafny axiom with `drop_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= n && n <= i && i < Seq#Length(s) ==>
/// Seq#Drop(Seq#Update(s, i, v), n) == Seq#Update(Seq#Drop(s, n), i-n, v) );
private let drop_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
n <= i && i < length s ==>
drop (update s i v) n == update (drop s n) (i - n) v
/// We represent the following Dafny axiom with `drop_ignores_out_of_range_update_fact`.
/// Jay noticed that it was unnecessarily weak, possibly due to a typo, so he reported this as
/// Dafny issue #1423 (https://github.com/dafny-lang/dafny/issues/1423) and updated it here.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n < Seq#Length(s) ==> Seq#Drop(Seq#Update(s, i, v), n) == Seq#Drop(s, n));
private let drop_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
i < n && n <= length s ==>
drop (update s i v) n == drop s n
/// We represent the following Dafny axiom with `drop_commutes_with_build_fact`.
///
/// axiom (forall<T> s: Seq T, v: T, n: int ::
/// { Seq#Drop(Seq#Build(s, v), n) }
/// 0 <= n && n <= Seq#Length(s) ==>
/// Seq#Drop(Seq#Build(s, v), n) == Seq#Build(Seq#Drop(s, n), v) ); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val drop_commutes_with_build_fact : _: Prims.squash FStar.Sequence.Base.build_increments_length_fact -> Prims.logical | [] | FStar.Sequence.Base.drop_commutes_with_build_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | _: Prims.squash FStar.Sequence.Base.build_increments_length_fact -> Prims.logical | {
"end_col": 62,
"end_line": 476,
"start_col": 2,
"start_line": 475
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k | let drop_index_offset_fact (_: squash (drop_length_fact u#a)) = | false | null | false | forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat). {:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"FStar.Sequence.Base.drop_length_fact",
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_AmpAmp",
"Prims.op_LessThanOrEqual",
"Prims.op_LessThan",
"FStar.Sequence.Base.length",
"Prims.eq2",
"FStar.Sequence.Base.index",
"FStar.Sequence.Base.drop",
"Prims.op_Subtraction",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Rather than representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following Dafny axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k)); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val drop_index_offset_fact : _: Prims.squash FStar.Sequence.Base.drop_length_fact -> Prims.logical | [] | FStar.Sequence.Base.drop_index_offset_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | _: Prims.squash FStar.Sequence.Base.drop_length_fact -> Prims.logical | {
"end_col": 68,
"end_line": 400,
"start_col": 2,
"start_line": 398
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let append_take_drop_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat) (j: nat).{:pattern length (append (take s i) (drop s j))}
i < j && j <= length s ==> length (append (take s i) (drop s j)) << length s | let append_take_drop_ranks_less_fact = | false | null | false | forall (ty: Type u#a) (s: seq ty) (i: nat) (j: nat).
{:pattern length (append (take s i) (drop s j))}
i < j && j <= length s ==> length (append (take s i) (drop s j)) << length s | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_AmpAmp",
"Prims.op_LessThan",
"Prims.op_LessThanOrEqual",
"FStar.Sequence.Base.length",
"Prims.precedes",
"FStar.Sequence.Base.append",
"FStar.Sequence.Base.take",
"FStar.Sequence.Base.drop"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Rather than representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following Dafny axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
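/// Worked instance (for exposition only): for a nonempty s, the sequences
/// update (update s 0 v) 0 w and update s 0 w have the same length and agree at
/// every index (by `update_maintains_length_fact` and `update_then_index_fact`
/// above), so `equal_def_fact` relates them by `equal`, and this fact then
/// upgrades that to full equality ==.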
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
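/// Worked instance (for exposition only): for a five-element sequence s,
/// length (take s 2) = 2 by `take_length_fact`, and the fact above gives
/// index (take s 2) 0 == index s 0 and index (take s 2) 1 == index s 1.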
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
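/// Worked instance (for exposition only): for a sequence s with length s >= 3,
/// `index_into_drop_fact` gives index (drop s 2) 0 == index s 2, and this fact
/// states the same shift in the other direction: index s 4 == index (drop s 2) 2
/// whenever 4 < length s.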
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t);
private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t
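/// Worked instance (for exposition only): splitting a concatenation at the seam
/// recovers both halves; e.g. with s = [1;2] and t = [3], append s t = [1;2;3],
/// and taking or dropping n = 2 = length s gives back s and t respectively.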
/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n <= Seq#Length(s) ==>
/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) );
private let take_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
i < n && n <= length s ==>
take (update s i v) n == update (take s n) i v
/// We represent the following Dafny axiom with `take_ignores_out_of_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// n <= i && i < Seq#Length(s) ==> Seq#Take(Seq#Update(s, i, v), n) == Seq#Take(s, n));
private let take_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
n <= i && i < length s ==>
take (update s i v) n == take s n
/// We represent the following Dafny axiom with `drop_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= n && n <= i && i < Seq#Length(s) ==>
/// Seq#Drop(Seq#Update(s, i, v), n) == Seq#Update(Seq#Drop(s, n), i-n, v) );
private let drop_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
n <= i && i < length s ==>
drop (update s i v) n == update (drop s n) (i - n) v
/// We represent the following Dafny axiom with `drop_ignores_out_of_range_update_fact`.
/// Jay noticed that it was unnecessarily weak, possibly due to a typo, so he reported this as
/// Dafny issue #1423 (https://github.com/dafny-lang/dafny/issues/1423) and updated it here.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n < Seq#Length(s) ==> Seq#Drop(Seq#Update(s, i, v), n) == Seq#Drop(s, n));
private let drop_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
i < n && n <= length s ==>
drop (update s i v) n == drop s n
/// We represent the following Dafny axiom with `drop_commutes_with_build_fact`.
///
/// axiom (forall<T> s: Seq T, v: T, n: int ::
/// { Seq#Drop(Seq#Build(s, v), n) }
/// 0 <= n && n <= Seq#Length(s) ==>
/// Seq#Drop(Seq#Build(s, v), n) == Seq#Build(Seq#Drop(s, n), v) );
private let drop_commutes_with_build_fact (_ : squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (n: nat).{:pattern drop (build s v) n}
n <= length s ==> drop (build s v) n == build (drop s n) v
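/// Worked instance (for exposition only): with s = [1;2;3], v = 4 and n = 1,
/// both sides denote [2;3;4]: dropping one element of build [1;2;3] 4 = [1;2;3;4]
/// yields [2;3;4], as does building drop [1;2;3] 1 = [2;3] with 4.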
/// We include the definition of `rank` among our facts.
private let rank_def_fact =
forall (ty: Type u#a) (v: ty).{:pattern rank v} rank v == v
/// We represent the following Dafny axiom with `element_ranks_less_fact`.
///
/// axiom (forall s: Seq Box, i: int ::
/// { DtRank($Unbox(Seq#Index(s, i)): DatatypeType) }
/// 0 <= i && i < Seq#Length(s) ==> DtRank($Unbox(Seq#Index(s, i)): DatatypeType) < Seq#Rank(s) );
private let element_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (index s i)}
i < length s ==> rank (index s i) << rank s
/// We represent the following Dafny axiom with `drop_ranks_less_fact`.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Drop(s, i)) }
/// 0 < i && i <= Seq#Length(s) ==> Seq#Rank(Seq#Drop(s, i)) < Seq#Rank(s) );
private let drop_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (drop s i)}
0 < i && i <= length s ==> rank (drop s i) << rank s
/// We represent the following Dafny axiom with
/// `take_ranks_less_fact`. However, since it isn't true in F* (which
/// has strong requirements for <<), we instead substitute length,
/// requiring decreases clauses to use length in this case.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Take(s, i)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Rank(Seq#Take(s, i)) < Seq#Rank(s) );
private let take_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern length (take s i)}
i < length s ==> length (take s i) << length s
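/// A hypothetical sketch of the intended use (the function below is illustrative,
/// not part of this interface, and assumes the facts above are available to the
/// SMT solver, e.g. via a corresponding lemma):
///
///   let rec steps (#ty: Type) (s: seq ty) : Tot nat (decreases (length s)) =
///     if length s = 0 then 0 else 1 + steps (take s (length s - 1))
///
/// Here `take_length_fact` gives length (take s (length s - 1)) = length s - 1,
/// which is exactly what a decreases clause phrased in terms of `length` needs.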
/// We represent the following Dafny axiom with
/// `append_take_drop_ranks_less_fact`. However, since it isn't true
/// in F* (which has strong requirements for <<), we instead
/// substitute length, requiring decreases clauses to use
/// length in this case.
///
/// axiom (forall<T> s: Seq T, i: int, j: int ::
/// { Seq#Rank(Seq#Append(Seq#Take(s, i), Seq#Drop(s, j))) }
/// 0 <= i && i < j && j <= Seq#Length(s) ==>
/// Seq#Rank(Seq#Append(Seq#Take(s, i), Seq#Drop(s, j))) < Seq#Rank(s) ); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val append_take_drop_ranks_less_fact : Prims.logical | [] | FStar.Sequence.Base.append_take_drop_ranks_less_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.logical | {
"end_col": 80,
"end_line": 529,
"start_col": 2,
"start_line": 528
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let take_zero_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern take s n}
n = 0 ==> take s n == empty | let take_zero_fact = | false | null | false | forall (ty: Type u#a) (s: seq ty) (n: nat). {:pattern take s n} n = 0 ==> take s n == empty | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_Equality",
"Prims.int",
"Prims.eq2",
"FStar.Sequence.Base.take",
"FStar.Sequence.Base.empty"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
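/// For illustration (hypothetical values, for exposition only): with these
/// abbreviations, ((empty $:: 1) $:: 2) $+ (empty $:: 3) builds the three-element
/// sequence [1;2;3], and (by the index and length facts below)
/// ((empty $:: 1) $:: 2) $@ 1 == 2.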
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to express prefix relations, we
/// use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
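/// For example (illustrative only): length (singleton 7) = 1, so indexing it at
/// position 0 is well-typed; `index_into_singleton_fact` below takes this fact as
/// a hypothesis for exactly that reason and then pins index (singleton 7) 0 to 7.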
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t);
private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t
/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n <= Seq#Length(s) ==>
/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) );
private let take_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
i < n && n <= length s ==>
take (update s i v) n == update (take s n) i v
/// We represent the following Dafny axiom with `take_ignores_out_of_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// n <= i && i < Seq#Length(s) ==> Seq#Take(Seq#Update(s, i, v), n) == Seq#Take(s, n));
private let take_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
n <= i && i < length s ==>
take (update s i v) n == take s n
/// We represent the following Dafny axiom with `drop_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= n && n <= i && i < Seq#Length(s) ==>
/// Seq#Drop(Seq#Update(s, i, v), n) == Seq#Update(Seq#Drop(s, n), i-n, v) );
private let drop_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
n <= i && i < length s ==>
drop (update s i v) n == update (drop s n) (i - n) v
/// We represent the following Dafny axiom with `drop_ignores_out_of_range_update_fact`.
/// Jay noticed that it was unnecessarily weak, possibly due to a typo, so he reported this as
/// Dafny issue #1423 (https://github.com/dafny-lang/dafny/issues/1423) and updated it here.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n < Seq#Length(s) ==> Seq#Drop(Seq#Update(s, i, v), n) == Seq#Drop(s, n));
private let drop_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
i < n && n <= length s ==>
drop (update s i v) n == drop s n
/// We represent the following Dafny axiom with `drop_commutes_with_build_fact`.
///
/// axiom (forall<T> s: Seq T, v: T, n: int ::
/// { Seq#Drop(Seq#Build(s, v), n) }
/// 0 <= n && n <= Seq#Length(s) ==>
/// Seq#Drop(Seq#Build(s, v), n) == Seq#Build(Seq#Drop(s, n), v) );
private let drop_commutes_with_build_fact (_ : squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (n: nat).{:pattern drop (build s v) n}
n <= length s ==> drop (build s v) n == build (drop s n) v
/// We include the definition of `rank` among our facts.
private let rank_def_fact =
forall (ty: Type u#a) (v: ty).{:pattern rank v} rank v == v
/// We represent the following Dafny axiom with `element_ranks_less_fact`.
///
/// axiom (forall s: Seq Box, i: int ::
/// { DtRank($Unbox(Seq#Index(s, i)): DatatypeType) }
/// 0 <= i && i < Seq#Length(s) ==> DtRank($Unbox(Seq#Index(s, i)): DatatypeType) < Seq#Rank(s) );
private let element_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (index s i)}
i < length s ==> rank (index s i) << rank s
/// We represent the following Dafny axiom with `drop_ranks_less_fact`.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Drop(s, i)) }
/// 0 < i && i <= Seq#Length(s) ==> Seq#Rank(Seq#Drop(s, i)) < Seq#Rank(s) );
private let drop_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (drop s i)}
0 < i && i <= length s ==> rank (drop s i) << rank s
/// We represent the following Dafny axiom with
/// `take_ranks_less_fact`. However, since it isn't true in F* (which
/// has strong requirements for <<), we instead substitute length,
/// requiring decreases clauses to use length in this case.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Take(s, i)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Rank(Seq#Take(s, i)) < Seq#Rank(s) );
private let take_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern length (take s i)}
i < length s ==> length (take s i) << length s
/// We represent the following Dafny axiom with
/// `append_take_drop_ranks_less_fact`. However, since it isn't true
/// in F* (which has strong requirements for <<), we instead
/// substitute length, requiring decreases clauses to use
/// length in this case.
///
/// axiom (forall<T> s: Seq T, i: int, j: int ::
/// { Seq#Rank(Seq#Append(Seq#Take(s, i), Seq#Drop(s, j))) }
/// 0 <= i && i < j && j <= Seq#Length(s) ==>
/// Seq#Rank(Seq#Append(Seq#Take(s, i), Seq#Drop(s, j))) < Seq#Rank(s) );
private let append_take_drop_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat) (j: nat).{:pattern length (append (take s i) (drop s j))}
i < j && j <= length s ==> length (append (take s i) (drop s j)) << length s
/// We represent the following Dafny axiom with `drop_zero_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Drop(s, n) }
/// n == 0 ==> Seq#Drop(s, n) == s);
private let drop_zero_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern drop s n}
n = 0 ==> drop s n == s
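/// Worked instance (for exposition only): for any s, drop s 0 == s, and by
/// `take_zero_fact` below take s 0 == empty, so splitting s at position 0 yields
/// the pair (empty, s).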
/// We represent the following Dafny axiom with `take_zero_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Take(s, n) }
/// n == 0 ==> Seq#Take(s, n) == Seq#Empty()); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val take_zero_fact : Prims.logical | [] | FStar.Sequence.Base.take_zero_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.logical | {
"end_col": 31,
"end_line": 547,
"start_col": 2,
"start_line": 546
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let drop_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
i < n && n <= length s ==>
drop (update s i v) n == drop s n | let drop_ignores_out_of_range_update_fact (_: squash (update_maintains_length_fact u#a)) = | false | null | false | forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat). {:pattern drop (update s i v) n}
i < n && n <= length s ==> drop (update s i v) n == drop s n | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"FStar.Sequence.Base.update_maintains_length_fact",
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_AmpAmp",
"Prims.op_LessThan",
"Prims.op_LessThanOrEqual",
"FStar.Sequence.Base.length",
"Prims.eq2",
"FStar.Sequence.Base.drop",
"FStar.Sequence.Base.update",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to express prefix relations, we
/// use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
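/// Worked instance (for exposition only): build empty 1 and singleton 1 both have
/// length 1 and index 0 equal to 1 (by the build and singleton facts above), so
/// `equal` relates them; `extensionality_fact` below then turns that into
/// build empty 1 == singleton 1.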
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t);
private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t
/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n <= Seq#Length(s) ==>
/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) );
private let take_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
i < n && n <= length s ==>
take (update s i v) n == update (take s n) i v
/// We represent the following Dafny axiom with `take_ignores_out_of_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// n <= i && i < Seq#Length(s) ==> Seq#Take(Seq#Update(s, i, v), n) == Seq#Take(s, n));
private let take_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
n <= i && i < length s ==>
take (update s i v) n == take s n
/// We represent the following Dafny axiom with `drop_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= n && n <= i && i < Seq#Length(s) ==>
/// Seq#Drop(Seq#Update(s, i, v), n) == Seq#Update(Seq#Drop(s, n), i-n, v) );
private let drop_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
n <= i && i < length s ==>
drop (update s i v) n == update (drop s n) (i - n) v
/// We represent the following Dafny axiom with `drop_ignores_out_of_range_update_fact`.
/// Jay noticed that it was unnecessarily weak, possibly due to a typo, so he reported this as
/// Dafny issue #1423 (https://github.com/dafny-lang/dafny/issues/1423) and updated it here.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n < Seq#Length(s) ==> Seq#Drop(Seq#Update(s, i, v), n) == Seq#Drop(s, n)); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val drop_ignores_out_of_range_update_fact : _: Prims.squash FStar.Sequence.Base.update_maintains_length_fact -> Prims.logical | [] | FStar.Sequence.Base.drop_ignores_out_of_range_update_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | _: Prims.squash FStar.Sequence.Base.update_maintains_length_fact -> Prims.logical | {
"end_col": 37,
"end_line": 465,
"start_col": 2,
"start_line": 463
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let take_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
i < n && n <= length s ==>
take (update s i v) n == update (take s n) i v | let take_commutes_with_in_range_update_fact
(_: squash (update_maintains_length_fact u#a /\ take_length_fact u#a))
= | false | null | false | forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat). {:pattern take (update s i v) n}
i < n && n <= length s ==> take (update s i v) n == update (take s n) i v | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"Prims.l_and",
"FStar.Sequence.Base.update_maintains_length_fact",
"FStar.Sequence.Base.take_length_fact",
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_AmpAmp",
"Prims.op_LessThan",
"Prims.op_LessThanOrEqual",
"FStar.Sequence.Base.length",
"Prims.eq2",
"FStar.Sequence.Base.take",
"FStar.Sequence.Base.update",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
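/// Illustrative sketch (not part of the original interface): with the fact above as a
/// hypothesis, SMT can instantiate the quantifier at a concrete value. The helper name
/// `singleton_length_example` is hypothetical and serves only as an illustration.
private let singleton_length_example (_: squash (singleton_length_one_fact u#0))
  : squash (length (singleton 7) = 1)
  = ()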
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
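/// Illustrative sketch (not part of the original interface): a concrete consequence of
/// the fact above, assuming nothing else; `append_length_example` is a hypothetical name.
private let append_length_example (s: seq int) (_: squash (append_sums_lengths_fact u#0))
  : squash (length (append s s) = 2 * length s)
  = ()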
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
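/// Illustrative sketch (not part of the original interface): in-range updates preserve
/// length at any concrete index; `update_length_example` is a hypothetical name.
private let update_length_example
    (s: seq int) (i: nat{i < length s}) (_: squash (update_maintains_length_fact u#0))
  : squash (length (update s i 5) = length s)
  = ()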
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
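/// Illustrative sketch (not part of the original interface): the extensionality fact
/// above lets a client pass from `equal s0 s1` to definitional equality `s0 == s1`;
/// `equal_implies_eq_example` is a hypothetical name.
private let equal_implies_eq_example
    (s0 s1: seq int) (_: squash (extensionality_fact u#0))
  : Lemma (requires equal s0 s1) (ensures s0 == s1)
  = ()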
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t);
private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t
/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n <= Seq#Length(s) ==>
/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) );
private let take_commutes_with_in_range_update_fact | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val take_commutes_with_in_range_update_fact : _:
Prims.squash (FStar.Sequence.Base.update_maintains_length_fact /\
FStar.Sequence.Base.take_length_fact)
-> Prims.logical | [] | FStar.Sequence.Base.take_commutes_with_in_range_update_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
_:
Prims.squash (FStar.Sequence.Base.update_maintains_length_fact /\
FStar.Sequence.Base.take_length_fact)
-> Prims.logical | {
"end_col": 50,
"end_line": 428,
"start_col": 2,
"start_line": 426
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let drop_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
n <= i && i < length s ==>
drop (update s i v) n == update (drop s n) (i - n) v | let drop_commutes_with_in_range_update_fact
(_: squash (update_maintains_length_fact u#a /\ drop_length_fact u#a))
= | false | null | false | forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat). {:pattern drop (update s i v) n}
n <= i && i < length s ==> drop (update s i v) n == update (drop s n) (i - n) v | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"Prims.l_and",
"FStar.Sequence.Base.update_maintains_length_fact",
"FStar.Sequence.Base.drop_length_fact",
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_AmpAmp",
"Prims.op_LessThanOrEqual",
"Prims.op_LessThan",
"FStar.Sequence.Base.length",
"Prims.eq2",
"FStar.Sequence.Base.drop",
"FStar.Sequence.Base.update",
"Prims.op_Subtraction",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
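/// Illustrative sketch (not part of the original interface): the element just pushed by
/// `build` is always contained in the result; `build_contains_example` is a hypothetical
/// name.
private let build_contains_example
    (s: seq int) (_: squash (build_contains_equiv_fact u#0))
  : squash (contains (build s 3) 3)
  = ()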
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
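/// Illustrative sketch (not part of the original interface): reflexivity of `is_prefix`
/// follows directly from the definition above; `is_prefix_refl_example` is a hypothetical
/// name.
private let is_prefix_refl_example
    (s: seq int) (_: squash (is_prefix_def_fact u#0))
  : squash (is_prefix s s)
  = ()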
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
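/// Illustrative sketch (not part of the original interface): the two length facts above
/// imply that `take` and `drop` split a sequence without losing elements;
/// `take_drop_length_example` is a hypothetical name.
private let take_drop_length_example
    (s: seq int) (n: nat{n <= length s})
    (_: squash (take_length_fact u#0 /\ drop_length_fact u#0))
  : squash (length (take s n) + length (drop s n) = length s)
  = ()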
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t);
private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t
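/// Illustrative sketch (not part of the original interface): instantiating the fact above
/// at `n = length s` recovers both halves of an append; the helper name and the way the
/// hypotheses are threaded are hypothetical.
private let append_take_drop_example
    (s t: seq int)
    (sq: squash (append_sums_lengths_fact u#0))
    (_: squash (append_then_take_or_drop_fact u#0 sq))
  : squash (take (append s t) (length s) == s /\ drop (append s t) (length s) == t)
  = ()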
/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n <= Seq#Length(s) ==>
/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) );
private let take_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
i < n && n <= length s ==>
take (update s i v) n == update (take s n) i v
/// We represent the following Dafny axiom with `take_ignores_out_of_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// n <= i && i < Seq#Length(s) ==> Seq#Take(Seq#Update(s, i, v), n) == Seq#Take(s, n));
private let take_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
n <= i && i < length s ==>
take (update s i v) n == take s n
/// We represent the following Dafny axiom with `drop_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= n && n <= i && i < Seq#Length(s) ==>
/// Seq#Drop(Seq#Update(s, i, v), n) == Seq#Update(Seq#Drop(s, n), i-n, v) );
private let drop_commutes_with_in_range_update_fact | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val drop_commutes_with_in_range_update_fact : _:
Prims.squash (FStar.Sequence.Base.update_maintains_length_fact /\
FStar.Sequence.Base.drop_length_fact)
-> Prims.logical | [] | FStar.Sequence.Base.drop_commutes_with_in_range_update_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
_:
Prims.squash (FStar.Sequence.Base.update_maintains_length_fact /\
FStar.Sequence.Base.drop_length_fact)
-> Prims.logical | {
"end_col": 56,
"end_line": 452,
"start_col": 2,
"start_line": 450
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let all_seq_facts =
length_of_empty_is_zero_fact u#a
/\ length_zero_implies_empty_fact u#a
/\ singleton_length_one_fact u#a
/\ build_increments_length_fact u#a
/\ index_into_build_fact u#a ()
/\ append_sums_lengths_fact u#a
/\ index_into_singleton_fact u#a ()
/\ index_after_append_fact u#a ()
/\ update_maintains_length_fact u#a
/\ update_then_index_fact u#a
/\ contains_iff_exists_index_fact u#a
/\ empty_doesnt_contain_anything_fact u#a
/\ build_contains_equiv_fact u#a
/\ take_contains_equiv_exists_fact u#a
/\ drop_contains_equiv_exists_fact u#a
/\ equal_def_fact u#a
/\ extensionality_fact u#a
/\ is_prefix_def_fact u#a
/\ take_length_fact u#a
/\ index_into_take_fact u#a ()
/\ drop_length_fact u#a
/\ index_into_drop_fact u#a ()
/\ drop_index_offset_fact u#a ()
/\ append_then_take_or_drop_fact u#a ()
/\ take_commutes_with_in_range_update_fact u#a ()
/\ take_ignores_out_of_range_update_fact u#a ()
/\ drop_commutes_with_in_range_update_fact u#a ()
/\ drop_ignores_out_of_range_update_fact u#a ()
/\ drop_commutes_with_build_fact u#a ()
/\ rank_def_fact u#a
/\ element_ranks_less_fact u#a
/\ drop_ranks_less_fact u#a
/\ take_ranks_less_fact u#a
/\ append_take_drop_ranks_less_fact u#a
/\ drop_zero_fact u#a
/\ take_zero_fact u#a
/\ drop_then_drop_fact u#a () | let all_seq_facts = | false | null | false | length_of_empty_is_zero_fact u#a /\ length_zero_implies_empty_fact u#a /\
singleton_length_one_fact u#a /\ build_increments_length_fact u#a /\ index_into_build_fact u#a () /\
append_sums_lengths_fact u#a /\ index_into_singleton_fact u#a () /\ index_after_append_fact u#a () /\
update_maintains_length_fact u#a /\ update_then_index_fact u#a /\ contains_iff_exists_index_fact u#a /\
empty_doesnt_contain_anything_fact u#a /\ build_contains_equiv_fact u#a /\
take_contains_equiv_exists_fact u#a /\ drop_contains_equiv_exists_fact u#a /\ equal_def_fact u#a /\
extensionality_fact u#a /\ is_prefix_def_fact u#a /\ take_length_fact u#a /\
index_into_take_fact u#a () /\ drop_length_fact u#a /\ index_into_drop_fact u#a () /\
drop_index_offset_fact u#a () /\ append_then_take_or_drop_fact u#a () /\
take_commutes_with_in_range_update_fact u#a () /\ take_ignores_out_of_range_update_fact u#a () /\
drop_commutes_with_in_range_update_fact u#a () /\ drop_ignores_out_of_range_update_fact u#a () /\
drop_commutes_with_build_fact u#a () /\ rank_def_fact u#a /\ element_ranks_less_fact u#a /\
drop_ranks_less_fact u#a /\ take_ranks_less_fact u#a /\ append_take_drop_ranks_less_fact u#a /\
drop_zero_fact u#a /\ take_zero_fact u#a /\ drop_then_drop_fact u#a () | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.l_and",
"FStar.Sequence.Base.length_of_empty_is_zero_fact",
"FStar.Sequence.Base.length_zero_implies_empty_fact",
"FStar.Sequence.Base.singleton_length_one_fact",
"FStar.Sequence.Base.build_increments_length_fact",
"FStar.Sequence.Base.index_into_build_fact",
"FStar.Sequence.Base.append_sums_lengths_fact",
"FStar.Sequence.Base.index_into_singleton_fact",
"FStar.Sequence.Base.index_after_append_fact",
"FStar.Sequence.Base.update_maintains_length_fact",
"FStar.Sequence.Base.update_then_index_fact",
"FStar.Sequence.Base.contains_iff_exists_index_fact",
"FStar.Sequence.Base.empty_doesnt_contain_anything_fact",
"FStar.Sequence.Base.build_contains_equiv_fact",
"FStar.Sequence.Base.take_contains_equiv_exists_fact",
"FStar.Sequence.Base.drop_contains_equiv_exists_fact",
"FStar.Sequence.Base.equal_def_fact",
"FStar.Sequence.Base.extensionality_fact",
"FStar.Sequence.Base.is_prefix_def_fact",
"FStar.Sequence.Base.take_length_fact",
"FStar.Sequence.Base.index_into_take_fact",
"FStar.Sequence.Base.drop_length_fact",
"FStar.Sequence.Base.index_into_drop_fact",
"FStar.Sequence.Base.drop_index_offset_fact",
"FStar.Sequence.Base.append_then_take_or_drop_fact",
"FStar.Sequence.Base.take_commutes_with_in_range_update_fact",
"FStar.Sequence.Base.take_ignores_out_of_range_update_fact",
"FStar.Sequence.Base.drop_commutes_with_in_range_update_fact",
"FStar.Sequence.Base.drop_ignores_out_of_range_update_fact",
"FStar.Sequence.Base.drop_commutes_with_build_fact",
"FStar.Sequence.Base.rank_def_fact",
"FStar.Sequence.Base.element_ranks_less_fact",
"FStar.Sequence.Base.drop_ranks_less_fact",
"FStar.Sequence.Base.take_ranks_less_fact",
"FStar.Sequence.Base.append_take_drop_ranks_less_fact",
"FStar.Sequence.Base.drop_zero_fact",
"FStar.Sequence.Base.take_zero_fact",
"FStar.Sequence.Base.drop_then_drop_fact"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
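/// Illustrative sketch (not part of the original interface): a direct use of the fact
/// above at a particular sequence; `length_zero_example` is a hypothetical name.
private let length_zero_example
    (s: seq int) (_: squash (length_zero_implies_empty_fact u#0))
  : Lemma (requires length s = 0) (ensures s == empty)
  = ()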
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
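/// Illustrative sketch (not part of the original interface): combining the two update
/// facts above, reading back a just-written element returns the written value;
/// `update_then_read_example` is a hypothetical name.
private let update_then_read_example
    (s: seq int) (i: nat{i < length s})
    (_: squash (update_maintains_length_fact u#0 /\ update_then_index_fact u#0))
  : squash (index (update s i 5) i == 5)
  = ()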
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
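/// Illustrative sketch (not part of the original interface): any element reachable by
/// `index` is contained in the sequence, by choosing that index as the existential
/// witness; `index_implies_contains_example` is a hypothetical name.
private let index_implies_contains_example
    (s: seq int) (i: nat{i < length s})
    (_: squash (contains_iff_exists_index_fact u#0))
  : squash (contains s (index s i))
  = ()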
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t);
private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t
/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n <= Seq#Length(s) ==>
/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) );
private let take_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
i < n && n <= length s ==>
take (update s i v) n == update (take s n) i v
/// We represent the following Dafny axiom with `take_ignores_out_of_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// n <= i && i < Seq#Length(s) ==> Seq#Take(Seq#Update(s, i, v), n) == Seq#Take(s, n));
private let take_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
n <= i && i < length s ==>
take (update s i v) n == take s n
/// We represent the following Dafny axiom with `drop_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= n && n <= i && i < Seq#Length(s) ==>
/// Seq#Drop(Seq#Update(s, i, v), n) == Seq#Update(Seq#Drop(s, n), i-n, v) );
private let drop_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
n <= i && i < length s ==>
drop (update s i v) n == update (drop s n) (i - n) v
/// We represent the following Dafny axiom with `drop_ignores_out_of_range_update_fact`.
/// Jay noticed that it was unnecessarily weak, possibly due to a typo, so he reported this as
/// Dafny issue #1423 (https://github.com/dafny-lang/dafny/issues/1423) and updated it here.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n < Seq#Length(s) ==> Seq#Drop(Seq#Update(s, i, v), n) == Seq#Drop(s, n));
private let drop_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
i < n && n <= length s ==>
drop (update s i v) n == drop s n
/// We represent the following Dafny axiom with `drop_commutes_with_build_fact`.
///
/// axiom (forall<T> s: Seq T, v: T, n: int ::
/// { Seq#Drop(Seq#Build(s, v), n) }
/// 0 <= n && n <= Seq#Length(s) ==>
/// Seq#Drop(Seq#Build(s, v), n) == Seq#Build(Seq#Drop(s, n), v) );
private let drop_commutes_with_build_fact (_ : squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (n: nat).{:pattern drop (build s v) n}
n <= length s ==> drop (build s v) n == build (drop s n) v
/// We include the definition of `rank` among our facts.
private let rank_def_fact =
forall (ty: Type u#a) (v: ty).{:pattern rank v} rank v == v
/// We represent the following Dafny axiom with `element_ranks_less_fact`.
///
/// axiom (forall s: Seq Box, i: int ::
/// { DtRank($Unbox(Seq#Index(s, i)): DatatypeType) }
/// 0 <= i && i < Seq#Length(s) ==> DtRank($Unbox(Seq#Index(s, i)): DatatypeType) < Seq#Rank(s) );
private let element_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (index s i)}
i < length s ==> rank (index s i) << rank s
/// We represent the following Dafny axiom with `drop_ranks_less_fact`.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Drop(s, i)) }
/// 0 < i && i <= Seq#Length(s) ==> Seq#Rank(Seq#Drop(s, i)) < Seq#Rank(s) );
private let drop_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (drop s i)}
0 < i && i <= length s ==> rank (drop s i) << rank s
/// We represent the following Dafny axiom with
/// `take_ranks_less_fact`. However, since it isn't true in F* (which
/// has strong requirements for <<), we instead substitute length,
/// requiring decreases clauses to use length in this case.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Take(s, i)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Rank(Seq#Take(s, i)) < Seq#Rank(s) );
private let take_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern length (take s i)}
i < length s ==> length (take s i) << length s
/// We represent the following Dafny axiom with
/// `append_take_drop_ranks_less_fact`. However, since it isn't true
/// in F* (which has strong requirements for <<), we instead
/// substitute length, requiring decreases clauses to use
/// length in this case.
///
/// axiom (forall<T> s: Seq T, i: int, j: int ::
/// { Seq#Rank(Seq#Append(Seq#Take(s, i), Seq#Drop(s, j))) }
/// 0 <= i && i < j && j <= Seq#Length(s) ==>
/// Seq#Rank(Seq#Append(Seq#Take(s, i), Seq#Drop(s, j))) < Seq#Rank(s) );
private let append_take_drop_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat) (j: nat).{:pattern length (append (take s i) (drop s j))}
i < j && j <= length s ==> length (append (take s i) (drop s j)) << length s
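/// As an illustration of how these length-based variants are used (a sketch,
/// not part of the original interface; it assumes the facts above have been
/// brought into scope, e.g. via a lemma such as `all_seq_facts_lemma`, and the
/// name `count_trues` is hypothetical):
///
///   let rec count_trues (s: seq bool) : Tot nat (decreases (length s)) =
///     if length s = 0 then 0
///     else (if index s 0 then 1 else 0) + count_trues (drop s 1)
///
/// Termination follows because `length (drop s 1) = length s - 1 < length s`.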
/// We represent the following Dafny axiom with `drop_zero_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Drop(s, n) }
/// n == 0 ==> Seq#Drop(s, n) == s);
private let drop_zero_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern drop s n}
n = 0 ==> drop s n == s
/// We represent the following Dafny axiom with `take_zero_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Take(s, n) }
/// n == 0 ==> Seq#Take(s, n) == Seq#Empty());
private let take_zero_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern take s n}
n = 0 ==> take s n == empty
/// We represent the following Dafny axiom with `drop_then_drop_fact`.
///
/// axiom (forall<T> s: Seq T, m, n: int :: { Seq#Drop(Seq#Drop(s, m), n) }
/// 0 <= m && 0 <= n && m+n <= Seq#Length(s) ==>
/// Seq#Drop(Seq#Drop(s, m), n) == Seq#Drop(s, m+n));
private let drop_then_drop_fact (_: squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (m: nat) (n: nat).{:pattern drop (drop s m) n}
m + n <= length s ==> drop (drop s m) n == drop s (m + n)
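/// For illustration (a sketch, not part of the original interface): with
/// `length s >= 5`, the fact above yields `drop (drop s 2) 3 == drop s 5`,
/// since `2 + 3 <= length s`.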
(**
The predicate `all_seq_facts` collects all the Dafny sequence axioms.
One can bring all these facts into scope with `all_seq_facts_lemma ()`.
**) | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val all_seq_facts : Prims.logical | [] | FStar.Sequence.Base.all_seq_facts | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.logical | {
"end_col": 31,
"end_line": 601,
"start_col": 4,
"start_line": 565
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Sequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let drop_then_drop_fact (_: squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (m: nat) (n: nat).{:pattern drop (drop s m) n}
m + n <= length s ==> drop (drop s m) n == drop s (m + n) | let drop_then_drop_fact (_: squash (drop_length_fact u#a)) = | false | null | false | forall (ty: Type u#a) (s: seq ty) (m: nat) (n: nat). {:pattern drop (drop s m) n}
m + n <= length s ==> drop (drop s m) n == drop s (m + n) | {
"checked_file": "FStar.Sequence.Base.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Sequence.Base.fsti"
} | [
"total"
] | [
"Prims.squash",
"FStar.Sequence.Base.drop_length_fact",
"Prims.l_Forall",
"FStar.Sequence.Base.seq",
"Prims.nat",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Sequence.Base.length",
"Prims.eq2",
"FStar.Sequence.Base.drop",
"Prims.logical"
] | [] | (*
Copyright 2008-2021 Jay Lorch, Rustan Leino, Alex Summers, Dan
Rosen, Nikhil Swamy, Microsoft Research, and contributors to
the Dafny Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Includes material from the Dafny project
(https://github.com/dafny-lang/dafny) which carries this license
information:
Created 9 February 2008 by Rustan Leino.
Converted to Boogie 2 on 28 June 2008.
Edited sequence axioms 20 October 2009 by Alex Summers.
Modified 2014 by Dan Rosen.
Copyright (c) 2008-2014, Microsoft.
Copyright by the contributors to the Dafny Project
SPDX-License-Identifier: MIT
*)
(**
This module declares a type and functions used for modeling
sequences as they're modeled in Dafny.
@summary Type and functions for modeling sequences
*)
module FStar.Sequence.Base
new val seq ([@@@ strictly_positive] a: Type u#a) : Type u#a
(**
We translate each Dafny sequence function prefixed with `Seq#`
into an F* function.
**)
/// We represent the Dafny function `Seq#Length` with `length`:
///
/// function Seq#Length<T>(Seq T): int;
val length : #ty: Type -> seq ty -> nat
/// We represent the Dafny function `Seq#Empty` with `empty`:
///
/// function Seq#Empty<T>(): Seq T;
///
/// We also provide an alias `nil` for it.
val empty : #ty: Type -> seq ty
/// We represent the Dafny function `Seq#Singleton` with `singleton`:
///
/// function Seq#Singleton<T>(T): Seq T;
val singleton : #ty: Type -> ty -> seq ty
/// We represent the Dafny function `Seq#Index` with `index`:
///
/// function Seq#Index<T>(Seq T, int): T;
///
/// We also provide the infix symbol `$@` for it.
val index: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty
let ($@) = index
/// We represent the Dafny function `Seq#Build` with `build`:
///
/// function Seq#Build<T>(s: Seq T, val: T): Seq T;
///
/// We also provide the infix symbol `$::` for it.
val build: #ty: Type -> seq ty -> ty -> seq ty
let ($::) = build
/// We represent the Dafny function `Seq#Append` with `append`:
///
/// function Seq#Append<T>(Seq T, Seq T): Seq T;
///
/// We also provide the infix notation `$+` for it.
val append: #ty: Type -> seq ty -> seq ty -> seq ty
let ($+) = append
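/// For illustration (a sketch, not part of the original file): with these
/// infix aliases, `s $+ t` stands for `append s t`, `s $:: x` stands for
/// `build s x`, and `(s $+ t) $@ 0` stands for `index (append s t) 0`.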
/// We represent the Dafny function `Seq#Update` with `update`:
///
/// function Seq#Update<T>(Seq T, int, T): Seq T;
val update: #ty: Type -> s: seq ty -> i: nat{i < length s} -> ty -> seq ty
/// We represent the Dafny function `Seq#Contains` with `contains`:
///
/// function Seq#Contains<T>(Seq T, T): bool;
val contains: #ty: Type -> seq ty -> ty -> Type0
/// We represent the Dafny function `Seq#Take` with `take`:
///
/// function Seq#Take<T>(s: Seq T, howMany: int): Seq T;
val take: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Drop` with `drop`:
///
/// function Seq#Drop<T>(s: Seq T, howMany: int): Seq T;
val drop: #ty: Type -> s: seq ty -> howMany: nat{howMany <= length s} -> seq ty
/// We represent the Dafny function `Seq#Equal` with `equal`.
///
/// function Seq#Equal<T>(Seq T, Seq T): bool;
///
/// We also provide the infix symbol `$==` for it.
val equal: #ty: Type -> seq ty -> seq ty -> Type0
let ($==) = equal
/// Instead of representing the Dafny function `Seq#SameUntil`, which
/// is only ever used in Dafny to represent prefix relations, we
/// instead use `is_prefix`.
///
/// function Seq#SameUntil<T>(Seq T, Seq T, int): bool;
///
/// We also provide the infix notation `$<=` for it.
val is_prefix: #ty: Type -> seq ty -> seq ty -> Type0
let ($<=) = is_prefix
/// We represent the Dafny function `Seq#Rank` with `rank`.
///
/// function Seq#Rank<T>(Seq T): int;
val rank: #ty: Type -> ty -> ty
(**
We translate each sequence axiom from the Dafny prelude into an F*
predicate ending in `_fact`.
**)
/// We don't need the following axiom since we return a nat from length:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) } 0 <= Seq#Length(s));
/// We represent the following Dafny axiom with `length_of_empty_is_zero_fact`:
///
/// axiom (forall<T> :: { Seq#Empty(): Seq T } Seq#Length(Seq#Empty(): Seq T) == 0);
private let length_of_empty_is_zero_fact =
forall (ty: Type u#a).{:pattern empty #ty} length (empty #ty) = 0
/// We represent the following Dafny axiom with `length_zero_implies_empty_fact`:
///
/// axiom (forall<T> s: Seq T :: { Seq#Length(s) }
/// (Seq#Length(s) == 0 ==> s == Seq#Empty())
private let length_zero_implies_empty_fact =
forall (ty: Type u#a) (s: seq ty).{:pattern length s} length s = 0 ==> s == empty
/// We represent the following Dafny axiom with `singleton_length_one_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Length(Seq#Singleton(t)) } Seq#Length(Seq#Singleton(t)) == 1);
private let singleton_length_one_fact =
forall (ty: Type u#a) (v: ty).{:pattern length (singleton v)} length (singleton v) = 1
/// We represent the following Dafny axiom with `build_increments_length_fact`:
///
/// axiom (forall<T> s: Seq T, v: T ::
/// { Seq#Build(s,v) }
/// Seq#Length(Seq#Build(s,v)) == 1 + Seq#Length(s));
private let build_increments_length_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty).{:pattern build s v}
length (build s v) = 1 + length s
/// We represent the following Dafny axiom with `index_into_build_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Index(Seq#Build(s,v), i) }
/// (i == Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == v) &&
/// (i != Seq#Length(s) ==> Seq#Index(Seq#Build(s,v), i) == Seq#Index(s, i)));
private let index_into_build_fact (_: squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (i: nat{i < length (build s v)})
.{:pattern index (build s v) i}
(i = length s ==> index (build s v) i == v)
/\ (i <> length s ==> index (build s v) i == index s i)
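/// For illustration (a sketch, not part of the original interface): reading
/// back the position just appended returns the new element,
/// `index (build s v) (length s) == v`, while every earlier position is
/// unchanged, `index (build s v) i == index s i` for `i < length s`.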
/// We represent the following Dafny axiom with `append_sums_lengths_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Length(Seq#Append(s0,s1)) }
/// Seq#Length(Seq#Append(s0,s1)) == Seq#Length(s0) + Seq#Length(s1));
private let append_sums_lengths_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern length (append s0 s1)}
length (append s0 s1) = length s0 + length s1
/// We represent the following Dafny axiom with `index_into_singleton_fact`:
///
/// axiom (forall<T> t: T :: { Seq#Index(Seq#Singleton(t), 0) } Seq#Index(Seq#Singleton(t), 0) == t);
private let index_into_singleton_fact (_: squash (singleton_length_one_fact u#a)) =
forall (ty: Type u#a) (v: ty).{:pattern index (singleton v) 0}
index (singleton v) 0 == v
/// We represent the following axiom with `index_after_append_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#Index(Seq#Append(s0,s1), n) }
/// (n < Seq#Length(s0) ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s0, n)) &&
/// (Seq#Length(s0) <= n ==> Seq#Index(Seq#Append(s0,s1), n) == Seq#Index(s1, n - Seq#Length(s0))));
private let index_after_append_fact (_: squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty) (n: nat{n < length (append s0 s1)})
.{:pattern index (append s0 s1) n}
(n < length s0 ==> index (append s0 s1) n == index s0 n)
/\ (length s0 <= n ==> index (append s0 s1) n == index s1 (n - length s0))
/// We represent the following Dafny axiom with `update_maintains_length_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T :: { Seq#Length(Seq#Update(s,i,v)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Length(Seq#Update(s,i,v)) == Seq#Length(s));
private let update_maintains_length_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty).{:pattern length (update s i v)}
length (update s i v) = length s
/// We represent the following Dafny axiom with `update_then_index_fact`:
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int :: { Seq#Index(Seq#Update(s,i,v),n) }
/// 0 <= n && n < Seq#Length(s) ==>
/// (i == n ==> Seq#Index(Seq#Update(s,i,v),n) == v) &&
/// (i != n ==> Seq#Index(Seq#Update(s,i,v),n) == Seq#Index(s,n)));
private let update_then_index_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat{i < length s}) (v: ty) (n: nat{n < length (update s i v)})
.{:pattern index (update s i v) n}
n < length s ==>
(i = n ==> index (update s i v) n == v)
/\ (i <> n ==> index (update s i v) n == index s n)
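/// For illustration (a sketch, not part of the original interface): if
/// `length s >= 3`, then `index (update s 2 v) 2 == v`, while
/// `index (update s 2 v) 0 == index s 0`.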
/// We represent the following Dafny axiom with `contains_iff_exists_index_fact`:
///
/// axiom (forall<T> s: Seq T, x: T :: { Seq#Contains(s,x) }
/// Seq#Contains(s,x) <==>
/// (exists i: int :: { Seq#Index(s,i) } 0 <= i && i < Seq#Length(s) && Seq#Index(s,i) == x));
private let contains_iff_exists_index_fact =
forall (ty: Type u#a) (s: seq ty) (x: ty).{:pattern contains s x}
contains s x <==> (exists (i: nat).{:pattern index s i} i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `empty_doesnt_contain_anything_fact`:
///
/// axiom (forall<T> x: T ::
/// { Seq#Contains(Seq#Empty(), x) }
/// !Seq#Contains(Seq#Empty(), x));
private let empty_doesnt_contain_anything_fact =
forall (ty: Type u#a) (x: ty).{:pattern contains empty x} ~(contains empty x)
/// We represent the following Dafny axiom with `build_contains_equiv_fact`:
///
/// axiom (forall<T> s: Seq T, v: T, x: T :: // needed to prove things like '4 in [2,3,4]', see method TestSequences0 in SmallTests.dfy
/// { Seq#Contains(Seq#Build(s, v), x) }
/// Seq#Contains(Seq#Build(s, v), x) <==> (v == x || Seq#Contains(s, x)));
private let build_contains_equiv_fact =
forall (ty: Type u#a) (s: seq ty) (v: ty) (x: ty).{:pattern contains (build s v) x}
contains (build s v) x <==> (v == x \/ contains s x)
/// We represent the following Dafny axiom with `take_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Take(s, n), x) }
/// Seq#Contains(Seq#Take(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= i && i < n && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let take_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (take s n) x}
contains (take s n) x <==>
(exists (i: nat).{:pattern index s i} i < n /\ i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `drop_contains_equiv_exists_fact`:
///
/// axiom (forall<T> s: Seq T, n: int, x: T ::
/// { Seq#Contains(Seq#Drop(s, n), x) }
/// Seq#Contains(Seq#Drop(s, n), x) <==>
/// (exists i: int :: { Seq#Index(s, i) }
/// 0 <= n && n <= i && i < Seq#Length(s) && Seq#Index(s, i) == x));
private let drop_contains_equiv_exists_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat{n <= length s}) (x: ty).{:pattern contains (drop s n) x}
contains (drop s n) x <==>
(exists (i: nat).{:pattern index s i} n <= i && i < length s /\ index s i == x)
/// We represent the following Dafny axiom with `equal_def_fact`:
///
/// axiom (forall<T> s0: Seq T, s1: Seq T :: { Seq#Equal(s0,s1) }
/// Seq#Equal(s0,s1) <==>
/// Seq#Length(s0) == Seq#Length(s1) &&
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < Seq#Length(s0) ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let equal_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern equal s0 s1}
equal s0 s1 <==>
length s0 == length s1 /\
(forall j.{:pattern index s0 j \/ index s1 j}
0 <= j && j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `extensionality_fact`:
///
/// axiom (forall<T> a: Seq T, b: Seq T :: { Seq#Equal(a,b) } // extensionality axiom for sequences
/// Seq#Equal(a,b) ==> a == b);
private let extensionality_fact =
forall (ty: Type u#a) (a: seq ty) (b: seq ty).{:pattern equal a b}
equal a b ==> a == b
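/// In practice (a sketch of the intended proof pattern, not part of the
/// original interface): to show two sequences are provably equal, one first
/// establishes `equal a b` pointwise via `equal_def_fact` and then concludes
/// `a == b` from the extensionality fact above.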
/// We represent an analog of the following Dafny axiom with
/// `is_prefix_def_fact`. Our analog uses `is_prefix` instead
/// of `Seq#SameUntil`.
///
/// axiom (forall<T> s0: Seq T, s1: Seq T, n: int :: { Seq#SameUntil(s0,s1,n) }
/// Seq#SameUntil(s0,s1,n) <==>
/// (forall j: int :: { Seq#Index(s0,j) } { Seq#Index(s1,j) }
/// 0 <= j && j < n ==> Seq#Index(s0,j) == Seq#Index(s1,j)));
private let is_prefix_def_fact =
forall (ty: Type u#a) (s0: seq ty) (s1: seq ty).{:pattern is_prefix s0 s1}
is_prefix s0 s1 <==>
length s0 <= length s1
/\ (forall (j: nat).{:pattern index s0 j \/ index s1 j}
j < length s0 ==> index s0 j == index s1 j)
/// We represent the following Dafny axiom with `take_length_fact`:
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Take(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Take(s,n)) == n);
private let take_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern length (take s n)}
n <= length s ==> length (take s n) = n
/// We represent the following Dafny axiom with `index_into_take_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Take(s,n), j) }
/// { Seq#Index(s, j), Seq#Take(s,n) }
/// 0 <= j && j < n && j < Seq#Length(s) ==>
/// Seq#Index(Seq#Take(s,n), j) == Seq#Index(s, j));
private let index_into_take_fact (_ : squash (take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (take s n) j \/ index s j ; take s n}
j < n && n <= length s ==> index (take s n) j == index s j
/// We represent the following Dafny axiom with `drop_length_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Length(Seq#Drop(s,n)) }
/// 0 <= n && n <= Seq#Length(s) ==> Seq#Length(Seq#Drop(s,n)) == Seq#Length(s) - n);
private let drop_length_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).
{:pattern length (drop s n)}
n <= length s ==> length (drop s n) = length s - n
/// We represent the following Dafny axiom with `index_into_drop_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, j: int ::
/// {:weight 25}
/// { Seq#Index(Seq#Drop(s,n), j) }
/// 0 <= n && 0 <= j && j < Seq#Length(s)-n ==>
/// Seq#Index(Seq#Drop(s,n), j) == Seq#Index(s, j+n));
private let index_into_drop_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (j: nat).
{:pattern index (drop s n) j}
j < length s - n ==> index (drop s n) j == index s (j + n)
/// We represent the following Dafny axiom with `drop_index_offset_fact`.
///
/// axiom (forall<T> s: Seq T, n: int, k: int ::
/// {:weight 25}
/// { Seq#Index(s, k), Seq#Drop(s,n) }
/// 0 <= n && n <= k && k < Seq#Length(s) ==>
/// Seq#Index(Seq#Drop(s,n), k-n) == Seq#Index(s, k));
private let drop_index_offset_fact (_ : squash (drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (n: nat) (k: nat).
{:pattern index s k; drop s n}
n <= k && k < length s ==> index (drop s n) (k - n) == index s k
/// We represent the following Dafny axiom with `append_then_take_or_drop_fact`.
///
/// axiom (forall<T> s, t: Seq T, n: int ::
/// { Seq#Take(Seq#Append(s, t), n) }
/// { Seq#Drop(Seq#Append(s, t), n) }
/// n == Seq#Length(s)
/// ==>
/// Seq#Take(Seq#Append(s, t), n) == s &&
/// Seq#Drop(Seq#Append(s, t), n) == t);
private let append_then_take_or_drop_fact (_ : squash (append_sums_lengths_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (t: seq ty) (n: nat).
{:pattern take (append s t) n \/ drop (append s t) n}
n = length s ==> take (append s t) n == s /\ drop (append s t) n == t
/// We represent the following Dafny axiom with `take_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n <= Seq#Length(s) ==>
/// Seq#Take(Seq#Update(s, i, v), n) == Seq#Update(Seq#Take(s, n), i, v) );
private let take_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ take_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
i < n && n <= length s ==>
take (update s i v) n == update (take s n) i v
/// We represent the following Dafny axiom with `take_ignores_out_of_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Take(Seq#Update(s, i, v), n) }
/// n <= i && i < Seq#Length(s) ==> Seq#Take(Seq#Update(s, i, v), n) == Seq#Take(s, n));
private let take_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern take (update s i v) n}
n <= i && i < length s ==>
take (update s i v) n == take s n
/// We represent the following Dafny axiom with `drop_commutes_with_in_range_update_fact`.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= n && n <= i && i < Seq#Length(s) ==>
/// Seq#Drop(Seq#Update(s, i, v), n) == Seq#Update(Seq#Drop(s, n), i-n, v) );
private let drop_commutes_with_in_range_update_fact
(_ : squash (update_maintains_length_fact u#a /\ drop_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
n <= i && i < length s ==>
drop (update s i v) n == update (drop s n) (i - n) v
/// We represent the following Dafny axiom with `drop_ignores_out_of_range_update_fact`.
/// Jay noticed that it was unnecessarily weak, possibly due to a typo, so he reported this as
/// Dafny issue #1423 (https://github.com/dafny-lang/dafny/issues/1423) and updated it here.
///
/// axiom (forall<T> s: Seq T, i: int, v: T, n: int ::
/// { Seq#Drop(Seq#Update(s, i, v), n) }
/// 0 <= i && i < n && n < Seq#Length(s) ==> Seq#Drop(Seq#Update(s, i, v), n) == Seq#Drop(s, n));
private let drop_ignores_out_of_range_update_fact (_ : squash (update_maintains_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (i: nat) (v: ty) (n: nat).{:pattern drop (update s i v) n}
i < n && n <= length s ==>
drop (update s i v) n == drop s n
/// We represent the following Dafny axiom with `drop_commutes_with_build_fact`.
///
/// axiom (forall<T> s: Seq T, v: T, n: int ::
/// { Seq#Drop(Seq#Build(s, v), n) }
/// 0 <= n && n <= Seq#Length(s) ==>
/// Seq#Drop(Seq#Build(s, v), n) == Seq#Build(Seq#Drop(s, n), v) );
private let drop_commutes_with_build_fact (_ : squash (build_increments_length_fact u#a)) =
forall (ty: Type u#a) (s: seq ty) (v: ty) (n: nat).{:pattern drop (build s v) n}
n <= length s ==> drop (build s v) n == build (drop s n) v
/// We include the definition of `rank` among our facts.
private let rank_def_fact =
forall (ty: Type u#a) (v: ty).{:pattern rank v} rank v == v
/// We represent the following Dafny axiom with `element_ranks_less_fact`.
///
/// axiom (forall s: Seq Box, i: int ::
/// { DtRank($Unbox(Seq#Index(s, i)): DatatypeType) }
/// 0 <= i && i < Seq#Length(s) ==> DtRank($Unbox(Seq#Index(s, i)): DatatypeType) < Seq#Rank(s) );
private let element_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (index s i)}
i < length s ==> rank (index s i) << rank s
/// We represent the following Dafny axiom with `drop_ranks_less_fact`.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Drop(s, i)) }
/// 0 < i && i <= Seq#Length(s) ==> Seq#Rank(Seq#Drop(s, i)) < Seq#Rank(s) );
private let drop_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern rank (drop s i)}
0 < i && i <= length s ==> rank (drop s i) << rank s
/// We represent the following Dafny axiom with
/// `take_ranks_less_fact`. However, since it isn't true in F* (which
/// has strong requirements for <<), we instead substitute length,
/// requiring decreases clauses to use length in this case.
///
/// axiom (forall<T> s: Seq T, i: int ::
/// { Seq#Rank(Seq#Take(s, i)) }
/// 0 <= i && i < Seq#Length(s) ==> Seq#Rank(Seq#Take(s, i)) < Seq#Rank(s) );
private let take_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat).{:pattern length (take s i)}
i < length s ==> length (take s i) << length s
/// We represent the following Dafny axiom with
/// `append_take_drop_ranks_less_fact`. However, since it isn't true
/// in F* (which has strong requirements for <<), we instead
/// substitute length, requiring decreases clauses to use
/// length in this case.
///
/// axiom (forall<T> s: Seq T, i: int, j: int ::
/// { Seq#Rank(Seq#Append(Seq#Take(s, i), Seq#Drop(s, j))) }
/// 0 <= i && i < j && j <= Seq#Length(s) ==>
/// Seq#Rank(Seq#Append(Seq#Take(s, i), Seq#Drop(s, j))) < Seq#Rank(s) );
private let append_take_drop_ranks_less_fact =
forall (ty: Type u#a) (s: seq ty) (i: nat) (j: nat).{:pattern length (append (take s i) (drop s j))}
i < j && j <= length s ==> length (append (take s i) (drop s j)) << length s
/// We represent the following Dafny axiom with `drop_zero_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Drop(s, n) }
/// n == 0 ==> Seq#Drop(s, n) == s);
private let drop_zero_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern drop s n}
n = 0 ==> drop s n == s
/// We represent the following Dafny axiom with `take_zero_fact`.
///
/// axiom (forall<T> s: Seq T, n: int :: { Seq#Take(s, n) }
/// n == 0 ==> Seq#Take(s, n) == Seq#Empty());
private let take_zero_fact =
forall (ty: Type u#a) (s: seq ty) (n: nat).{:pattern take s n}
n = 0 ==> take s n == empty
/// We represent the following Dafny axiom with `drop_then_drop_fact`.
///
/// axiom (forall<T> s: Seq T, m, n: int :: { Seq#Drop(Seq#Drop(s, m), n) }
/// 0 <= m && 0 <= n && m+n <= Seq#Length(s) ==>
/// Seq#Drop(Seq#Drop(s, m), n) == Seq#Drop(s, m+n)); | false | true | FStar.Sequence.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val drop_then_drop_fact : _: Prims.squash FStar.Sequence.Base.drop_length_fact -> Prims.logical | [] | FStar.Sequence.Base.drop_then_drop_fact | {
"file_name": "ulib/experimental/FStar.Sequence.Base.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | _: Prims.squash FStar.Sequence.Base.drop_length_fact -> Prims.logical | {
"end_col": 61,
"end_line": 557,
"start_col": 2,
"start_line": 556
} |
|
Prims.Tot | val pts_to_sl (#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:a)
: slprop u#1 | [
{
"abbrev": true,
"full_module": "FStar.Universe",
"short_module": "U"
},
{
"abbrev": true,
"full_module": "Steel.MonotonicHigherReference",
"short_module": "MHR"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let pts_to_sl (#a:Type) (#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:a)
= MHR.pts_to_sl r f (hide (U.raise_val v)) | val pts_to_sl (#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:a)
: slprop u#1
let pts_to_sl (#a: Type) (#p: Preorder.preorder a) (r: ref a p) (f: perm) (v: a) = | false | null | false | MHR.pts_to_sl r f (hide (U.raise_val v)) | {
"checked_file": "Steel.MonotonicReference.fst.checked",
"dependencies": [
"Steel.MonotonicHigherReference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Universe.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.PCM.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "Steel.MonotonicReference.fst"
} | [
"total"
] | [
"FStar.Preorder.preorder",
"Steel.MonotonicReference.ref",
"Steel.FractionalPermission.perm",
"Steel.MonotonicHigherReference.pts_to_sl",
"FStar.Universe.raise_t",
"Steel.MonotonicReference.raise_preorder",
"FStar.Ghost.hide",
"FStar.Universe.raise_val",
"Steel.Memory.slprop"
] | [] | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Steel.MonotonicReference
open FStar.PCM
open FStar.Ghost
open Steel.FractionalPermission
open Steel.Memory
open Steel.Effect.Atomic
open Steel.Effect
module Preorder = FStar.Preorder
module MHR = Steel.MonotonicHigherReference
module U = FStar.Universe
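/// `raise_preorder` lifts a preorder on `a` to the corresponding preorder on
/// `U.raise_t a`, so that a universe-0 monotonic reference can be represented
/// by a monotonic higher reference, which lives one universe up.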
let raise_preorder (#a:Type0) (p:Preorder.preorder a)
: Preorder.preorder (U.raise_t a)
= fun (x0 x1:U.raise_t a) ->
p (U.downgrade_val x0) (U.downgrade_val x1)
let ref a p = MHR.ref (FStar.Universe.raise_t a) (raise_preorder p)
/// The standard points to separation logic predicate
let pts_to_sl (#a:Type) (#p:Preorder.preorder a)
(r:ref a p)
(f:perm) | false | false | Steel.MonotonicReference.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val pts_to_sl (#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:a)
: slprop u#1 | [] | Steel.MonotonicReference.pts_to_sl | {
"file_name": "lib/steel/Steel.MonotonicReference.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | r: Steel.MonotonicReference.ref a p -> f: Steel.FractionalPermission.perm -> v: a
-> Steel.Memory.slprop | {
"end_col": 46,
"end_line": 43,
"start_col": 6,
"start_line": 43
} |
Prims.Tot | val ref (a:Type u#0) (p:Preorder.preorder a)
: Type u#0 | [
{
"abbrev": true,
"full_module": "FStar.Universe",
"short_module": "U"
},
{
"abbrev": true,
"full_module": "Steel.MonotonicHigherReference",
"short_module": "MHR"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ref a p = MHR.ref (FStar.Universe.raise_t a) (raise_preorder p) | val ref (a:Type u#0) (p:Preorder.preorder a)
: Type u#0
let ref a p = | false | null | false | MHR.ref (FStar.Universe.raise_t a) (raise_preorder p) | {
"checked_file": "Steel.MonotonicReference.fst.checked",
"dependencies": [
"Steel.MonotonicHigherReference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Universe.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.PCM.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "Steel.MonotonicReference.fst"
} | [
"total"
] | [
"FStar.Preorder.preorder",
"Steel.MonotonicHigherReference.ref",
"FStar.Universe.raise_t",
"Steel.MonotonicReference.raise_preorder"
] | [] | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Steel.MonotonicReference
open FStar.PCM
open FStar.Ghost
open Steel.FractionalPermission
open Steel.Memory
open Steel.Effect.Atomic
open Steel.Effect
module Preorder = FStar.Preorder
module MHR = Steel.MonotonicHigherReference
module U = FStar.Universe
let raise_preorder (#a:Type0) (p:Preorder.preorder a)
: Preorder.preorder (U.raise_t a)
= fun (x0 x1:U.raise_t a) ->
p (U.downgrade_val x0) (U.downgrade_val x1) | false | false | Steel.MonotonicReference.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ref (a:Type u#0) (p:Preorder.preorder a)
: Type u#0 | [] | Steel.MonotonicReference.ref | {
"file_name": "lib/steel/Steel.MonotonicReference.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | a: Type0 -> p: FStar.Preorder.preorder a -> Type0 | {
"end_col": 67,
"end_line": 36,
"start_col": 14,
"start_line": 36
} |
Prims.Tot | val witnessed (#a:Type u#0) (#p:Preorder.preorder a) (r:ref a p) (fact:property a)
: Type0 | [
{
"abbrev": true,
"full_module": "FStar.Universe",
"short_module": "U"
},
{
"abbrev": true,
"full_module": "Steel.MonotonicHigherReference",
"short_module": "MHR"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let witnessed (#a:Type u#0)
(#p:Preorder.preorder a)
(r:ref a p)
(fact:property a)
= MHR.witnessed r (lift_property fact) | val witnessed (#a:Type u#0) (#p:Preorder.preorder a) (r:ref a p) (fact:property a)
: Type0
let witnessed (#a: Type u#0) (#p: Preorder.preorder a) (r: ref a p) (fact: property a) = | false | null | false | MHR.witnessed r (lift_property fact) | {
"checked_file": "Steel.MonotonicReference.fst.checked",
"dependencies": [
"Steel.MonotonicHigherReference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Universe.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.PCM.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "Steel.MonotonicReference.fst"
} | [
"total"
] | [
"FStar.Preorder.preorder",
"Steel.MonotonicReference.ref",
"Steel.MonotonicReference.property",
"Steel.MonotonicHigherReference.witnessed",
"FStar.Universe.raise_t",
"Steel.MonotonicReference.raise_preorder",
"Steel.MonotonicReference.lift_property"
] | [] | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Steel.MonotonicReference
open FStar.PCM
open FStar.Ghost
open Steel.FractionalPermission
open Steel.Memory
open Steel.Effect.Atomic
open Steel.Effect
module Preorder = FStar.Preorder
module MHR = Steel.MonotonicHigherReference
module U = FStar.Universe
let raise_preorder (#a:Type0) (p:Preorder.preorder a)
: Preorder.preorder (U.raise_t a)
= fun (x0 x1:U.raise_t a) ->
p (U.downgrade_val x0) (U.downgrade_val x1)
let ref a p = MHR.ref (FStar.Universe.raise_t a) (raise_preorder p)
/// The standard points to separation logic predicate
let pts_to_sl (#a:Type) (#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:a)
= MHR.pts_to_sl r f (hide (U.raise_val v))
/// Allocates a reference with value [x]. We have full permission on the newly
/// allocated reference.
let alloc (#a:Type) (p:Preorder.preorder a) (v:a)
: SteelT (ref a p) emp (fun r -> pts_to r full_perm v)
= let r = MHR.alloc (raise_preorder p) (U.raise_val v) in
rewrite_slprop
(MHR.pts_to r full_perm (hide (U.raise_val v)))
(pts_to r full_perm v)
(fun _ -> ());
return r
/// Writes value [x] in the reference [r], as long as we have full ownership of [r]
let write (#a:Type) (#p:Preorder.preorder a) (#v:erased a)
(r:ref a p) (x:a)
: Steel unit (pts_to r full_perm v)
(fun v -> pts_to r full_perm x)
(requires fun _ -> p v x /\ True)
(ensures fun _ _ _ -> True)
= MHR.write r (U.raise_val x);
rewrite_slprop
(MHR.pts_to _ _ _)
(pts_to r full_perm x)
(fun _ -> ())
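/// Illustrative usage (a sketch, not part of the original file; `increases`
/// is a hypothetical preorder on `nat` defined as `fun x y -> x <= y`):
/// `let r = alloc increases 0` yields `pts_to r full_perm 0`, after which
/// `write r 1` is permitted because `increases 0 1` holds; a subsequent
/// `write r 0` would be rejected, since `increases 1 0` does not hold.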
let lift_property (#a:Type u#0) (p:property a)
: MHR.property (U.raise_t a)
= fun x -> p (U.downgrade_val x)
let witnessed (#a:Type u#0)
(#p:Preorder.preorder a)
(r:ref a p) | false | false | Steel.MonotonicReference.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val witnessed (#a:Type u#0) (#p:Preorder.preorder a) (r:ref a p) (fact:property a)
: Type0 | [] | Steel.MonotonicReference.witnessed | {
"file_name": "lib/steel/Steel.MonotonicReference.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | r: Steel.MonotonicReference.ref a p -> fact: Steel.MonotonicReference.property a -> Type0 | {
"end_col": 40,
"end_line": 78,
"start_col": 4,
"start_line": 78
} |
Prims.Tot | val lift_property (#a: Type u#0) (p: property a) : MHR.property (U.raise_t a) | [
{
"abbrev": true,
"full_module": "FStar.Universe",
"short_module": "U"
},
{
"abbrev": true,
"full_module": "Steel.MonotonicHigherReference",
"short_module": "MHR"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lift_property (#a:Type u#0) (p:property a)
: MHR.property (U.raise_t a)
= fun x -> p (U.downgrade_val x) | val lift_property (#a: Type u#0) (p: property a) : MHR.property (U.raise_t a)
let lift_property (#a: Type u#0) (p: property a) : MHR.property (U.raise_t a) = | false | null | false | fun x -> p (U.downgrade_val x) | {
"checked_file": "Steel.MonotonicReference.fst.checked",
"dependencies": [
"Steel.MonotonicHigherReference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Universe.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.PCM.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "Steel.MonotonicReference.fst"
} | [
"total"
] | [
"Steel.MonotonicReference.property",
"FStar.Universe.raise_t",
"FStar.Universe.downgrade_val",
"Prims.prop",
"Steel.MonotonicHigherReference.property"
] | [] | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Steel.MonotonicReference
open FStar.PCM
open FStar.Ghost
open Steel.FractionalPermission
open Steel.Memory
open Steel.Effect.Atomic
open Steel.Effect
module Preorder = FStar.Preorder
module MHR = Steel.MonotonicHigherReference
module U = FStar.Universe
let raise_preorder (#a:Type0) (p:Preorder.preorder a)
: Preorder.preorder (U.raise_t a)
= fun (x0 x1:U.raise_t a) ->
p (U.downgrade_val x0) (U.downgrade_val x1)
let ref a p = MHR.ref (FStar.Universe.raise_t a) (raise_preorder p)
/// The standard points to separation logic predicate
let pts_to_sl (#a:Type) (#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:a)
= MHR.pts_to_sl r f (hide (U.raise_val v))
/// Allocates a reference with value [x]. We have full permission on the newly
/// allocated reference.
let alloc (#a:Type) (p:Preorder.preorder a) (v:a)
: SteelT (ref a p) emp (fun r -> pts_to r full_perm v)
= let r = MHR.alloc (raise_preorder p) (U.raise_val v) in
rewrite_slprop
(MHR.pts_to r full_perm (hide (U.raise_val v)))
(pts_to r full_perm v)
(fun _ -> ());
return r
/// Writes value [x] in the reference [r], as long as we have full ownership of [r]
let write (#a:Type) (#p:Preorder.preorder a) (#v:erased a)
(r:ref a p) (x:a)
: Steel unit (pts_to r full_perm v)
(fun v -> pts_to r full_perm x)
(requires fun _ -> p v x /\ True)
(ensures fun _ _ _ -> True)
= MHR.write r (U.raise_val x);
rewrite_slprop
(MHR.pts_to _ _ _)
(pts_to r full_perm x)
(fun _ -> ())
let lift_property (#a:Type u#0) (p:property a) | false | false | Steel.MonotonicReference.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lift_property (#a: Type u#0) (p: property a) : MHR.property (U.raise_t a) | [] | Steel.MonotonicReference.lift_property | {
"file_name": "lib/steel/Steel.MonotonicReference.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | p: Steel.MonotonicReference.property a
-> Steel.MonotonicHigherReference.property (FStar.Universe.raise_t a) | {
"end_col": 34,
"end_line": 72,
"start_col": 4,
"start_line": 72
} |
Prims.Tot | val raise_preorder (#a: Type0) (p: Preorder.preorder a) : Preorder.preorder (U.raise_t a) | [
{
"abbrev": true,
"full_module": "FStar.Universe",
"short_module": "U"
},
{
"abbrev": true,
"full_module": "Steel.MonotonicHigherReference",
"short_module": "MHR"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let raise_preorder (#a:Type0) (p:Preorder.preorder a)
: Preorder.preorder (U.raise_t a)
= fun (x0 x1:U.raise_t a) ->
p (U.downgrade_val x0) (U.downgrade_val x1) | val raise_preorder (#a: Type0) (p: Preorder.preorder a) : Preorder.preorder (U.raise_t a)
let raise_preorder (#a: Type0) (p: Preorder.preorder a) : Preorder.preorder (U.raise_t a) = | false | null | false | fun (x0: U.raise_t a) (x1: U.raise_t a) -> p (U.downgrade_val x0) (U.downgrade_val x1) | {
"checked_file": "Steel.MonotonicReference.fst.checked",
"dependencies": [
"Steel.MonotonicHigherReference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Universe.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.PCM.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "Steel.MonotonicReference.fst"
} | [
"total"
] | [
"FStar.Preorder.preorder",
"FStar.Universe.raise_t",
"FStar.Universe.downgrade_val"
] | [] | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Steel.MonotonicReference
open FStar.PCM
open FStar.Ghost
open Steel.FractionalPermission
open Steel.Memory
open Steel.Effect.Atomic
open Steel.Effect
module Preorder = FStar.Preorder
module MHR = Steel.MonotonicHigherReference
module U = FStar.Universe
let raise_preorder (#a:Type0) (p:Preorder.preorder a) | false | false | Steel.MonotonicReference.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val raise_preorder (#a: Type0) (p: Preorder.preorder a) : Preorder.preorder (U.raise_t a) | [] | Steel.MonotonicReference.raise_preorder | {
"file_name": "lib/steel/Steel.MonotonicReference.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | p: FStar.Preorder.preorder a -> FStar.Preorder.preorder (FStar.Universe.raise_t a) | {
"end_col": 50,
"end_line": 34,
"start_col": 4,
"start_line": 33
} |
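A minimal, self-contained sketch of what the raise_preorder definition above computes: the lifted relation simply compares the underlying (downgraded) values. The module and value names below are invented for illustration, and the sketch has not been run through the F* checker.

module Sketch.RaisePreorder

module U = FStar.Universe
module P = FStar.Preorder

(* A concrete base preorder on nat: the value may only grow. *)
let nat_le : P.preorder nat = fun x y -> x <= y

(* Lifting nat_le to the universe-raised type, following the shape of
   raise_preorder above: downgrade both sides, then compare. *)
let raised_nat_le : P.preorder (U.raise_t nat) =
  fun x0 x1 -> nat_le (U.downgrade_val x0) (U.downgrade_val x1)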
Steel.Effect.Atomic.SteelAtomicU | val recall (#inames: _) (#a:Type u#0) (#q:perm) (#p:Preorder.preorder a)
(fact:property a)
(r:erased (ref a p))
(v:erased a)
(w:witnessed r fact)
: SteelAtomicU unit inames (pts_to r q v)
(fun _ -> pts_to r q v)
(requires fun _ -> True)
(ensures fun _ _ _ -> fact v) | [
{
"abbrev": true,
"full_module": "FStar.Universe",
"short_module": "U"
},
{
"abbrev": true,
"full_module": "Steel.MonotonicHigherReference",
"short_module": "MHR"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let recall (#inames: _)
(#a:Type u#0)
(#q:perm)
(#p:Preorder.preorder a)
(fact:property a)
(r:erased (ref a p))
(v:erased a)
(w:witnessed r fact)
= MHR.recall (lift_property fact) r (hide (U.raise_val (reveal v))) w | val recall (#inames: _) (#a:Type u#0) (#q:perm) (#p:Preorder.preorder a)
(fact:property a)
(r:erased (ref a p))
(v:erased a)
(w:witnessed r fact)
: SteelAtomicU unit inames (pts_to r q v)
(fun _ -> pts_to r q v)
(requires fun _ -> True)
(ensures fun _ _ _ -> fact v)
let recall
(#inames: _)
(#a: Type u#0)
(#q: perm)
(#p: Preorder.preorder a)
(fact: property a)
(r: erased (ref a p))
(v: erased a)
(w: witnessed r fact)
= | true | null | false | MHR.recall (lift_property fact) r (hide (U.raise_val (reveal v))) w | {
"checked_file": "Steel.MonotonicReference.fst.checked",
"dependencies": [
"Steel.MonotonicHigherReference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Universe.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.PCM.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "Steel.MonotonicReference.fst"
} | [] | [
"Steel.Memory.inames",
"Steel.FractionalPermission.perm",
"FStar.Preorder.preorder",
"Steel.MonotonicReference.property",
"FStar.Ghost.erased",
"Steel.MonotonicReference.ref",
"Steel.MonotonicReference.witnessed",
"FStar.Ghost.reveal",
"Steel.MonotonicHigherReference.recall",
"FStar.Universe.raise_t",
"Steel.MonotonicReference.raise_preorder",
"Steel.MonotonicReference.lift_property",
"FStar.Ghost.hide",
"FStar.Universe.raise_val",
"Prims.unit"
] | [] | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Steel.MonotonicReference
open FStar.PCM
open FStar.Ghost
open Steel.FractionalPermission
open Steel.Memory
open Steel.Effect.Atomic
open Steel.Effect
module Preorder = FStar.Preorder
module MHR = Steel.MonotonicHigherReference
module U = FStar.Universe
let raise_preorder (#a:Type0) (p:Preorder.preorder a)
: Preorder.preorder (U.raise_t a)
= fun (x0 x1:U.raise_t a) ->
p (U.downgrade_val x0) (U.downgrade_val x1)
let ref a p = MHR.ref (FStar.Universe.raise_t a) (raise_preorder p)
/// The standard points to separation logic predicate
let pts_to_sl (#a:Type) (#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:a)
= MHR.pts_to_sl r f (hide (U.raise_val v))
/// Allocates a reference with value [x]. We have full permission on the newly
/// allocated reference.
let alloc (#a:Type) (p:Preorder.preorder a) (v:a)
: SteelT (ref a p) emp (fun r -> pts_to r full_perm v)
= let r = MHR.alloc (raise_preorder p) (U.raise_val v) in
rewrite_slprop
(MHR.pts_to r full_perm (hide (U.raise_val v)))
(pts_to r full_perm v)
(fun _ -> ());
return r
/// Writes value [x] in the reference [r], as long as we have full ownership of [r]
let write (#a:Type) (#p:Preorder.preorder a) (#v:erased a)
(r:ref a p) (x:a)
: Steel unit (pts_to r full_perm v)
(fun v -> pts_to r full_perm x)
(requires fun _ -> p v x /\ True)
(ensures fun _ _ _ -> True)
= MHR.write r (U.raise_val x);
rewrite_slprop
(MHR.pts_to _ _ _)
(pts_to r full_perm x)
(fun _ -> ())
let lift_property (#a:Type u#0) (p:property a)
: MHR.property (U.raise_t a)
= fun x -> p (U.downgrade_val x)
let witnessed (#a:Type u#0)
(#p:Preorder.preorder a)
(r:ref a p)
(fact:property a)
= MHR.witnessed r (lift_property fact)
/// If [fact] is a stable property for the reference preorder [p], and if
/// it holds for the current value [v] of the reference, then we can witness it
let witness (#inames: _)
(#a:Type)
(#q:perm)
(#p:Preorder.preorder a)
(r:erased (ref a p))
(fact:stable_property p)
(v:erased a)
(_:squash (fact v))
= MHR.witness r (lift_property fact) (hide (U.raise_val (reveal v))) ()
/// If we previously witnessed the validity of [fact], we can recall its validity
let recall (#inames: _)
(#a:Type u#0)
(#q:perm)
(#p:Preorder.preorder a)
(fact:property a)
(r:erased (ref a p))
(v:erased a) | false | false | Steel.MonotonicReference.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val recall (#inames: _) (#a:Type u#0) (#q:perm) (#p:Preorder.preorder a)
(fact:property a)
(r:erased (ref a p))
(v:erased a)
(w:witnessed r fact)
: SteelAtomicU unit inames (pts_to r q v)
(fun _ -> pts_to r q v)
(requires fun _ -> True)
(ensures fun _ _ _ -> fact v) | [] | Steel.MonotonicReference.recall | {
"file_name": "lib/steel/Steel.MonotonicReference.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
fact: Steel.MonotonicReference.property a ->
r: FStar.Ghost.erased (Steel.MonotonicReference.ref a p) ->
v: FStar.Ghost.erased a ->
w: Steel.MonotonicReference.witnessed (FStar.Ghost.reveal r) fact
-> Steel.Effect.Atomic.SteelAtomicU Prims.unit | {
"end_col": 71,
"end_line": 101,
"start_col": 4,
"start_line": 101
} |
Steel.Effect.Atomic.SteelAtomicUT | val witness (#inames: _) (#a:Type) (#q:perm) (#p:Preorder.preorder a)
(r:erased (ref a p))
(fact:stable_property p)
(v:erased a)
(_:squash (fact v))
: SteelAtomicUT (witnessed r fact) inames
(pts_to r q v)
(fun _ -> pts_to r q v) | [
{
"abbrev": true,
"full_module": "FStar.Universe",
"short_module": "U"
},
{
"abbrev": true,
"full_module": "Steel.MonotonicHigherReference",
"short_module": "MHR"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let witness (#inames: _)
(#a:Type)
(#q:perm)
(#p:Preorder.preorder a)
(r:erased (ref a p))
(fact:stable_property p)
(v:erased a)
(_:squash (fact v))
= MHR.witness r (lift_property fact) (hide (U.raise_val (reveal v))) () | val witness (#inames: _) (#a:Type) (#q:perm) (#p:Preorder.preorder a)
(r:erased (ref a p))
(fact:stable_property p)
(v:erased a)
(_:squash (fact v))
: SteelAtomicUT (witnessed r fact) inames
(pts_to r q v)
(fun _ -> pts_to r q v)
let witness
(#inames: _)
(#a: Type)
(#q: perm)
(#p: Preorder.preorder a)
(r: erased (ref a p))
(fact: stable_property p)
(v: erased a)
(_: squash (fact v))
= | true | null | false | MHR.witness r (lift_property fact) (hide (U.raise_val (reveal v))) () | {
"checked_file": "Steel.MonotonicReference.fst.checked",
"dependencies": [
"Steel.MonotonicHigherReference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Universe.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.PCM.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "Steel.MonotonicReference.fst"
} | [] | [
"Steel.Memory.inames",
"Steel.FractionalPermission.perm",
"FStar.Preorder.preorder",
"FStar.Ghost.erased",
"Steel.MonotonicReference.ref",
"Steel.MonotonicReference.stable_property",
"Prims.squash",
"FStar.Ghost.reveal",
"Steel.MonotonicHigherReference.witness",
"FStar.Universe.raise_t",
"Steel.MonotonicReference.raise_preorder",
"Steel.MonotonicReference.lift_property",
"FStar.Ghost.hide",
"FStar.Universe.raise_val",
"Steel.MonotonicHigherReference.witnessed",
"Steel.MonotonicHigherReference.ref",
"Steel.MonotonicReference.witnessed"
] | [] | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Steel.MonotonicReference
open FStar.PCM
open FStar.Ghost
open Steel.FractionalPermission
open Steel.Memory
open Steel.Effect.Atomic
open Steel.Effect
module Preorder = FStar.Preorder
module MHR = Steel.MonotonicHigherReference
module U = FStar.Universe
let raise_preorder (#a:Type0) (p:Preorder.preorder a)
: Preorder.preorder (U.raise_t a)
= fun (x0 x1:U.raise_t a) ->
p (U.downgrade_val x0) (U.downgrade_val x1)
let ref a p = MHR.ref (FStar.Universe.raise_t a) (raise_preorder p)
/// The standard points to separation logic predicate
let pts_to_sl (#a:Type) (#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:a)
= MHR.pts_to_sl r f (hide (U.raise_val v))
/// Allocates a reference with value [x]. We have full permission on the newly
/// allocated reference.
let alloc (#a:Type) (p:Preorder.preorder a) (v:a)
: SteelT (ref a p) emp (fun r -> pts_to r full_perm v)
= let r = MHR.alloc (raise_preorder p) (U.raise_val v) in
rewrite_slprop
(MHR.pts_to r full_perm (hide (U.raise_val v)))
(pts_to r full_perm v)
(fun _ -> ());
return r
/// Writes value [x] in the reference [r], as long as we have full ownership of [r]
let write (#a:Type) (#p:Preorder.preorder a) (#v:erased a)
(r:ref a p) (x:a)
: Steel unit (pts_to r full_perm v)
(fun v -> pts_to r full_perm x)
(requires fun _ -> p v x /\ True)
(ensures fun _ _ _ -> True)
= MHR.write r (U.raise_val x);
rewrite_slprop
(MHR.pts_to _ _ _)
(pts_to r full_perm x)
(fun _ -> ())
let lift_property (#a:Type u#0) (p:property a)
: MHR.property (U.raise_t a)
= fun x -> p (U.downgrade_val x)
let witnessed (#a:Type u#0)
(#p:Preorder.preorder a)
(r:ref a p)
(fact:property a)
= MHR.witnessed r (lift_property fact)
/// If [fact] is a stable property for the reference preorder [p], and if
/// it holds for the current value [v] of the reference, then we can witness it
let witness (#inames: _)
(#a:Type)
(#q:perm)
(#p:Preorder.preorder a)
(r:erased (ref a p))
(fact:stable_property p)
(v:erased a) | false | false | Steel.MonotonicReference.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val witness (#inames: _) (#a:Type) (#q:perm) (#p:Preorder.preorder a)
(r:erased (ref a p))
(fact:stable_property p)
(v:erased a)
(_:squash (fact v))
: SteelAtomicUT (witnessed r fact) inames
(pts_to r q v)
(fun _ -> pts_to r q v) | [] | Steel.MonotonicReference.witness | {
"file_name": "lib/steel/Steel.MonotonicReference.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
r: FStar.Ghost.erased (Steel.MonotonicReference.ref a p) ->
fact: Steel.MonotonicReference.stable_property p ->
v: FStar.Ghost.erased a ->
_: Prims.squash (fact (FStar.Ghost.reveal v))
-> Steel.Effect.Atomic.SteelAtomicUT
(Steel.MonotonicReference.witnessed (FStar.Ghost.reveal r) fact) | {
"end_col": 73,
"end_line": 90,
"start_col": 4,
"start_line": 90
} |
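The `stable_property p` expected by `witness` above is a predicate that the preorder can only make "more true"; that is exactly `FStar.Preorder.stable`. A small pure sketch of such a fact (names invented for illustration, not checked against the Steel build):

module Sketch.StableFact

module P = FStar.Preorder

(* A grow-only preorder on nat. *)
let grows : P.preorder nat = fun x y -> x <= y

(* "The value is at least n" can only become more true as the value grows. *)
let at_least (n:nat) : P.predicate nat = fun v -> n <= v

(* Stability with respect to the preorder is what makes `at_least n` the
   kind of fact that `witness` accepts; the SMT solver discharges it. *)
let at_least_is_stable (n:nat)
  : Lemma (P.stable (at_least n) grows)
  = ()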
Steel.Effect.Atomic.SteelGhostT | val share (#inames:_)
(#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:Ghost.erased a)
: SteelGhostT unit inames
(pts_to r f v)
(fun _ -> pts_to r (half_perm f) v `star` pts_to r (half_perm f) v) | [
{
"abbrev": true,
"full_module": "FStar.Universe",
"short_module": "U"
},
{
"abbrev": true,
"full_module": "Steel.MonotonicHigherReference",
"short_module": "MHR"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let share (#inames:_)
(#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:Ghost.erased a)
: SteelGhostT unit inames
(pts_to r f v)
(fun _ -> pts_to r (half_perm f) v `star` pts_to r (half_perm f) v)
= MHR.share r f (hide (U.raise_val (reveal v))) | val share (#inames:_)
(#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:Ghost.erased a)
: SteelGhostT unit inames
(pts_to r f v)
(fun _ -> pts_to r (half_perm f) v `star` pts_to r (half_perm f) v)
let share
(#inames: _)
(#a: Type)
(#p: Preorder.preorder a)
(r: ref a p)
(f: perm)
(v: Ghost.erased a)
: SteelGhostT unit
inames
(pts_to r f v)
(fun _ -> (pts_to r (half_perm f) v) `star` (pts_to r (half_perm f) v)) = | true | null | false | MHR.share r f (hide (U.raise_val (reveal v))) | {
"checked_file": "Steel.MonotonicReference.fst.checked",
"dependencies": [
"Steel.MonotonicHigherReference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Universe.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.PCM.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "Steel.MonotonicReference.fst"
} | [] | [
"Steel.Memory.inames",
"FStar.Preorder.preorder",
"Steel.MonotonicReference.ref",
"Steel.FractionalPermission.perm",
"FStar.Ghost.erased",
"Steel.MonotonicHigherReference.share",
"FStar.Universe.raise_t",
"Steel.MonotonicReference.raise_preorder",
"FStar.Ghost.hide",
"FStar.Universe.raise_val",
"FStar.Ghost.reveal",
"Prims.unit",
"Steel.MonotonicReference.pts_to",
"Steel.Effect.Common.star",
"Steel.FractionalPermission.half_perm",
"Steel.Effect.Common.vprop"
] | [] | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Steel.MonotonicReference
open FStar.PCM
open FStar.Ghost
open Steel.FractionalPermission
open Steel.Memory
open Steel.Effect.Atomic
open Steel.Effect
module Preorder = FStar.Preorder
module MHR = Steel.MonotonicHigherReference
module U = FStar.Universe
let raise_preorder (#a:Type0) (p:Preorder.preorder a)
: Preorder.preorder (U.raise_t a)
= fun (x0 x1:U.raise_t a) ->
p (U.downgrade_val x0) (U.downgrade_val x1)
let ref a p = MHR.ref (FStar.Universe.raise_t a) (raise_preorder p)
/// The standard points to separation logic predicate
let pts_to_sl (#a:Type) (#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:a)
= MHR.pts_to_sl r f (hide (U.raise_val v))
/// Allocates a reference with value [x]. We have full permission on the newly
/// allocated reference.
let alloc (#a:Type) (p:Preorder.preorder a) (v:a)
: SteelT (ref a p) emp (fun r -> pts_to r full_perm v)
= let r = MHR.alloc (raise_preorder p) (U.raise_val v) in
rewrite_slprop
(MHR.pts_to r full_perm (hide (U.raise_val v)))
(pts_to r full_perm v)
(fun _ -> ());
return r
/// Writes value [x] in the reference [r], as long as we have full ownership of [r]
let write (#a:Type) (#p:Preorder.preorder a) (#v:erased a)
(r:ref a p) (x:a)
: Steel unit (pts_to r full_perm v)
(fun v -> pts_to r full_perm x)
(requires fun _ -> p v x /\ True)
(ensures fun _ _ _ -> True)
= MHR.write r (U.raise_val x);
rewrite_slprop
(MHR.pts_to _ _ _)
(pts_to r full_perm x)
(fun _ -> ())
let lift_property (#a:Type u#0) (p:property a)
: MHR.property (U.raise_t a)
= fun x -> p (U.downgrade_val x)
let witnessed (#a:Type u#0)
(#p:Preorder.preorder a)
(r:ref a p)
(fact:property a)
= MHR.witnessed r (lift_property fact)
/// If [fact] is a stable property for the reference preorder [p], and if
/// it holds for the current value [v] of the reference, then we can witness it
let witness (#inames: _)
(#a:Type)
(#q:perm)
(#p:Preorder.preorder a)
(r:erased (ref a p))
(fact:stable_property p)
(v:erased a)
(_:squash (fact v))
= MHR.witness r (lift_property fact) (hide (U.raise_val (reveal v))) ()
/// If we previously witnessed the validity of [fact], we can recall its validity
let recall (#inames: _)
(#a:Type u#0)
(#q:perm)
(#p:Preorder.preorder a)
(fact:property a)
(r:erased (ref a p))
(v:erased a)
(w:witnessed r fact)
= MHR.recall (lift_property fact) r (hide (U.raise_val (reveal v))) w
/// Monotonic references are also equipped with the usual fractional permission discipline
/// So, you can split a reference into two read-only shares
let share (#inames:_)
(#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:Ghost.erased a)
: SteelGhostT unit inames
(pts_to r f v) | false | false | Steel.MonotonicReference.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val share (#inames:_)
(#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:Ghost.erased a)
: SteelGhostT unit inames
(pts_to r f v)
(fun _ -> pts_to r (half_perm f) v `star` pts_to r (half_perm f) v) | [] | Steel.MonotonicReference.share | {
"file_name": "lib/steel/Steel.MonotonicReference.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | r: Steel.MonotonicReference.ref a p -> f: Steel.FractionalPermission.perm -> v: FStar.Ghost.erased a
-> Steel.Effect.Atomic.SteelGhostT Prims.unit | {
"end_col": 49,
"end_line": 114,
"start_col": 4,
"start_line": 114
} |
Steel.Effect.Atomic.SteelGhostT | val gather (#inames:_)
(#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f g:perm)
(v:Ghost.erased a)
: SteelGhostT unit inames
(pts_to r f v `star` pts_to r g v)
(fun _ -> pts_to r (sum_perm f g) v) | [
{
"abbrev": true,
"full_module": "FStar.Universe",
"short_module": "U"
},
{
"abbrev": true,
"full_module": "Steel.MonotonicHigherReference",
"short_module": "MHR"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gather (#inames:_)
(#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f g:perm)
(v:Ghost.erased a)
: SteelGhostT unit inames
(pts_to r f v `star` pts_to r g v)
(fun _ -> pts_to r (sum_perm f g) v)
= MHR.gather r f g (hide (U.raise_val (reveal v))) | val gather (#inames:_)
(#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f g:perm)
(v:Ghost.erased a)
: SteelGhostT unit inames
(pts_to r f v `star` pts_to r g v)
(fun _ -> pts_to r (sum_perm f g) v)
let gather
(#inames: _)
(#a: Type)
(#p: Preorder.preorder a)
(r: ref a p)
(f g: perm)
(v: Ghost.erased a)
: SteelGhostT unit
inames
((pts_to r f v) `star` (pts_to r g v))
(fun _ -> pts_to r (sum_perm f g) v) = | true | null | false | MHR.gather r f g (hide (U.raise_val (reveal v))) | {
"checked_file": "Steel.MonotonicReference.fst.checked",
"dependencies": [
"Steel.MonotonicHigherReference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Universe.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.PCM.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "Steel.MonotonicReference.fst"
} | [] | [
"Steel.Memory.inames",
"FStar.Preorder.preorder",
"Steel.MonotonicReference.ref",
"Steel.FractionalPermission.perm",
"FStar.Ghost.erased",
"Steel.MonotonicHigherReference.gather",
"FStar.Universe.raise_t",
"Steel.MonotonicReference.raise_preorder",
"FStar.Ghost.hide",
"FStar.Universe.raise_val",
"FStar.Ghost.reveal",
"Prims.unit",
"Steel.Effect.Common.star",
"Steel.MonotonicReference.pts_to",
"Steel.FractionalPermission.sum_perm",
"Steel.Effect.Common.vprop"
] | [] | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Steel.MonotonicReference
open FStar.PCM
open FStar.Ghost
open Steel.FractionalPermission
open Steel.Memory
open Steel.Effect.Atomic
open Steel.Effect
module Preorder = FStar.Preorder
module MHR = Steel.MonotonicHigherReference
module U = FStar.Universe
let raise_preorder (#a:Type0) (p:Preorder.preorder a)
: Preorder.preorder (U.raise_t a)
= fun (x0 x1:U.raise_t a) ->
p (U.downgrade_val x0) (U.downgrade_val x1)
let ref a p = MHR.ref (FStar.Universe.raise_t a) (raise_preorder p)
/// The standard points to separation logic predicate
let pts_to_sl (#a:Type) (#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:a)
= MHR.pts_to_sl r f (hide (U.raise_val v))
/// Allocates a reference with value [x]. We have full permission on the newly
/// allocated reference.
let alloc (#a:Type) (p:Preorder.preorder a) (v:a)
: SteelT (ref a p) emp (fun r -> pts_to r full_perm v)
= let r = MHR.alloc (raise_preorder p) (U.raise_val v) in
rewrite_slprop
(MHR.pts_to r full_perm (hide (U.raise_val v)))
(pts_to r full_perm v)
(fun _ -> ());
return r
/// Writes value [x] in the reference [r], as long as we have full ownership of [r]
let write (#a:Type) (#p:Preorder.preorder a) (#v:erased a)
(r:ref a p) (x:a)
: Steel unit (pts_to r full_perm v)
(fun v -> pts_to r full_perm x)
(requires fun _ -> p v x /\ True)
(ensures fun _ _ _ -> True)
= MHR.write r (U.raise_val x);
rewrite_slprop
(MHR.pts_to _ _ _)
(pts_to r full_perm x)
(fun _ -> ())
let lift_property (#a:Type u#0) (p:property a)
: MHR.property (U.raise_t a)
= fun x -> p (U.downgrade_val x)
let witnessed (#a:Type u#0)
(#p:Preorder.preorder a)
(r:ref a p)
(fact:property a)
= MHR.witnessed r (lift_property fact)
/// If [fact] is a stable property for the reference preorder [p], and if
/// it holds for the current value [v] of the reference, then we can witness it
let witness (#inames: _)
(#a:Type)
(#q:perm)
(#p:Preorder.preorder a)
(r:erased (ref a p))
(fact:stable_property p)
(v:erased a)
(_:squash (fact v))
= MHR.witness r (lift_property fact) (hide (U.raise_val (reveal v))) ()
/// If we previously witnessed the validity of [fact], we can recall its validity
let recall (#inames: _)
(#a:Type u#0)
(#q:perm)
(#p:Preorder.preorder a)
(fact:property a)
(r:erased (ref a p))
(v:erased a)
(w:witnessed r fact)
= MHR.recall (lift_property fact) r (hide (U.raise_val (reveal v))) w
/// Monotonic references are also equipped with the usual fractional permission discipline
/// So, you can split a reference into two read-only shares
let share (#inames:_)
(#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:Ghost.erased a)
: SteelGhostT unit inames
(pts_to r f v)
(fun _ -> pts_to r (half_perm f) v `star` pts_to r (half_perm f) v)
= MHR.share r f (hide (U.raise_val (reveal v)))
/// And you can gather back the shares
let gather (#inames:_)
(#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f g:perm)
(v:Ghost.erased a)
: SteelGhostT unit inames
(pts_to r f v `star` pts_to r g v) | false | false | Steel.MonotonicReference.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gather (#inames:_)
(#a:Type)
(#p:Preorder.preorder a)
(r:ref a p)
(f g:perm)
(v:Ghost.erased a)
: SteelGhostT unit inames
(pts_to r f v `star` pts_to r g v)
(fun _ -> pts_to r (sum_perm f g) v) | [] | Steel.MonotonicReference.gather | {
"file_name": "lib/steel/Steel.MonotonicReference.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
r: Steel.MonotonicReference.ref a p ->
f: Steel.FractionalPermission.perm ->
g: Steel.FractionalPermission.perm ->
v: FStar.Ghost.erased a
-> Steel.Effect.Atomic.SteelGhostT Prims.unit | {
"end_col": 52,
"end_line": 126,
"start_col": 4,
"start_line": 126
} |
Steel.Effect.Steel | val write (#a:Type) (#p:Preorder.preorder a) (#v:erased a)
(r:ref a p) (x:a)
: Steel unit (pts_to r full_perm v)
(fun v -> pts_to r full_perm x)
(requires fun _ -> p v x /\ True)
(ensures fun _ _ _ -> True) | [
{
"abbrev": true,
"full_module": "FStar.Universe",
"short_module": "U"
},
{
"abbrev": true,
"full_module": "Steel.MonotonicHigherReference",
"short_module": "MHR"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let write (#a:Type) (#p:Preorder.preorder a) (#v:erased a)
(r:ref a p) (x:a)
: Steel unit (pts_to r full_perm v)
(fun v -> pts_to r full_perm x)
(requires fun _ -> p v x /\ True)
(ensures fun _ _ _ -> True)
= MHR.write r (U.raise_val x);
rewrite_slprop
(MHR.pts_to _ _ _)
(pts_to r full_perm x)
(fun _ -> ()) | val write (#a:Type) (#p:Preorder.preorder a) (#v:erased a)
(r:ref a p) (x:a)
: Steel unit (pts_to r full_perm v)
(fun v -> pts_to r full_perm x)
(requires fun _ -> p v x /\ True)
(ensures fun _ _ _ -> True)
let write (#a: Type) (#p: Preorder.preorder a) (#v: erased a) (r: ref a p) (x: a)
: Steel unit
(pts_to r full_perm v)
(fun v -> pts_to r full_perm x)
(requires fun _ -> p v x /\ True)
(ensures fun _ _ _ -> True) = | true | null | false | MHR.write r (U.raise_val x);
rewrite_slprop (MHR.pts_to _ _ _) (pts_to r full_perm x) (fun _ -> ()) | {
"checked_file": "Steel.MonotonicReference.fst.checked",
"dependencies": [
"Steel.MonotonicHigherReference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Universe.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.PCM.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "Steel.MonotonicReference.fst"
} | [] | [
"FStar.Preorder.preorder",
"FStar.Ghost.erased",
"Steel.MonotonicReference.ref",
"Steel.Effect.Atomic.rewrite_slprop",
"FStar.Ghost.hide",
"FStar.Set.set",
"Steel.Memory.iname",
"FStar.Set.empty",
"Steel.MonotonicHigherReference.pts_to",
"FStar.Universe.raise_t",
"Steel.MonotonicReference.raise_preorder",
"Steel.FractionalPermission.full_perm",
"FStar.Universe.raise_val",
"Steel.MonotonicReference.pts_to",
"Steel.Memory.mem",
"Prims.unit",
"Steel.MonotonicHigherReference.write",
"FStar.Ghost.reveal",
"Steel.Effect.Common.vprop",
"Steel.Effect.Common.rmem",
"Prims.l_and",
"Prims.l_True"
] | [] | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Steel.MonotonicReference
open FStar.PCM
open FStar.Ghost
open Steel.FractionalPermission
open Steel.Memory
open Steel.Effect.Atomic
open Steel.Effect
module Preorder = FStar.Preorder
module MHR = Steel.MonotonicHigherReference
module U = FStar.Universe
let raise_preorder (#a:Type0) (p:Preorder.preorder a)
: Preorder.preorder (U.raise_t a)
= fun (x0 x1:U.raise_t a) ->
p (U.downgrade_val x0) (U.downgrade_val x1)
let ref a p = MHR.ref (FStar.Universe.raise_t a) (raise_preorder p)
/// The standard points to separation logic predicate
let pts_to_sl (#a:Type) (#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:a)
= MHR.pts_to_sl r f (hide (U.raise_val v))
/// Allocates a reference with value [x]. We have full permission on the newly
/// allocated reference.
let alloc (#a:Type) (p:Preorder.preorder a) (v:a)
: SteelT (ref a p) emp (fun r -> pts_to r full_perm v)
= let r = MHR.alloc (raise_preorder p) (U.raise_val v) in
rewrite_slprop
(MHR.pts_to r full_perm (hide (U.raise_val v)))
(pts_to r full_perm v)
(fun _ -> ());
return r
/// Writes value [x] in the reference [r], as long as we have full ownership of [r]
let write (#a:Type) (#p:Preorder.preorder a) (#v:erased a)
(r:ref a p) (x:a)
: Steel unit (pts_to r full_perm v)
(fun v -> pts_to r full_perm x)
(requires fun _ -> p v x /\ True) | false | false | Steel.MonotonicReference.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val write (#a:Type) (#p:Preorder.preorder a) (#v:erased a)
(r:ref a p) (x:a)
: Steel unit (pts_to r full_perm v)
(fun v -> pts_to r full_perm x)
(requires fun _ -> p v x /\ True)
(ensures fun _ _ _ -> True) | [] | Steel.MonotonicReference.write | {
"file_name": "lib/steel/Steel.MonotonicReference.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | r: Steel.MonotonicReference.ref a p -> x: a -> Steel.Effect.Steel Prims.unit | {
"end_col": 19,
"end_line": 68,
"start_col": 4,
"start_line": 64
} |
Steel.Effect.SteelT | val alloc (#a:Type) (p:Preorder.preorder a) (v:a)
: SteelT (ref a p) emp (fun r -> pts_to r full_perm v) | [
{
"abbrev": true,
"full_module": "FStar.Universe",
"short_module": "U"
},
{
"abbrev": true,
"full_module": "Steel.MonotonicHigherReference",
"short_module": "MHR"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "Preorder"
},
{
"abbrev": false,
"full_module": "Steel.Effect",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Effect.Atomic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.FractionalPermission",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Ghost",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.PCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let alloc (#a:Type) (p:Preorder.preorder a) (v:a)
: SteelT (ref a p) emp (fun r -> pts_to r full_perm v)
= let r = MHR.alloc (raise_preorder p) (U.raise_val v) in
rewrite_slprop
(MHR.pts_to r full_perm (hide (U.raise_val v)))
(pts_to r full_perm v)
(fun _ -> ());
return r | val alloc (#a:Type) (p:Preorder.preorder a) (v:a)
: SteelT (ref a p) emp (fun r -> pts_to r full_perm v)
let alloc (#a: Type) (p: Preorder.preorder a) (v: a)
: SteelT (ref a p) emp (fun r -> pts_to r full_perm v) = | true | null | false | let r = MHR.alloc (raise_preorder p) (U.raise_val v) in
rewrite_slprop (MHR.pts_to r full_perm (hide (U.raise_val v))) (pts_to r full_perm v) (fun _ -> ());
return r | {
"checked_file": "Steel.MonotonicReference.fst.checked",
"dependencies": [
"Steel.MonotonicHigherReference.fsti.checked",
"Steel.Memory.fsti.checked",
"Steel.FractionalPermission.fst.checked",
"Steel.Effect.Atomic.fsti.checked",
"Steel.Effect.fsti.checked",
"prims.fst.checked",
"FStar.Universe.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.PCM.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": true,
"source_file": "Steel.MonotonicReference.fst"
} | [] | [
"FStar.Preorder.preorder",
"Steel.Effect.Atomic.return",
"Steel.MonotonicReference.ref",
"FStar.Ghost.hide",
"FStar.Set.set",
"Steel.Memory.iname",
"FStar.Set.empty",
"Steel.MonotonicReference.pts_to",
"Steel.FractionalPermission.full_perm",
"Steel.Effect.Common.vprop",
"Prims.unit",
"Steel.Effect.Atomic.rewrite_slprop",
"Steel.MonotonicHigherReference.pts_to",
"FStar.Universe.raise_t",
"Steel.MonotonicReference.raise_preorder",
"FStar.Universe.raise_val",
"Steel.Memory.mem",
"Steel.MonotonicHigherReference.ref",
"Steel.MonotonicHigherReference.alloc",
"Steel.Effect.Common.emp"
] | [] | (*
Copyright 2020 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Steel.MonotonicReference
open FStar.PCM
open FStar.Ghost
open Steel.FractionalPermission
open Steel.Memory
open Steel.Effect.Atomic
open Steel.Effect
module Preorder = FStar.Preorder
module MHR = Steel.MonotonicHigherReference
module U = FStar.Universe
let raise_preorder (#a:Type0) (p:Preorder.preorder a)
: Preorder.preorder (U.raise_t a)
= fun (x0 x1:U.raise_t a) ->
p (U.downgrade_val x0) (U.downgrade_val x1)
let ref a p = MHR.ref (FStar.Universe.raise_t a) (raise_preorder p)
/// The standard points to separation logic predicate
let pts_to_sl (#a:Type) (#p:Preorder.preorder a)
(r:ref a p)
(f:perm)
(v:a)
= MHR.pts_to_sl r f (hide (U.raise_val v))
/// Allocates a reference with value [x]. We have full permission on the newly
/// allocated reference.
let alloc (#a:Type) (p:Preorder.preorder a) (v:a) | false | false | Steel.MonotonicReference.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val alloc (#a:Type) (p:Preorder.preorder a) (v:a)
: SteelT (ref a p) emp (fun r -> pts_to r full_perm v) | [] | Steel.MonotonicReference.alloc | {
"file_name": "lib/steel/Steel.MonotonicReference.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | p: FStar.Preorder.preorder a -> v: a -> Steel.Effect.SteelT (Steel.MonotonicReference.ref a p) | {
"end_col": 12,
"end_line": 54,
"start_col": 3,
"start_line": 49
} |
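Putting the Steel.MonotonicReference rows above together: a sketch of allocating a grow-only counter with `alloc`. Module and helper names are invented for illustration; the sketch has not been verified, and real Steel code may need additional annotations for frame inference.

module Sketch.Counter

open Steel.Effect
open Steel.FractionalPermission
module MR = Steel.MonotonicReference
module P = FStar.Preorder

(* The counter's preorder: its value never decreases. *)
let grows : P.preorder nat = fun x y -> x <= y

(* `alloc` returns full permission on a fresh reference governed by `grows`. *)
let new_counter ()
  : SteelT (MR.ref nat grows) emp (fun r -> MR.pts_to r full_perm 0)
  = MR.alloc grows 0

(* A client would then:
   - use `MR.write` to bump the counter, each write proving the preorder
     (old value <= new value);
   - use `MR.witness` with a stable fact such as `fun v -> n <= v` to obtain
     a `witnessed` token, which costs no permission to keep around;
   - use `MR.recall` later to recover `n <= current value` from that token. *)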
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let st_wp = gst_wp | let st_wp = | false | null | false | gst_wp | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.HyperStack.ST.gst_wp"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post' | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val st_wp : a: Type -> Type | [] | FStar.HyperStack.ST.st_wp | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type -> Type | {
"end_col": 21,
"end_line": 82,
"start_col": 15,
"start_line": 82
} |
|
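`st_wp` above is a weakest-precondition transformer over `mem`: it maps a postcondition on the result and final memory to a precondition on the initial memory. A tiny inhabitant as a sketch (invented name, not checked):

module Sketch.StWp

open FStar.HyperStack.ST

(* The WP of a computation that returns 1 and leaves memory unchanged:
   whatever the caller wants of (1, h) must already hold of h. *)
let return_one_wp : st_wp nat = fun post h -> post 1 h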
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gst_post (a:Type) = st_post_h mem a | let gst_post (a: Type) = | false | null | false | st_post_h mem a | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Pervasives.st_post_h",
"FStar.Monotonic.HyperStack.mem"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gst_post : a: Type -> Type | [] | FStar.HyperStack.ST.gst_post | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type -> Type | {
"end_col": 39,
"end_line": 47,
"start_col": 24,
"start_line": 47
} |
|
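`gst_post` above specialises the `st_post_h` combinator from FStar.Pervasives to `mem`. A small inhabitant as a sketch (invented name, not checked):

module Sketch.GstPost

open FStar.HyperStack.ST
module HS = FStar.HyperStack

(* gst_post nat unfolds to nat -> mem -> GTot Type0: a postcondition relating
   the result of a computation to the final memory. *)
let positive_result : gst_post nat = fun (x:nat) (_:HS.mem) -> x > 0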
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre | let gst_post' (a pre: Type) = | false | null | false | st_post_h' mem a pre | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Pervasives.st_post_h'",
"FStar.Monotonic.HyperStack.mem"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gst_post' : a: Type -> pre: Type -> Type | [] | FStar.HyperStack.ST.gst_post' | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type -> pre: Type -> Type | {
"end_col": 56,
"end_line": 46,
"start_col": 36,
"start_line": 46
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gst_pre = st_pre_h mem | let gst_pre = | false | null | false | st_pre_h mem | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Pervasives.st_pre_h",
"FStar.Monotonic.HyperStack.mem"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gst_pre : Type | [] | FStar.HyperStack.ST.gst_pre | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Type | {
"end_col": 36,
"end_line": 45,
"start_col": 24,
"start_line": 45
} |
|
Prims.Tot | val contained_non_tip_stack_region: mem -> mem -> rid -> Type0 | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r | val contained_non_tip_stack_region: mem -> mem -> rid -> Type0
let contained_non_tip_stack_region: mem -> mem -> rid -> Type0 = | false | null | false | fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"Prims.l_and",
"Prims.b2t",
"FStar.Monotonic.HyperStack.is_stack_region",
"FStar.HyperStack.ST.contained_non_tip_region"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
// * AR: (may be this is an overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
// * the advantage is that, the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
// * marking these opaque, since expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"] | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val contained_non_tip_stack_region: mem -> mem -> rid -> Type0 | [] | FStar.HyperStack.ST.contained_non_tip_stack_region | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
m0: FStar.Monotonic.HyperStack.mem ->
m1: FStar.Monotonic.HyperStack.mem ->
r: FStar.Monotonic.HyperHeap.rid
-> Type0 | {
"end_col": 72,
"end_line": 136,
"start_col": 4,
"start_line": 136
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h) | let lift_div_gst (a: Type) (wp: pure_wp a) (p: gst_post a) (h: mem) = | false | null | false | wp (fun a -> p a h) | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"Prims.pure_wp",
"FStar.HyperStack.ST.gst_post",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_True",
"Prims.pure_pre"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a | false | false | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lift_div_gst : a: Type ->
wp: Prims.pure_wp a ->
p: FStar.HyperStack.ST.gst_post a ->
h: FStar.Monotonic.HyperStack.mem
-> Prims.pure_pre | [] | FStar.HyperStack.ST.lift_div_gst | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
a: Type ->
wp: Prims.pure_wp a ->
p: FStar.HyperStack.ST.gst_post a ->
h: FStar.Monotonic.HyperStack.mem
-> Prims.pure_pre | {
"end_col": 92,
"end_line": 50,
"start_col": 73,
"start_line": 50
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) =
forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1 | let same_refs_common (p: (mem -> mem -> rid -> Type0)) (m0 m1: mem) = | false | null | false | forall (r: rid). p m0 m1 r ==> equal_heap_dom r m0 m1 | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"Prims.l_Forall",
"Prims.l_imp",
"FStar.HyperStack.ST.equal_heap_dom",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
// * AR: (may be this is an overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
// * the advantage is that, the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
// * marking these opaque, since expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r
[@@"opaque_to_smt"] | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val same_refs_common : p:
(
_: FStar.Monotonic.HyperStack.mem ->
_: FStar.Monotonic.HyperStack.mem ->
_: FStar.Monotonic.HyperHeap.rid
-> Type0) ->
m0: FStar.Monotonic.HyperStack.mem ->
m1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | [] | FStar.HyperStack.ST.same_refs_common | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
p:
(
_: FStar.Monotonic.HyperStack.mem ->
_: FStar.Monotonic.HyperStack.mem ->
_: FStar.Monotonic.HyperHeap.rid
-> Type0) ->
m0: FStar.Monotonic.HyperStack.mem ->
m1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | {
"end_col": 54,
"end_line": 140,
"start_col": 2,
"start_line": 140
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r | let contains_region (m: mem) (r: rid) = | false | null | false | (get_hmap m) `Map.contains` r | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Map.contains",
"FStar.Monotonic.Heap.heap",
"FStar.Monotonic.HyperStack.get_hmap",
"Prims.bool"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *) | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val contains_region : m: FStar.Monotonic.HyperStack.mem -> r: FStar.Monotonic.HyperHeap.rid -> Prims.bool | [] | FStar.HyperStack.ST.contains_region | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | m: FStar.Monotonic.HyperStack.mem -> r: FStar.Monotonic.HyperHeap.rid -> Prims.bool | {
"end_col": 80,
"end_line": 29,
"start_col": 53,
"start_line": 29
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let st_pre = gst_pre | let st_pre = | false | null | false | gst_pre | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.HyperStack.ST.gst_pre"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q) | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val st_pre : Type | [] | FStar.HyperStack.ST.st_pre | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Type | {
"end_col": 22,
"end_line": 79,
"start_col": 15,
"start_line": 79
} |
|
Prims.Tot | val contained_non_tip_region: mem -> mem -> rid -> Type0 | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r | val contained_non_tip_region: mem -> mem -> rid -> Type0
let contained_non_tip_region: mem -> mem -> rid -> Type0 = | false | null | false | fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"Prims.l_and",
"Prims.l_not",
"Prims.eq2",
"FStar.Monotonic.HyperStack.get_tip",
"FStar.HyperStack.ST.contained_region"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
// * AR: (may be this is an overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
// * the advantage is that, the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
// * marking these opaque, since expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"] | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val contained_non_tip_region: mem -> mem -> rid -> Type0 | [] | FStar.HyperStack.ST.contained_non_tip_region | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
m0: FStar.Monotonic.HyperStack.mem ->
m1: FStar.Monotonic.HyperStack.mem ->
r: FStar.Monotonic.HyperHeap.rid
-> Type0 | {
"end_col": 83,
"end_line": 132,
"start_col": 4,
"start_line": 132
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let st_post' = gst_post' | let st_post' = | false | null | false | gst_post' | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.HyperStack.ST.gst_post'"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q) | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val st_post' : a: Type -> pre: Type -> Type | [] | FStar.HyperStack.ST.st_post' | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type -> pre: Type -> Type | {
"end_col": 24,
"end_line": 80,
"start_col": 15,
"start_line": 80
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let st_post = gst_post | let st_post = | false | null | false | gst_post | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.HyperStack.ST.gst_post"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val st_post : a: Type -> Type | [] | FStar.HyperStack.ST.st_post | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type -> Type | {
"end_col": 23,
"end_line": 81,
"start_col": 15,
"start_line": 81
} |
|
Prims.Tot | val equal_heap_dom (r: rid) (m0 m1: mem) : Type0 | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r) | val equal_heap_dom (r: rid) (m0 m1: mem) : Type0
let equal_heap_dom (r: rid) (m0 m1: mem) : Type0 = | false | null | false | Heap.equal_dom ((get_hmap m0) `Map.sel` r) ((get_hmap m1) `Map.sel` r) | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperHeap.rid",
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.Heap.equal_dom",
"FStar.Map.sel",
"FStar.Monotonic.Heap.heap",
"FStar.Monotonic.HyperStack.get_hmap"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
// * AR: (may be this is an overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
// * the advantage is that, the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
// * marking these opaque, since expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"] | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val equal_heap_dom (r: rid) (m0 m1: mem) : Type0 | [] | FStar.HyperStack.ST.equal_heap_dom | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
r: FStar.Monotonic.HyperHeap.rid ->
m0: FStar.Monotonic.HyperStack.mem ->
m1: FStar.Monotonic.HyperStack.mem
-> Type0 | {
"end_col": 70,
"end_line": 120,
"start_col": 4,
"start_line": 120
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gst_wp (a:Type) = st_wp_h mem a | let gst_wp (a: Type) = | false | null | false | st_wp_h mem a | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Pervasives.st_wp_h",
"FStar.Monotonic.HyperStack.mem"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gst_wp : a: Type -> Type | [] | FStar.HyperStack.ST.gst_wp | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type -> Type | {
"end_col": 37,
"end_line": 48,
"start_col": 24,
"start_line": 48
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let equal_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\
same_refs_in_all_regions m0 m1 | let equal_domains (m0 m1: mem) = | false | null | false | get_tip m0 == get_tip m1 /\ Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\
same_refs_in_all_regions m0 m1 | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Prims.eq2",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Monotonic.HyperStack.get_tip",
"FStar.Set.equal",
"FStar.Map.domain",
"FStar.Monotonic.Heap.heap",
"FStar.Monotonic.HyperStack.get_hmap",
"FStar.HyperStack.ST.same_refs_in_all_regions",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
 // * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
 // * the advantage is that the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
 // * marking these opaque, since we expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) =
forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1
(* predicates *)
val same_refs_in_all_regions (m0 m1:mem) :Type0
val same_refs_in_stack_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_stack_regions (m0 m1:mem) :Type0
(* intro and elim forms *)
val lemma_same_refs_in_all_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_region m0 m1)) (ensures (same_refs_in_all_regions m0 m1))
[SMTPat (same_refs_in_all_regions m0 m1)]
val lemma_same_refs_in_all_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_all_regions m0 m1 /\ contained_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_stack_region m0 m1)) (ensures (same_refs_in_stack_regions m0 m1))
[SMTPat (same_refs_in_stack_regions m0 m1)]
val lemma_same_refs_in_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_stack_regions m0 m1 /\ contained_stack_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_region m0 m1)) (ensures (same_refs_in_non_tip_regions m0 m1))
[SMTPat (same_refs_in_non_tip_regions m0 m1)]
val lemma_same_refs_in_non_tip_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_regions m0 m1 /\ contained_non_tip_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_stack_region m0 m1)) (ensures (same_refs_in_non_tip_stack_regions m0 m1))
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1)]
val lemma_same_refs_in_non_tip_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_stack_regions m0 m1 /\ contained_non_tip_stack_region m0 m1 r))
(ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r);];
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
(******) | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val equal_domains : m0: FStar.Monotonic.HyperStack.mem -> m1: FStar.Monotonic.HyperStack.mem -> Prims.logical | [] | FStar.HyperStack.ST.equal_domains | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | m0: FStar.Monotonic.HyperStack.mem -> m1: FStar.Monotonic.HyperStack.mem -> Prims.logical | {
"end_col": 32,
"end_line": 188,
"start_col": 2,
"start_line": 186
} |
|
Prims.Tot | val contained_region: mem -> mem -> rid -> Type0 | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r | val contained_region: mem -> mem -> rid -> Type0
let contained_region: mem -> mem -> rid -> Type0 = | false | null | false | fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"Prims.l_and",
"Prims.b2t",
"FStar.HyperStack.ST.contains_region"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
 // * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
 // * the advantage is that the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
 // * marking these opaque, since we expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"] | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val contained_region: mem -> mem -> rid -> Type0 | [] | FStar.HyperStack.ST.contained_region | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
m0: FStar.Monotonic.HyperStack.mem ->
m1: FStar.Monotonic.HyperStack.mem ->
r: FStar.Monotonic.HyperHeap.rid
-> Type0 | {
"end_col": 67,
"end_line": 124,
"start_col": 4,
"start_line": 124
} |
Prims.Tot | val contained_stack_region: mem -> mem -> rid -> Type0 | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r | val contained_stack_region: mem -> mem -> rid -> Type0
let contained_stack_region: mem -> mem -> rid -> Type0 = | false | null | false | fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"Prims.l_and",
"Prims.b2t",
"FStar.Monotonic.HyperStack.is_stack_region",
"FStar.HyperStack.ST.contained_region"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
 // * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
 // * the advantage is that the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
 // * marking these opaque, since we expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"] | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val contained_stack_region: mem -> mem -> rid -> Type0 | [] | FStar.HyperStack.ST.contained_stack_region | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
m0: FStar.Monotonic.HyperStack.mem ->
m1: FStar.Monotonic.HyperStack.mem ->
r: FStar.Monotonic.HyperHeap.rid
-> Type0 | {
"end_col": 64,
"end_line": 128,
"start_col": 4,
"start_line": 128
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let equal_stack_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
same_refs_in_stack_regions m0 m1 | let equal_stack_domains (m0 m1: mem) = | false | null | false | get_tip m0 == get_tip m1 /\ same_refs_in_stack_regions m0 m1 | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Prims.eq2",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Monotonic.HyperStack.get_tip",
"FStar.HyperStack.ST.same_refs_in_stack_regions",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
 // * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
 // * the advantage is that the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
 // * marking these opaque, since we expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) =
forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1
(* predicates *)
val same_refs_in_all_regions (m0 m1:mem) :Type0
val same_refs_in_stack_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_stack_regions (m0 m1:mem) :Type0
(* intro and elim forms *)
val lemma_same_refs_in_all_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_region m0 m1)) (ensures (same_refs_in_all_regions m0 m1))
[SMTPat (same_refs_in_all_regions m0 m1)]
val lemma_same_refs_in_all_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_all_regions m0 m1 /\ contained_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_stack_region m0 m1)) (ensures (same_refs_in_stack_regions m0 m1))
[SMTPat (same_refs_in_stack_regions m0 m1)]
val lemma_same_refs_in_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_stack_regions m0 m1 /\ contained_stack_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_region m0 m1)) (ensures (same_refs_in_non_tip_regions m0 m1))
[SMTPat (same_refs_in_non_tip_regions m0 m1)]
val lemma_same_refs_in_non_tip_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_regions m0 m1 /\ contained_non_tip_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_stack_region m0 m1)) (ensures (same_refs_in_non_tip_stack_regions m0 m1))
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1)]
val lemma_same_refs_in_non_tip_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_stack_regions m0 m1 /\ contained_non_tip_stack_region m0 m1 r))
(ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r);];
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
(******)
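(* Illustration (a hedged sketch; the lemma below is hypothetical and not part of
   this file): the intro and elim forms above are meant to fire through their SMT
   patterns rather than be invoked by hand. For instance, a client fact such as
     let region_dom_unchanged (m0 m1:mem) (r:rid)
       : Lemma (requires (same_refs_in_all_regions m0 m1 /\
                          m0 `contains_region` r /\ m1 `contains_region` r))
               (ensures  (equal_heap_dom r m0 m1))
       = ()
   would typically be discharged automatically, because the elim pattern is guarded
   by the abstract predicate together with the [contains_region] facts in scope. *)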
let equal_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\
same_refs_in_all_regions m0 m1
val lemma_equal_domains_trans (m0 m1 m2:mem)
:Lemma (requires (equal_domains m0 m1 /\ equal_domains m1 m2))
(ensures (equal_domains m0 m2))
[SMTPat (equal_domains m0 m1); SMTPat (equal_domains m1 m2)]
(**
 * Effect of stack-based code: the 'equal_domains' clause enforces that
 * - both mems have the same tip
 * - both mems reference the same heaps (their maps rid -> heap have the same domain)
* - in each region id, the corresponding heaps contain the same references on both sides
*)
effect Stack (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_domains h h1) ==> p a h1)) (* WP *)
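(* Illustration (a hedged sketch; the function below is hypothetical and not part
   of this file): a [Stack] computation must leave its caller's stack structure
   unchanged, so any frame it pushes must be popped before it returns. The simplest
   possible shape is a computation that touches no state at all:
     let stack_identity (x:nat)
       : Stack nat (requires (fun _ -> True)) (ensures (fun _ r _ -> r == x))
       = x
   Code that does allocate on the stack brackets the allocation between
   [push_frame] and [pop_frame], declared later in this file, which is exactly the
   discipline the [equal_domains] clause in the WP above captures. *)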
(**
* Effect of heap-based code.
* - assumes that the stack is empty (tip = root)
* - corresponds to the HyperHeap ST effect
 * - can call Stack and ST code freely
* - respects the stack invariant: the stack has to be empty when returning
*)
effect Heap (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ get_tip h = HS.root /\ get_tip h1 = HS.root ) ==> p a h1)) (* WP *) | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val equal_stack_domains : m0: FStar.Monotonic.HyperStack.mem -> m1: FStar.Monotonic.HyperStack.mem -> Prims.logical | [] | FStar.HyperStack.ST.equal_stack_domains | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | m0: FStar.Monotonic.HyperStack.mem -> m1: FStar.Monotonic.HyperStack.mem -> Prims.logical | {
"end_col": 34,
"end_line": 218,
"start_col": 2,
"start_line": 217
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lift_gst_state (a:Type) (wp:gst_wp a) = wp | let lift_gst_state (a: Type) (wp: gst_wp a) = | false | null | false | wp | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.HyperStack.ST.gst_wp"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST | false | false | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lift_gst_state : a: Type -> wp: FStar.HyperStack.ST.gst_wp a -> FStar.HyperStack.ST.gst_wp a | [] | FStar.HyperStack.ST.lift_gst_state | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type -> wp: FStar.HyperStack.ST.gst_wp a -> FStar.HyperStack.ST.gst_wp a | {
"end_col": 53,
"end_line": 86,
"start_col": 51,
"start_line": 86
} |
|
Prims.Tot | val is_eternal_region (r: rid) : Type0 | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let is_eternal_region (r:rid) :Type0
= HS.is_eternal_region_hs r /\ (r == HS.root \/ witnessed (region_contains_pred r)) | val is_eternal_region (r: rid) : Type0
let is_eternal_region (r: rid) : Type0 = | false | null | false | HS.is_eternal_region_hs r /\ (r == HS.root \/ witnessed (region_contains_pred r)) | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.l_and",
"Prims.b2t",
"FStar.Monotonic.HyperStack.is_eternal_region_hs",
"Prims.l_or",
"Prims.eq2",
"FStar.Monotonic.HyperHeap.root",
"FStar.HyperStack.ST.witnessed",
"FStar.HyperStack.ST.region_contains_pred"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
 // * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
 // * the advantage is that the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
 // * marking these opaque, since we expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) =
forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1
(* predicates *)
val same_refs_in_all_regions (m0 m1:mem) :Type0
val same_refs_in_stack_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_stack_regions (m0 m1:mem) :Type0
(* intro and elim forms *)
val lemma_same_refs_in_all_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_region m0 m1)) (ensures (same_refs_in_all_regions m0 m1))
[SMTPat (same_refs_in_all_regions m0 m1)]
val lemma_same_refs_in_all_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_all_regions m0 m1 /\ contained_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_stack_region m0 m1)) (ensures (same_refs_in_stack_regions m0 m1))
[SMTPat (same_refs_in_stack_regions m0 m1)]
val lemma_same_refs_in_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_stack_regions m0 m1 /\ contained_stack_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_region m0 m1)) (ensures (same_refs_in_non_tip_regions m0 m1))
[SMTPat (same_refs_in_non_tip_regions m0 m1)]
val lemma_same_refs_in_non_tip_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_regions m0 m1 /\ contained_non_tip_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_stack_region m0 m1)) (ensures (same_refs_in_non_tip_stack_regions m0 m1))
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1)]
val lemma_same_refs_in_non_tip_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_stack_regions m0 m1 /\ contained_non_tip_stack_region m0 m1 r))
(ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r);];
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
(******)
let equal_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\
same_refs_in_all_regions m0 m1
val lemma_equal_domains_trans (m0 m1 m2:mem)
:Lemma (requires (equal_domains m0 m1 /\ equal_domains m1 m2))
(ensures (equal_domains m0 m2))
[SMTPat (equal_domains m0 m1); SMTPat (equal_domains m1 m2)]
(**
 * Effect of stack-based code: the 'equal_domains' clause enforces that
* - both mem have the same tip
* - both mem reference the same heaps (their map: rid -> heap have the same domain)
* - in each region id, the corresponding heaps contain the same references on both sides
*)
effect Stack (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_domains h h1) ==> p a h1)) (* WP *)
(**
* Effect of heap-based code.
* - assumes that the stack is empty (tip = root)
* - corresponds to the HyperHeap ST effect
 * - can call into Stack and ST code freely
* - respects the stack invariant: the stack has to be empty when returning
*)
effect Heap (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ get_tip h = HS.root /\ get_tip h1 = HS.root ) ==> p a h1)) (* WP *)
let equal_stack_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
same_refs_in_stack_regions m0 m1
(**
* Effect of low-level code:
* - maintains the allocation invariant on the stack: no allocation unless in a new frame that has to be popped before returning
 * - no constraints on heap allocation
*)
effect ST (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_stack_domains h h1) ==> p a h1)) (* WP *)
effect St (a:Type) = ST a (fun _ -> True) (fun _ _ _ -> True)
let inline_stack_inv h h' : GTot Type0 =
(* The frame invariant is enforced *)
get_tip h == get_tip h' /\
(* The heap structure is unchanged *)
Map.domain (get_hmap h) == Map.domain (get_hmap h') /\
  (* Any region that is not the tip has not seen any allocation *)
same_refs_in_non_tip_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other terms, the backend has to unfold the body into the caller's body.
* This effect maintains the stack AND the heap invariant: it can be inlined in the Stack effect
* function body as well as in a Heap effect function body
*)
effect StackInline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ is_stack_region (get_tip h) /\ (forall a h1. (pre h /\ post h a h1 /\ inline_stack_inv h h1) ==> p a h1)) (* WP *)
let inline_inv h h' : GTot Type0 =
(* The stack invariant is enforced *)
get_tip h == get_tip h' /\
(* No frame may have received an allocation but the tip *)
same_refs_in_non_tip_stack_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other terms, the backend has to unfold the body into the caller's body.
* This effect only maintains the stack invariant: the tip is left unchanged and no allocation
 * may occur in the stack lower than the tip.
* Region allocation is not constrained.
* Heap allocation is not constrained.
*)
effect Inline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ inline_inv h h1) ==> p a h1)) (* WP *)
(**
* TODO:
* REMOVE AS SOON AS CONSENSUS IS REACHED ON NEW LOW EFFECT NAMES
*)
effect STL (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = Stack a pre post
sub_effect
DIV ~> STATE = fun (a:Type) (wp:pure_wp a) (p:st_post a) (h:mem) -> wp (fun a -> p a h)
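(* Illustrative sketch only, not part of this interface: once the DIV ~> STATE
   lift above is in place, a pure computation can be used directly inside the
   stateful effects.  The name `double_in_st` is hypothetical. *)
let double_in_st (x:int) : St int = x + x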
(*
* AR: The clients should open HyperStack.ST after the memory model files (as with Heap and FStar.ST)
*)
type mreference (a:Type) (rel:preorder a) =
r:HS.mreference a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mstackref (a:Type) (rel:preorder a) =
r:HS.mstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mref (a:Type) (rel:preorder a) =
r:HS.mref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmstackref (a:Type) (rel:preorder a) =
r:HS.mmmstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmref (a:Type) (rel:preorder a) =
r:HS.mmmref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type s_mref (i:rid) (a:Type) (rel:preorder a) =
r:HS.s_mref i a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type reference (a:Type) = mreference a (Heap.trivial_preorder a)
type stackref (a:Type) = mstackref a (Heap.trivial_preorder a)
type ref (a:Type) = mref a (Heap.trivial_preorder a)
type mmstackref (a:Type) = mmmstackref a (Heap.trivial_preorder a)
type mmref (a:Type) = mmmref a (Heap.trivial_preorder a)
type s_ref (i:rid) (a:Type) = s_mref i a (Heap.trivial_preorder a) | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val is_eternal_region (r: rid) : Type0 | [] | FStar.HyperStack.ST.is_eternal_region | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Monotonic.HyperHeap.rid -> Type0 | {
"end_col": 85,
"end_line": 307,
"start_col": 4,
"start_line": 307
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let new_region_post_common (r0 r1:rid) (m0 m1:mem) =
r1 `HS.extends` r0 /\
HS.fresh_region r1 m0 m1 /\
get_hmap m1 == Map.upd (get_hmap m0) r1 Heap.emp /\
get_tip m1 == get_tip m0 /\
HS.live_region m0 r0 | let new_region_post_common (r0 r1: rid) (m0 m1: mem) = | false | null | false | r1 `HS.extends` r0 /\ HS.fresh_region r1 m0 m1 /\ get_hmap m1 == Map.upd (get_hmap m0) r1 Heap.emp /\
get_tip m1 == get_tip m0 /\ HS.live_region m0 r0 | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperHeap.rid",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Prims.b2t",
"FStar.Monotonic.HyperHeap.extends",
"FStar.Monotonic.HyperStack.fresh_region",
"Prims.eq2",
"FStar.Map.t",
"FStar.Monotonic.Heap.heap",
"FStar.Monotonic.HyperStack.get_hmap",
"FStar.Map.upd",
"FStar.Monotonic.Heap.emp",
"FStar.Monotonic.HyperStack.get_tip",
"FStar.Monotonic.HyperStack.live_region",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
 WARNING: this effect is unsafe; for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
// * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
// * the advantage is that, the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
// * marking these opaque, since expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) =
forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1
(* predicates *)
val same_refs_in_all_regions (m0 m1:mem) :Type0
val same_refs_in_stack_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_stack_regions (m0 m1:mem) :Type0
(* intro and elim forms *)
val lemma_same_refs_in_all_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_region m0 m1)) (ensures (same_refs_in_all_regions m0 m1))
[SMTPat (same_refs_in_all_regions m0 m1)]
val lemma_same_refs_in_all_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_all_regions m0 m1 /\ contained_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_stack_region m0 m1)) (ensures (same_refs_in_stack_regions m0 m1))
[SMTPat (same_refs_in_stack_regions m0 m1)]
val lemma_same_refs_in_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_stack_regions m0 m1 /\ contained_stack_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_region m0 m1)) (ensures (same_refs_in_non_tip_regions m0 m1))
[SMTPat (same_refs_in_non_tip_regions m0 m1)]
val lemma_same_refs_in_non_tip_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_regions m0 m1 /\ contained_non_tip_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_stack_region m0 m1)) (ensures (same_refs_in_non_tip_stack_regions m0 m1))
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1)]
val lemma_same_refs_in_non_tip_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_stack_regions m0 m1 /\ contained_non_tip_stack_region m0 m1 r))
(ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r);];
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
(******)
let equal_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\
same_refs_in_all_regions m0 m1
val lemma_equal_domains_trans (m0 m1 m2:mem)
:Lemma (requires (equal_domains m0 m1 /\ equal_domains m1 m2))
(ensures (equal_domains m0 m2))
[SMTPat (equal_domains m0 m1); SMTPat (equal_domains m1 m2)]
(**
 * Effect of stack-based code: the 'equal_domains' clause enforces that
* - both mem have the same tip
* - both mem reference the same heaps (their map: rid -> heap have the same domain)
* - in each region id, the corresponding heaps contain the same references on both sides
*)
effect Stack (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_domains h h1) ==> p a h1)) (* WP *)
(**
* Effect of heap-based code.
* - assumes that the stack is empty (tip = root)
* - corresponds to the HyperHeap ST effect
 * - can call into Stack and ST code freely
* - respects the stack invariant: the stack has to be empty when returning
*)
effect Heap (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ get_tip h = HS.root /\ get_tip h1 = HS.root ) ==> p a h1)) (* WP *)
let equal_stack_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
same_refs_in_stack_regions m0 m1
(**
* Effect of low-level code:
* - maintains the allocation invariant on the stack: no allocation unless in a new frame that has to be popped before returning
 * - no constraints on heap allocation
*)
effect ST (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_stack_domains h h1) ==> p a h1)) (* WP *)
effect St (a:Type) = ST a (fun _ -> True) (fun _ _ _ -> True)
let inline_stack_inv h h' : GTot Type0 =
(* The frame invariant is enforced *)
get_tip h == get_tip h' /\
(* The heap structure is unchanged *)
Map.domain (get_hmap h) == Map.domain (get_hmap h') /\
  (* Any region that is not the tip has not seen any allocation *)
same_refs_in_non_tip_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other terms, the backend has to unfold the body into the caller's body.
* This effect maintains the stack AND the heap invariant: it can be inlined in the Stack effect
* function body as well as in a Heap effect function body
*)
effect StackInline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ is_stack_region (get_tip h) /\ (forall a h1. (pre h /\ post h a h1 /\ inline_stack_inv h h1) ==> p a h1)) (* WP *)
let inline_inv h h' : GTot Type0 =
(* The stack invariant is enforced *)
get_tip h == get_tip h' /\
(* No frame may have received an allocation but the tip *)
same_refs_in_non_tip_stack_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other terms, the backend has to unfold the body into the caller's body.
* This effect only maintains the stack invariant: the tip is left unchanged and no allocation
 * may occur in the stack lower than the tip.
* Region allocation is not constrained.
* Heap allocation is not constrained.
*)
effect Inline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ inline_inv h h1) ==> p a h1)) (* WP *)
(**
* TODO:
* REMOVE AS SOON AS CONSENSUS IS REACHED ON NEW LOW EFFECT NAMES
*)
effect STL (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = Stack a pre post
sub_effect
DIV ~> STATE = fun (a:Type) (wp:pure_wp a) (p:st_post a) (h:mem) -> wp (fun a -> p a h)
(*
* AR: The clients should open HyperStack.ST after the memory model files (as with Heap and FStar.ST)
*)
type mreference (a:Type) (rel:preorder a) =
r:HS.mreference a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mstackref (a:Type) (rel:preorder a) =
r:HS.mstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mref (a:Type) (rel:preorder a) =
r:HS.mref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmstackref (a:Type) (rel:preorder a) =
r:HS.mmmstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmref (a:Type) (rel:preorder a) =
r:HS.mmmref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type s_mref (i:rid) (a:Type) (rel:preorder a) =
r:HS.s_mref i a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type reference (a:Type) = mreference a (Heap.trivial_preorder a)
type stackref (a:Type) = mstackref a (Heap.trivial_preorder a)
type ref (a:Type) = mref a (Heap.trivial_preorder a)
type mmstackref (a:Type) = mmmstackref a (Heap.trivial_preorder a)
type mmref (a:Type) = mmmref a (Heap.trivial_preorder a)
type s_ref (i:rid) (a:Type) = s_mref i a (Heap.trivial_preorder a)
let is_eternal_region (r:rid) :Type0
= HS.is_eternal_region_hs r /\ (r == HS.root \/ witnessed (region_contains_pred r))
(*
* AR: The change to using ST.rid may not be that bad itself,
* since subtyping should take care of most instances in the client usage.
* However, one case where it could be an issue is modifies clauses that use
* Set.set rid.
*)
(** Pushes a new empty frame on the stack **)
val push_frame (_:unit) :Unsafe unit (requires (fun m -> True)) (ensures (fun (m0:mem) _ (m1:mem) -> fresh_frame m0 m1))
(** Removes old frame from the stack **)
val pop_frame (_:unit)
:Unsafe unit (requires (fun m -> poppable m))
(ensures (fun (m0:mem) _ (m1:mem) -> poppable m0 /\ m1 == pop m0 /\ popped m0 m1))
#push-options "--z3rlimit 40"
let salloc_post (#a:Type) (#rel:preorder a) (init:a) (m0:mem)
(s:mreference a rel{is_stack_region (frameOf s)}) (m1:mem)
= is_stack_region (get_tip m0) /\
Map.domain (get_hmap m0) == Map.domain (get_hmap m1) /\
get_tip m0 == get_tip m1 /\
frameOf s = get_tip m1 /\
HS.fresh_ref s m0 m1 /\ //it's a fresh reference in the top frame
m1 == HyperStack.upd m0 s init //and it's been initialized
#pop-options
(**
* Allocates on the top-most stack frame
*)
val salloc (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
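(* Illustrative sketch only, not part of this interface: the intended use of
   salloc is inside a freshly pushed frame that is popped before returning, so
   the equal_domains obligation of the Stack effect is met.  The name
   `with_temp_cell` and the trivial specification are hypothetical. *)
let with_temp_cell (x:int)
  :Stack unit (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = push_frame ();
    let _cell : stackref int = salloc x in
    pop_frame ()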
// JP, AR: these are not supported in C, and `salloc` already benefits from
// automatic memory management.
[@@ (deprecated "Use salloc instead") ]
val salloc_mm (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mmmstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
[@@ (deprecated "Use salloc instead") ]
val sfree (#a:Type) (#rel:preorder a) (r:mmmstackref a rel)
:StackInline unit (requires (fun m0 -> frameOf r = get_tip m0 /\ m0 `contains` r))
(ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0))
unfold | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val new_region_post_common : r0: FStar.Monotonic.HyperHeap.rid ->
r1: FStar.Monotonic.HyperHeap.rid ->
m0: FStar.Monotonic.HyperStack.mem ->
m1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | [] | FStar.HyperStack.ST.new_region_post_common | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
r0: FStar.Monotonic.HyperHeap.rid ->
r1: FStar.Monotonic.HyperHeap.rid ->
m0: FStar.Monotonic.HyperStack.mem ->
m1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | {
"end_col": 22,
"end_line": 360,
"start_col": 2,
"start_line": 356
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_none (h0:mem) (h1:mem) = modifies Set.empty h0 h1 | let modifies_none (h0 h1: mem) = | false | null | false | modifies Set.empty h0 h1 | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperStack.modifies",
"FStar.Set.empty",
"FStar.Monotonic.HyperHeap.rid",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
 WARNING: this effect is unsafe; for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
// * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
// * the advantage is that, the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
// * marking these opaque, since expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) =
forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1
(* predicates *)
val same_refs_in_all_regions (m0 m1:mem) :Type0
val same_refs_in_stack_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_stack_regions (m0 m1:mem) :Type0
(* intro and elim forms *)
val lemma_same_refs_in_all_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_region m0 m1)) (ensures (same_refs_in_all_regions m0 m1))
[SMTPat (same_refs_in_all_regions m0 m1)]
val lemma_same_refs_in_all_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_all_regions m0 m1 /\ contained_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_stack_region m0 m1)) (ensures (same_refs_in_stack_regions m0 m1))
[SMTPat (same_refs_in_stack_regions m0 m1)]
val lemma_same_refs_in_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_stack_regions m0 m1 /\ contained_stack_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_region m0 m1)) (ensures (same_refs_in_non_tip_regions m0 m1))
[SMTPat (same_refs_in_non_tip_regions m0 m1)]
val lemma_same_refs_in_non_tip_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_regions m0 m1 /\ contained_non_tip_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_stack_region m0 m1)) (ensures (same_refs_in_non_tip_stack_regions m0 m1))
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1)]
val lemma_same_refs_in_non_tip_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_stack_regions m0 m1 /\ contained_non_tip_stack_region m0 m1 r))
(ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r);];
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
(******)
let equal_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\
same_refs_in_all_regions m0 m1
val lemma_equal_domains_trans (m0 m1 m2:mem)
:Lemma (requires (equal_domains m0 m1 /\ equal_domains m1 m2))
(ensures (equal_domains m0 m2))
[SMTPat (equal_domains m0 m1); SMTPat (equal_domains m1 m2)]
(**
 * Effect of stack-based code: the 'equal_domains' clause enforces that
* - both mem have the same tip
* - both mem reference the same heaps (their map: rid -> heap have the same domain)
* - in each region id, the corresponding heaps contain the same references on both sides
*)
effect Stack (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_domains h h1) ==> p a h1)) (* WP *)
(**
* Effect of heap-based code.
* - assumes that the stack is empty (tip = root)
* - corresponds to the HyperHeap ST effect
 * - can call into Stack and ST code freely
* - respects the stack invariant: the stack has to be empty when returning
*)
effect Heap (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ get_tip h = HS.root /\ get_tip h1 = HS.root ) ==> p a h1)) (* WP *)
let equal_stack_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
same_refs_in_stack_regions m0 m1
(**
* Effect of low-level code:
* - maintains the allocation invariant on the stack: no allocation unless in a new frame that has to be popped before returning
 * - no constraints on heap allocation
*)
effect ST (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_stack_domains h h1) ==> p a h1)) (* WP *)
effect St (a:Type) = ST a (fun _ -> True) (fun _ _ _ -> True)
let inline_stack_inv h h' : GTot Type0 =
(* The frame invariant is enforced *)
get_tip h == get_tip h' /\
(* The heap structure is unchanged *)
Map.domain (get_hmap h) == Map.domain (get_hmap h') /\
  (* Any region that is not the tip has not seen any allocation *)
same_refs_in_non_tip_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other terms, the backend has to unfold the body into the caller's body.
* This effect maintains the stack AND the heap invariant: it can be inlined in the Stack effect
* function body as well as in a Heap effect function body
*)
effect StackInline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ is_stack_region (get_tip h) /\ (forall a h1. (pre h /\ post h a h1 /\ inline_stack_inv h h1) ==> p a h1)) (* WP *)
let inline_inv h h' : GTot Type0 =
(* The stack invariant is enforced *)
get_tip h == get_tip h' /\
(* No frame may have received an allocation but the tip *)
same_refs_in_non_tip_stack_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other terms, the backend has to unfold the body into the caller's body.
* This effect only maintains the stack invariant: the tip is left unchanged and no allocation
* may occurs in the stack lower than the tip.
* Region allocation is not constrained.
* Heap allocation is not constrained.
*)
effect Inline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ inline_inv h h1) ==> p a h1)) (* WP *)
(**
* TODO:
* REMOVE AS SOON AS CONSENSUS IS REACHED ON NEW LOW EFFECT NAMES
*)
effect STL (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = Stack a pre post
sub_effect
DIV ~> STATE = fun (a:Type) (wp:pure_wp a) (p:st_post a) (h:mem) -> wp (fun a -> p a h)
(*
* AR: The clients should open HyperStack.ST after the memory model files (as with Heap and FStar.ST)
*)
type mreference (a:Type) (rel:preorder a) =
r:HS.mreference a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mstackref (a:Type) (rel:preorder a) =
r:HS.mstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mref (a:Type) (rel:preorder a) =
r:HS.mref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmstackref (a:Type) (rel:preorder a) =
r:HS.mmmstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmref (a:Type) (rel:preorder a) =
r:HS.mmmref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type s_mref (i:rid) (a:Type) (rel:preorder a) =
r:HS.s_mref i a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type reference (a:Type) = mreference a (Heap.trivial_preorder a)
type stackref (a:Type) = mstackref a (Heap.trivial_preorder a)
type ref (a:Type) = mref a (Heap.trivial_preorder a)
type mmstackref (a:Type) = mmmstackref a (Heap.trivial_preorder a)
type mmref (a:Type) = mmmref a (Heap.trivial_preorder a)
type s_ref (i:rid) (a:Type) = s_mref i a (Heap.trivial_preorder a)
let is_eternal_region (r:rid) :Type0
= HS.is_eternal_region_hs r /\ (r == HS.root \/ witnessed (region_contains_pred r))
(*
* AR: The change to using ST.rid may not be that bad itself,
* since subtyping should take care of most instances in the client usage.
* However, one case where it could be an issue is modifies clauses that use
* Set.set rid.
*)
(** Pushes a new empty frame on the stack **)
val push_frame (_:unit) :Unsafe unit (requires (fun m -> True)) (ensures (fun (m0:mem) _ (m1:mem) -> fresh_frame m0 m1))
(** Removes old frame from the stack **)
val pop_frame (_:unit)
:Unsafe unit (requires (fun m -> poppable m))
(ensures (fun (m0:mem) _ (m1:mem) -> poppable m0 /\ m1 == pop m0 /\ popped m0 m1))
#push-options "--z3rlimit 40"
let salloc_post (#a:Type) (#rel:preorder a) (init:a) (m0:mem)
(s:mreference a rel{is_stack_region (frameOf s)}) (m1:mem)
= is_stack_region (get_tip m0) /\
Map.domain (get_hmap m0) == Map.domain (get_hmap m1) /\
get_tip m0 == get_tip m1 /\
frameOf s = get_tip m1 /\
HS.fresh_ref s m0 m1 /\ //it's a fresh reference in the top frame
m1 == HyperStack.upd m0 s init //and it's been initialized
#pop-options
(**
* Allocates on the top-most stack frame
*)
val salloc (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
// JP, AR: these are not supported in C, and `salloc` already benefits from
// automatic memory management.
[@@ (deprecated "Use salloc instead") ]
val salloc_mm (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mmmstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
[@@ (deprecated "Use salloc instead") ]
val sfree (#a:Type) (#rel:preorder a) (r:mmmstackref a rel)
:StackInline unit (requires (fun m0 -> frameOf r = get_tip m0 /\ m0 `contains` r))
(ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0))
unfold
let new_region_post_common (r0 r1:rid) (m0 m1:mem) =
r1 `HS.extends` r0 /\
HS.fresh_region r1 m0 m1 /\
get_hmap m1 == Map.upd (get_hmap m0) r1 Heap.emp /\
get_tip m1 == get_tip m0 /\
HS.live_region m0 r0
val new_region (r0:rid)
:ST rid
(requires (fun m -> is_eternal_region r0))
(ensures (fun m0 r1 m1 ->
new_region_post_common r0 r1 m0 m1 /\
HS.color r1 = HS.color r0 /\
is_eternal_region r1 /\
(r1, m1) == HS.new_eternal_region m0 r0 None))
val new_colored_region (r0:rid) (c:int)
:ST rid
(requires (fun m -> HS.is_heap_color c /\ is_eternal_region r0))
(ensures (fun m0 r1 m1 ->
new_region_post_common r0 r1 m0 m1 /\
HS.color r1 = c /\
is_eternal_region r1 /\
(r1, m1) == HS.new_eternal_region m0 r0 (Some c)))
let ralloc_post (#a:Type) (#rel:preorder a) (i:rid) (init:a) (m0:mem)
(x:mreference a rel) (m1:mem) =
let region_i = get_hmap m0 `Map.sel` i in
as_ref x `Heap.unused_in` region_i /\
i `is_in` get_hmap m0 /\
i = frameOf x /\
m1 == upd m0 x init
val ralloc (#a:Type) (#rel:preorder a) (i:rid) (init:a)
:ST (mref a rel) (requires (fun m -> is_eternal_region i))
(ensures (ralloc_post i init))
val ralloc_mm (#a:Type) (#rel:preorder a) (i:rid) (init:a)
:ST (mmmref a rel) (requires (fun m -> is_eternal_region i))
(ensures (ralloc_post i init))
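(* Illustrative sketch only, not part of this interface: allocating a
   garbage-collected reference in an eternal (heap) region, e.g. one obtained
   earlier from new_region.  The name `alloc_counter` and the trivial
   specification are hypothetical. *)
let alloc_counter (r:rid{is_eternal_region r})
  :ST (ref int) (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = ralloc r 0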
(*
* AR: 12/26: For a ref to be readable/writable/free-able,
* the client can either prove contains
* or give us enough so that we can use monotonicity to derive contains
*)
let is_live_for_rw_in (#a:Type) (#rel:preorder a) (r:mreference a rel) (m:mem) :Type0 =
(m `contains` r) \/
(let i = HS.frameOf r in
(is_eternal_region i \/ i `HS.is_above` get_tip m) /\
(not (is_mm r) \/ m `HS.contains_ref_in_its_region` r))
val rfree (#a:Type) (#rel:preorder a) (r:mreference a rel{HS.is_mm r /\ HS.is_heap_color (HS.color (HS.frameOf r))})
:ST unit (requires (fun m0 -> r `is_live_for_rw_in` m0))
(ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0))
unfold let assign_post (#a:Type) (#rel:preorder a) (r:mreference a rel) (v:a) (m0:mem) (_:unit) (m1:mem) =
m0 `contains` r /\ m1 == HyperStack.upd m0 r v
(**
* Assigns, provided that the reference exists.
* Guarantees the strongest low-level effect: Stack
*)
val op_Colon_Equals (#a:Type) (#rel:preorder a) (r:mreference a rel) (v:a)
:STL unit (requires (fun m -> r `is_live_for_rw_in` m /\ rel (HS.sel m r) v))
(ensures (assign_post r v))
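(* Illustrative sketch only, not part of this interface: writing through a
   reference with the trivial preorder; the `contains` hypothesis discharges
   the is_live_for_rw_in precondition of := above.  The name `reset` is
   hypothetical. *)
let reset (r:ref int)
  :Stack unit (requires (fun m -> m `contains` r)) (ensures (fun _ _ _ -> True))
  = r := 0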
unfold let deref_post (#a:Type) (#rel:preorder a) (r:mreference a rel) (m0:mem) (x:a) (m1:mem) =
m1 == m0 /\ m0 `contains` r /\ x == HyperStack.sel m0 r
(**
* Dereferences, provided that the reference exists.
* Guarantees the strongest low-level effect: Stack
*)
val op_Bang (#a:Type) (#rel:preorder a) (r:mreference a rel)
:Stack a (requires (fun m -> r `is_live_for_rw_in` m))
(ensures (deref_post r)) | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_none : h0: FStar.Monotonic.HyperStack.mem -> h1: FStar.Monotonic.HyperStack.mem -> Prims.logical | [] | FStar.HyperStack.ST.modifies_none | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h0: FStar.Monotonic.HyperStack.mem -> h1: FStar.Monotonic.HyperStack.mem -> Prims.logical | {
"end_col": 62,
"end_line": 433,
"start_col": 38,
"start_line": 433
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let deref_post (#a:Type) (#rel:preorder a) (r:mreference a rel) (m0:mem) (x:a) (m1:mem) =
m1 == m0 /\ m0 `contains` r /\ x == HyperStack.sel m0 r | let deref_post (#a: Type) (#rel: preorder a) (r: mreference a rel) (m0: mem) (x: a) (m1: mem) = | false | null | false | m1 == m0 /\ m0 `contains` r /\ x == HyperStack.sel m0 r | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Preorder.preorder",
"FStar.HyperStack.ST.mreference",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Prims.eq2",
"FStar.Monotonic.HyperStack.contains",
"FStar.Monotonic.HyperStack.sel",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
 WARNING: this effect is unsafe; for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
// * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
// * the advantage is that, the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
// * marking these opaque, since expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) =
forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1
(* predicates *)
val same_refs_in_all_regions (m0 m1:mem) :Type0
val same_refs_in_stack_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_stack_regions (m0 m1:mem) :Type0
(* intro and elim forms *)
val lemma_same_refs_in_all_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_region m0 m1)) (ensures (same_refs_in_all_regions m0 m1))
[SMTPat (same_refs_in_all_regions m0 m1)]
val lemma_same_refs_in_all_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_all_regions m0 m1 /\ contained_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_stack_region m0 m1)) (ensures (same_refs_in_stack_regions m0 m1))
[SMTPat (same_refs_in_stack_regions m0 m1)]
val lemma_same_refs_in_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_stack_regions m0 m1 /\ contained_stack_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_region m0 m1)) (ensures (same_refs_in_non_tip_regions m0 m1))
[SMTPat (same_refs_in_non_tip_regions m0 m1)]
val lemma_same_refs_in_non_tip_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_regions m0 m1 /\ contained_non_tip_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_stack_region m0 m1)) (ensures (same_refs_in_non_tip_stack_regions m0 m1))
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1)]
val lemma_same_refs_in_non_tip_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_stack_regions m0 m1 /\ contained_non_tip_stack_region m0 m1 r))
(ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r);];
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
(******)
let equal_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\
same_refs_in_all_regions m0 m1
val lemma_equal_domains_trans (m0 m1 m2:mem)
:Lemma (requires (equal_domains m0 m1 /\ equal_domains m1 m2))
(ensures (equal_domains m0 m2))
[SMTPat (equal_domains m0 m1); SMTPat (equal_domains m1 m2)]
(**
 * Effect of stack-based code: the 'equal_domains' clause enforces that
* - both mem have the same tip
* - both mem reference the same heaps (their map: rid -> heap have the same domain)
* - in each region id, the corresponding heaps contain the same references on both sides
*)
effect Stack (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_domains h h1) ==> p a h1)) (* WP *)
(**
* Effect of heap-based code.
* - assumes that the stack is empty (tip = root)
* - corresponds to the HyperHeap ST effect
 * - can call into Stack and ST code freely
* - respects the stack invariant: the stack has to be empty when returning
*)
effect Heap (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ get_tip h = HS.root /\ get_tip h1 = HS.root ) ==> p a h1)) (* WP *)
let equal_stack_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
same_refs_in_stack_regions m0 m1
(**
* Effect of low-level code:
* - maintains the allocation invariant on the stack: no allocation unless in a new frame that has to be popped before returning
 * - no constraints on heap allocation
*)
effect ST (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_stack_domains h h1) ==> p a h1)) (* WP *)
effect St (a:Type) = ST a (fun _ -> True) (fun _ _ _ -> True)
let inline_stack_inv h h' : GTot Type0 =
(* The frame invariant is enforced *)
get_tip h == get_tip h' /\
(* The heap structure is unchanged *)
Map.domain (get_hmap h) == Map.domain (get_hmap h') /\
  (* Any region that is not the tip has not seen any allocation *)
same_refs_in_non_tip_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other words, the backend has to unfold the body into the caller's body.
* This effect maintains the stack AND the heap invariant: it can be inlined in the Stack effect
* function body as well as in a Heap effect function body
*)
effect StackInline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ is_stack_region (get_tip h) /\ (forall a h1. (pre h /\ post h a h1 /\ inline_stack_inv h h1) ==> p a h1)) (* WP *)
let inline_inv h h' : GTot Type0 =
(* The stack invariant is enforced *)
get_tip h == get_tip h' /\
(* No frame may have received an allocation but the tip *)
same_refs_in_non_tip_stack_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other words, the backend has to unfold the body into the caller's body.
* This effect only maintains the stack invariant: the tip is left unchanged and no allocation
* may occur in the stack below the tip.
* Region allocation is not constrained.
* Heap allocation is not constrained.
*)
effect Inline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ inline_inv h h1) ==> p a h1)) (* WP *)
(**
* TODO:
* REMOVE AS SOON AS CONSENSUS IS REACHED ON NEW LOW EFFECT NAMES
*)
effect STL (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = Stack a pre post
sub_effect
DIV ~> STATE = fun (a:Type) (wp:pure_wp a) (p:st_post a) (h:mem) -> wp (fun a -> p a h)
(*
* AR: The clients should open HyperStack.ST after the memory model files (as with Heap and FStar.ST)
*)
type mreference (a:Type) (rel:preorder a) =
r:HS.mreference a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mstackref (a:Type) (rel:preorder a) =
r:HS.mstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mref (a:Type) (rel:preorder a) =
r:HS.mref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmstackref (a:Type) (rel:preorder a) =
r:HS.mmmstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmref (a:Type) (rel:preorder a) =
r:HS.mmmref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type s_mref (i:rid) (a:Type) (rel:preorder a) =
r:HS.s_mref i a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type reference (a:Type) = mreference a (Heap.trivial_preorder a)
type stackref (a:Type) = mstackref a (Heap.trivial_preorder a)
type ref (a:Type) = mref a (Heap.trivial_preorder a)
type mmstackref (a:Type) = mmmstackref a (Heap.trivial_preorder a)
type mmref (a:Type) = mmmref a (Heap.trivial_preorder a)
type s_ref (i:rid) (a:Type) = s_mref i a (Heap.trivial_preorder a)
let is_eternal_region (r:rid) :Type0
= HS.is_eternal_region_hs r /\ (r == HS.root \/ witnessed (region_contains_pred r))
(*
* AR: The change to using ST.rid may not be that bad itself,
* since subtyping should take care of most instances in the client usage.
* However, one case where it could be an issue is modifies clauses that use
* Set.set rid.
*)
(** Pushes a new empty frame on the stack **)
val push_frame (_:unit) :Unsafe unit (requires (fun m -> True)) (ensures (fun (m0:mem) _ (m1:mem) -> fresh_frame m0 m1))
(** Removes old frame from the stack **)
val pop_frame (_:unit)
:Unsafe unit (requires (fun m -> poppable m))
(ensures (fun (m0:mem) _ (m1:mem) -> poppable m0 /\ m1 == pop m0 /\ popped m0 m1))
#push-options "--z3rlimit 40"
let salloc_post (#a:Type) (#rel:preorder a) (init:a) (m0:mem)
(s:mreference a rel{is_stack_region (frameOf s)}) (m1:mem)
= is_stack_region (get_tip m0) /\
Map.domain (get_hmap m0) == Map.domain (get_hmap m1) /\
get_tip m0 == get_tip m1 /\
frameOf s = get_tip m1 /\
HS.fresh_ref s m0 m1 /\ //it's a fresh reference in the top frame
m1 == HyperStack.upd m0 s init //and it's been initialized
#pop-options
(**
* Allocates on the top-most stack frame
*)
val salloc (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
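(*
 * Illustrative sketch, added for exposition only (hypothetical name, simplified spec):
 * the canonical frame discipline. push_frame opens a fresh stack frame, salloc
 * allocates in that frame, and pop_frame discards it before returning, which is what
 * lets the function as a whole meet the Stack effect's equal_domains obligation.
 *)
let example_with_frame ()
  : Stack unit (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = push_frame ();
    let _r : stackref int = salloc 0 in  // lives only in the frame pushed above
    pop_frame ()                         // the frame, and _r with it, is popped here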
// JP, AR: these are not supported in C, and `salloc` already benefits from
// automatic memory management.
[@@ (deprecated "Use salloc instead") ]
val salloc_mm (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mmmstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
[@@ (deprecated "Use salloc instead") ]
val sfree (#a:Type) (#rel:preorder a) (r:mmmstackref a rel)
:StackInline unit (requires (fun m0 -> frameOf r = get_tip m0 /\ m0 `contains` r))
(ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0))
unfold
let new_region_post_common (r0 r1:rid) (m0 m1:mem) =
r1 `HS.extends` r0 /\
HS.fresh_region r1 m0 m1 /\
get_hmap m1 == Map.upd (get_hmap m0) r1 Heap.emp /\
get_tip m1 == get_tip m0 /\
HS.live_region m0 r0
val new_region (r0:rid)
:ST rid
(requires (fun m -> is_eternal_region r0))
(ensures (fun m0 r1 m1 ->
new_region_post_common r0 r1 m0 m1 /\
HS.color r1 = HS.color r0 /\
is_eternal_region r1 /\
(r1, m1) == HS.new_eternal_region m0 r0 None))
val new_colored_region (r0:rid) (c:int)
:ST rid
(requires (fun m -> HS.is_heap_color c /\ is_eternal_region r0))
(ensures (fun m0 r1 m1 ->
new_region_post_common r0 r1 m0 m1 /\
HS.color r1 = c /\
is_eternal_region r1 /\
(r1, m1) == HS.new_eternal_region m0 r0 (Some c)))
let ralloc_post (#a:Type) (#rel:preorder a) (i:rid) (init:a) (m0:mem)
(x:mreference a rel) (m1:mem) =
let region_i = get_hmap m0 `Map.sel` i in
as_ref x `Heap.unused_in` region_i /\
i `is_in` get_hmap m0 /\
i = frameOf x /\
m1 == upd m0 x init
val ralloc (#a:Type) (#rel:preorder a) (i:rid) (init:a)
:ST (mref a rel) (requires (fun m -> is_eternal_region i))
(ensures (ralloc_post i init))
val ralloc_mm (#a:Type) (#rel:preorder a) (i:rid) (init:a)
:ST (mmmref a rel) (requires (fun m -> is_eternal_region i))
(ensures (ralloc_post i init))
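(*
 * Illustrative sketch, added for exposition only (hypothetical name, trivial spec):
 * heap allocation. A fresh eternal region is carved out below the root region and a
 * garbage-collected reference is allocated inside it; no frame discipline is needed,
 * so the ST effect suffices.
 *)
let example_heap_alloc ()
  : ST (ref int) (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = let r = new_region HS.root in
    ralloc r 42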
(*
* AR: 12/26: For a ref to be readable/writable/free-able,
* the client can either prove contains
* or give us enough so that we can use monotonicity to derive contains
*)
let is_live_for_rw_in (#a:Type) (#rel:preorder a) (r:mreference a rel) (m:mem) :Type0 =
(m `contains` r) \/
(let i = HS.frameOf r in
(is_eternal_region i \/ i `HS.is_above` get_tip m) /\
(not (is_mm r) \/ m `HS.contains_ref_in_its_region` r))
val rfree (#a:Type) (#rel:preorder a) (r:mreference a rel{HS.is_mm r /\ HS.is_heap_color (HS.color (HS.frameOf r))})
:ST unit (requires (fun m0 -> r `is_live_for_rw_in` m0))
(ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0))
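(*
 * Illustrative sketch, added for exposition only (hypothetical name, trivial spec):
 * manual memory management. ralloc_mm yields a reference that is not garbage
 * collected, so the caller must release it explicitly with rfree.
 *)
let example_manual_mm ()
  : ST unit (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = let r : mmref int = ralloc_mm HS.root 0 in
    rfree r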
unfold let assign_post (#a:Type) (#rel:preorder a) (r:mreference a rel) (v:a) (m0:mem) (_:unit) (m1:mem) =
m0 `contains` r /\ m1 == HyperStack.upd m0 r v
(**
* Assigns, provided that the reference exists.
* Guarantees the strongest low-level effect: Stack
*)
val op_Colon_Equals (#a:Type) (#rel:preorder a) (r:mreference a rel) (v:a)
:STL unit (requires (fun m -> r `is_live_for_rw_in` m /\ rel (HS.sel m r) v))
(ensures (assign_post r v)) | false | false | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val deref_post : r: FStar.HyperStack.ST.mreference a rel ->
m0: FStar.Monotonic.HyperStack.mem ->
x: a ->
m1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | [] | FStar.HyperStack.ST.deref_post | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
r: FStar.HyperStack.ST.mreference a rel ->
m0: FStar.Monotonic.HyperStack.mem ->
x: a ->
m1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | {
"end_col": 57,
"end_line": 423,
"start_col": 2,
"start_line": 423
} |
|
Prims.Tot | val is_live_for_rw_in (#a: Type) (#rel: preorder a) (r: mreference a rel) (m: mem) : Type0 | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let is_live_for_rw_in (#a:Type) (#rel:preorder a) (r:mreference a rel) (m:mem) :Type0 =
(m `contains` r) \/
(let i = HS.frameOf r in
(is_eternal_region i \/ i `HS.is_above` get_tip m) /\
(not (is_mm r) \/ m `HS.contains_ref_in_its_region` r)) | val is_live_for_rw_in (#a: Type) (#rel: preorder a) (r: mreference a rel) (m: mem) : Type0
let is_live_for_rw_in (#a: Type) (#rel: preorder a) (r: mreference a rel) (m: mem) : Type0 = | false | null | false | (m `contains` r) \/
(let i = HS.frameOf r in
(is_eternal_region i \/ i `HS.is_above` (get_tip m)) /\
(not (is_mm r) \/ m `HS.contains_ref_in_its_region` r)) | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Preorder.preorder",
"FStar.HyperStack.ST.mreference",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_or",
"FStar.Monotonic.HyperStack.contains",
"Prims.l_and",
"FStar.HyperStack.ST.is_eternal_region",
"Prims.b2t",
"FStar.Monotonic.HyperStack.is_above",
"FStar.Monotonic.HyperStack.get_tip",
"Prims.op_Negation",
"FStar.Monotonic.HyperStack.is_mm",
"FStar.Monotonic.HyperStack.contains_ref_in_its_region",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Monotonic.HyperStack.frameOf"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
// * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
// * the advantage is that the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
// * marking these opaque, since we expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) =
forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1
(* predicates *)
val same_refs_in_all_regions (m0 m1:mem) :Type0
val same_refs_in_stack_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_stack_regions (m0 m1:mem) :Type0
(* intro and elim forms *)
val lemma_same_refs_in_all_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_region m0 m1)) (ensures (same_refs_in_all_regions m0 m1))
[SMTPat (same_refs_in_all_regions m0 m1)]
val lemma_same_refs_in_all_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_all_regions m0 m1 /\ contained_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_stack_region m0 m1)) (ensures (same_refs_in_stack_regions m0 m1))
[SMTPat (same_refs_in_stack_regions m0 m1)]
val lemma_same_refs_in_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_stack_regions m0 m1 /\ contained_stack_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_region m0 m1)) (ensures (same_refs_in_non_tip_regions m0 m1))
[SMTPat (same_refs_in_non_tip_regions m0 m1)]
val lemma_same_refs_in_non_tip_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_regions m0 m1 /\ contained_non_tip_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_stack_region m0 m1)) (ensures (same_refs_in_non_tip_stack_regions m0 m1))
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1)]
val lemma_same_refs_in_non_tip_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_stack_regions m0 m1 /\ contained_non_tip_stack_region m0 m1 r))
(ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r);];
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
(******)
let equal_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\
same_refs_in_all_regions m0 m1
val lemma_equal_domains_trans (m0 m1 m2:mem)
:Lemma (requires (equal_domains m0 m1 /\ equal_domains m1 m2))
(ensures (equal_domains m0 m2))
[SMTPat (equal_domains m0 m1); SMTPat (equal_domains m1 m2)]
(**
* Effect of stack-based code: the 'equal_domains' clause enforces that
* - both mem have the same tip
* - both mem reference the same heaps (their rid -> heap maps have the same domain)
* - in each region id, the corresponding heaps contain the same references on both sides
*)
effect Stack (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_domains h h1) ==> p a h1)) (* WP *)
(**
* Effect of heap-based code.
* - assumes that the stack is empty (tip = root)
* - corresponds to the HyperHeap ST effect
* - can call into Stack and ST code freely
* - respects the stack invariant: the stack has to be empty when returning
*)
effect Heap (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ get_tip h = HS.root /\ get_tip h1 = HS.root ) ==> p a h1)) (* WP *)
let equal_stack_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
same_refs_in_stack_regions m0 m1
(**
* Effect of low-level code:
* - maintains the allocation invariant on the stack: no allocation unless in a new frame that has to be popped before returning
* - no constraints on heap allocation
*)
effect ST (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_stack_domains h h1) ==> p a h1)) (* WP *)
effect St (a:Type) = ST a (fun _ -> True) (fun _ _ _ -> True)
let inline_stack_inv h h' : GTot Type0 =
(* The frame invariant is enforced *)
get_tip h == get_tip h' /\
(* The heap structure is unchanged *)
Map.domain (get_hmap h) == Map.domain (get_hmap h') /\
(* Any region that is not the tip has not seen any allocation *)
same_refs_in_non_tip_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other words, the backend has to unfold the body into the caller's body.
* This effect maintains the stack AND the heap invariant: it can be inlined in the Stack effect
* function body as well as in a Heap effect function body
*)
effect StackInline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ is_stack_region (get_tip h) /\ (forall a h1. (pre h /\ post h a h1 /\ inline_stack_inv h h1) ==> p a h1)) (* WP *)
let inline_inv h h' : GTot Type0 =
(* The stack invariant is enforced *)
get_tip h == get_tip h' /\
(* No frame may have received an allocation but the tip *)
same_refs_in_non_tip_stack_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other words, the backend has to unfold the body into the caller's body.
* This effect only maintains the stack invariant: the tip is left unchanged and no allocation
* may occur in the stack below the tip.
* Region allocation is not constrained.
* Heap allocation is not constrained.
*)
effect Inline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ inline_inv h h1) ==> p a h1)) (* WP *)
(**
* TODO:
* REMOVE AS SOON AS CONSENSUS IS REACHED ON NEW LOW EFFECT NAMES
*)
effect STL (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = Stack a pre post
sub_effect
DIV ~> STATE = fun (a:Type) (wp:pure_wp a) (p:st_post a) (h:mem) -> wp (fun a -> p a h)
(*
* AR: The clients should open HyperStack.ST after the memory model files (as with Heap and FStar.ST)
*)
type mreference (a:Type) (rel:preorder a) =
r:HS.mreference a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mstackref (a:Type) (rel:preorder a) =
r:HS.mstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mref (a:Type) (rel:preorder a) =
r:HS.mref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmstackref (a:Type) (rel:preorder a) =
r:HS.mmmstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmref (a:Type) (rel:preorder a) =
r:HS.mmmref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type s_mref (i:rid) (a:Type) (rel:preorder a) =
r:HS.s_mref i a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type reference (a:Type) = mreference a (Heap.trivial_preorder a)
type stackref (a:Type) = mstackref a (Heap.trivial_preorder a)
type ref (a:Type) = mref a (Heap.trivial_preorder a)
type mmstackref (a:Type) = mmmstackref a (Heap.trivial_preorder a)
type mmref (a:Type) = mmmref a (Heap.trivial_preorder a)
type s_ref (i:rid) (a:Type) = s_mref i a (Heap.trivial_preorder a)
let is_eternal_region (r:rid) :Type0
= HS.is_eternal_region_hs r /\ (r == HS.root \/ witnessed (region_contains_pred r))
(*
* AR: The change to using ST.rid may not be that bad itself,
* since subtyping should take care of most instances in the client usage.
* However, one case where it could be an issue is modifies clauses that use
* Set.set rid.
*)
(** Pushes a new empty frame on the stack **)
val push_frame (_:unit) :Unsafe unit (requires (fun m -> True)) (ensures (fun (m0:mem) _ (m1:mem) -> fresh_frame m0 m1))
(** Removes old frame from the stack **)
val pop_frame (_:unit)
:Unsafe unit (requires (fun m -> poppable m))
(ensures (fun (m0:mem) _ (m1:mem) -> poppable m0 /\ m1 == pop m0 /\ popped m0 m1))
#push-options "--z3rlimit 40"
let salloc_post (#a:Type) (#rel:preorder a) (init:a) (m0:mem)
(s:mreference a rel{is_stack_region (frameOf s)}) (m1:mem)
= is_stack_region (get_tip m0) /\
Map.domain (get_hmap m0) == Map.domain (get_hmap m1) /\
get_tip m0 == get_tip m1 /\
frameOf s = get_tip m1 /\
HS.fresh_ref s m0 m1 /\ //it's a fresh reference in the top frame
m1 == HyperStack.upd m0 s init //and it's been initialized
#pop-options
(**
* Allocates on the top-most stack frame
*)
val salloc (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
// JP, AR: these are not supported in C, and `salloc` already benefits from
// automatic memory management.
[@@ (deprecated "Use salloc instead") ]
val salloc_mm (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mmmstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
[@@ (deprecated "Use salloc instead") ]
val sfree (#a:Type) (#rel:preorder a) (r:mmmstackref a rel)
:StackInline unit (requires (fun m0 -> frameOf r = get_tip m0 /\ m0 `contains` r))
(ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0))
unfold
let new_region_post_common (r0 r1:rid) (m0 m1:mem) =
r1 `HS.extends` r0 /\
HS.fresh_region r1 m0 m1 /\
get_hmap m1 == Map.upd (get_hmap m0) r1 Heap.emp /\
get_tip m1 == get_tip m0 /\
HS.live_region m0 r0
val new_region (r0:rid)
:ST rid
(requires (fun m -> is_eternal_region r0))
(ensures (fun m0 r1 m1 ->
new_region_post_common r0 r1 m0 m1 /\
HS.color r1 = HS.color r0 /\
is_eternal_region r1 /\
(r1, m1) == HS.new_eternal_region m0 r0 None))
val new_colored_region (r0:rid) (c:int)
:ST rid
(requires (fun m -> HS.is_heap_color c /\ is_eternal_region r0))
(ensures (fun m0 r1 m1 ->
new_region_post_common r0 r1 m0 m1 /\
HS.color r1 = c /\
is_eternal_region r1 /\
(r1, m1) == HS.new_eternal_region m0 r0 (Some c)))
let ralloc_post (#a:Type) (#rel:preorder a) (i:rid) (init:a) (m0:mem)
(x:mreference a rel) (m1:mem) =
let region_i = get_hmap m0 `Map.sel` i in
as_ref x `Heap.unused_in` region_i /\
i `is_in` get_hmap m0 /\
i = frameOf x /\
m1 == upd m0 x init
val ralloc (#a:Type) (#rel:preorder a) (i:rid) (init:a)
:ST (mref a rel) (requires (fun m -> is_eternal_region i))
(ensures (ralloc_post i init))
val ralloc_mm (#a:Type) (#rel:preorder a) (i:rid) (init:a)
:ST (mmmref a rel) (requires (fun m -> is_eternal_region i))
(ensures (ralloc_post i init))
(*
* AR: 12/26: For a ref to be readable/writable/free-able,
* the client can either prove contains
* or give us enough so that we can use monotonicity to derive contains
*) | false | false | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val is_live_for_rw_in (#a: Type) (#rel: preorder a) (r: mreference a rel) (m: mem) : Type0 | [] | FStar.HyperStack.ST.is_live_for_rw_in | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.HyperStack.ST.mreference a rel -> m: FStar.Monotonic.HyperStack.mem -> Type0 | {
"end_col": 66,
"end_line": 405,
"start_col": 2,
"start_line": 402
} |
Prims.Tot | val is_freeable_heap_region (r: rid) : Type0 | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let is_freeable_heap_region (r:rid) : Type0 =
HS.is_heap_color (color r) /\ HS.rid_freeable r /\ witnessed (region_contains_pred r) | val is_freeable_heap_region (r: rid) : Type0
let is_freeable_heap_region (r: rid) : Type0 = | false | null | false | HS.is_heap_color (color r) /\ HS.rid_freeable r /\ witnessed (region_contains_pred r) | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.l_and",
"Prims.b2t",
"FStar.Monotonic.HyperStack.is_heap_color",
"FStar.Monotonic.HyperHeap.color",
"FStar.Monotonic.HyperHeap.rid_freeable",
"FStar.HyperStack.ST.witnessed",
"FStar.HyperStack.ST.region_contains_pred"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
// * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
// * the advantage is that the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
// * marking these opaque, since we expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) =
forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1
(* predicates *)
val same_refs_in_all_regions (m0 m1:mem) :Type0
val same_refs_in_stack_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_stack_regions (m0 m1:mem) :Type0
(* intro and elim forms *)
val lemma_same_refs_in_all_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_region m0 m1)) (ensures (same_refs_in_all_regions m0 m1))
[SMTPat (same_refs_in_all_regions m0 m1)]
val lemma_same_refs_in_all_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_all_regions m0 m1 /\ contained_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_stack_region m0 m1)) (ensures (same_refs_in_stack_regions m0 m1))
[SMTPat (same_refs_in_stack_regions m0 m1)]
val lemma_same_refs_in_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_stack_regions m0 m1 /\ contained_stack_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_region m0 m1)) (ensures (same_refs_in_non_tip_regions m0 m1))
[SMTPat (same_refs_in_non_tip_regions m0 m1)]
val lemma_same_refs_in_non_tip_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_regions m0 m1 /\ contained_non_tip_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_stack_region m0 m1)) (ensures (same_refs_in_non_tip_stack_regions m0 m1))
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1)]
val lemma_same_refs_in_non_tip_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_stack_regions m0 m1 /\ contained_non_tip_stack_region m0 m1 r))
(ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r);];
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
(******)
let equal_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\
same_refs_in_all_regions m0 m1
val lemma_equal_domains_trans (m0 m1 m2:mem)
:Lemma (requires (equal_domains m0 m1 /\ equal_domains m1 m2))
(ensures (equal_domains m0 m2))
[SMTPat (equal_domains m0 m1); SMTPat (equal_domains m1 m2)]
(**
* Effect of stack-based code: the 'equal_domains' clause enforces that
* - both mem have the same tip
* - both mem reference the same heaps (their rid -> heap maps have the same domain)
* - in each region id, the corresponding heaps contain the same references on both sides
*)
effect Stack (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_domains h h1) ==> p a h1)) (* WP *)
(**
* Effect of heap-based code.
* - assumes that the stack is empty (tip = root)
* - corresponds to the HyperHeap ST effect
* - can call into Stack and ST code freely
* - respects the stack invariant: the stack has to be empty when returning
*)
effect Heap (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ get_tip h = HS.root /\ get_tip h1 = HS.root ) ==> p a h1)) (* WP *)
let equal_stack_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
same_refs_in_stack_regions m0 m1
(**
* Effect of low-level code:
* - maintains the allocation invariant on the stack: no allocation unless in a new frame that has to be popped before returning
* - no constraints on heap allocation
*)
effect ST (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_stack_domains h h1) ==> p a h1)) (* WP *)
effect St (a:Type) = ST a (fun _ -> True) (fun _ _ _ -> True)
let inline_stack_inv h h' : GTot Type0 =
(* The frame invariant is enforced *)
get_tip h == get_tip h' /\
(* The heap structure is unchanged *)
Map.domain (get_hmap h) == Map.domain (get_hmap h') /\
(* Any region that is not the tip has not seen any allocation *)
same_refs_in_non_tip_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other words, the backend has to unfold the body into the caller's body.
* This effect maintains the stack AND the heap invariant: it can be inlined in the Stack effect
* function body as well as in a Heap effect function body
*)
effect StackInline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ is_stack_region (get_tip h) /\ (forall a h1. (pre h /\ post h a h1 /\ inline_stack_inv h h1) ==> p a h1)) (* WP *)
let inline_inv h h' : GTot Type0 =
(* The stack invariant is enforced *)
get_tip h == get_tip h' /\
(* No frame may have received an allocation but the tip *)
same_refs_in_non_tip_stack_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other words, the backend has to unfold the body into the caller's body.
* This effect only maintains the stack invariant: the tip is left unchanged and no allocation
* may occur in the stack below the tip.
* Region allocation is not constrained.
* Heap allocation is not constrained.
*)
effect Inline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ inline_inv h h1) ==> p a h1)) (* WP *)
(**
* TODO:
* REMOVE AS SOON AS CONSENSUS IS REACHED ON NEW LOW EFFECT NAMES
*)
effect STL (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = Stack a pre post
sub_effect
DIV ~> STATE = fun (a:Type) (wp:pure_wp a) (p:st_post a) (h:mem) -> wp (fun a -> p a h)
(*
* AR: The clients should open HyperStack.ST after the memory model files (as with Heap and FStar.ST)
*)
type mreference (a:Type) (rel:preorder a) =
r:HS.mreference a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mstackref (a:Type) (rel:preorder a) =
r:HS.mstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mref (a:Type) (rel:preorder a) =
r:HS.mref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmstackref (a:Type) (rel:preorder a) =
r:HS.mmmstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmref (a:Type) (rel:preorder a) =
r:HS.mmmref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type s_mref (i:rid) (a:Type) (rel:preorder a) =
r:HS.s_mref i a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type reference (a:Type) = mreference a (Heap.trivial_preorder a)
type stackref (a:Type) = mstackref a (Heap.trivial_preorder a)
type ref (a:Type) = mref a (Heap.trivial_preorder a)
type mmstackref (a:Type) = mmmstackref a (Heap.trivial_preorder a)
type mmref (a:Type) = mmmref a (Heap.trivial_preorder a)
type s_ref (i:rid) (a:Type) = s_mref i a (Heap.trivial_preorder a)
let is_eternal_region (r:rid) :Type0
= HS.is_eternal_region_hs r /\ (r == HS.root \/ witnessed (region_contains_pred r))
(*
* AR: The change to using ST.rid may not be that bad itself,
* since subtyping should take care of most instances in the client usage.
* However, one case where it could be an issue is modifies clauses that use
* Set.set rid.
*)
(** Pushes a new empty frame on the stack **)
val push_frame (_:unit) :Unsafe unit (requires (fun m -> True)) (ensures (fun (m0:mem) _ (m1:mem) -> fresh_frame m0 m1))
(** Removes old frame from the stack **)
val pop_frame (_:unit)
:Unsafe unit (requires (fun m -> poppable m))
(ensures (fun (m0:mem) _ (m1:mem) -> poppable m0 /\ m1 == pop m0 /\ popped m0 m1))
#push-options "--z3rlimit 40"
let salloc_post (#a:Type) (#rel:preorder a) (init:a) (m0:mem)
(s:mreference a rel{is_stack_region (frameOf s)}) (m1:mem)
= is_stack_region (get_tip m0) /\
Map.domain (get_hmap m0) == Map.domain (get_hmap m1) /\
get_tip m0 == get_tip m1 /\
frameOf s = get_tip m1 /\
HS.fresh_ref s m0 m1 /\ //it's a fresh reference in the top frame
m1 == HyperStack.upd m0 s init //and it's been initialized
#pop-options
(**
* Allocates on the top-most stack frame
*)
val salloc (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
// JP, AR: these are not supported in C, and `salloc` already benefits from
// automatic memory management.
[@@ (deprecated "Use salloc instead") ]
val salloc_mm (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mmmstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
[@@ (deprecated "Use salloc instead") ]
val sfree (#a:Type) (#rel:preorder a) (r:mmmstackref a rel)
:StackInline unit (requires (fun m0 -> frameOf r = get_tip m0 /\ m0 `contains` r))
(ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0))
unfold
let new_region_post_common (r0 r1:rid) (m0 m1:mem) =
r1 `HS.extends` r0 /\
HS.fresh_region r1 m0 m1 /\
get_hmap m1 == Map.upd (get_hmap m0) r1 Heap.emp /\
get_tip m1 == get_tip m0 /\
HS.live_region m0 r0
val new_region (r0:rid)
:ST rid
(requires (fun m -> is_eternal_region r0))
(ensures (fun m0 r1 m1 ->
new_region_post_common r0 r1 m0 m1 /\
HS.color r1 = HS.color r0 /\
is_eternal_region r1 /\
(r1, m1) == HS.new_eternal_region m0 r0 None))
val new_colored_region (r0:rid) (c:int)
:ST rid
(requires (fun m -> HS.is_heap_color c /\ is_eternal_region r0))
(ensures (fun m0 r1 m1 ->
new_region_post_common r0 r1 m0 m1 /\
HS.color r1 = c /\
is_eternal_region r1 /\
(r1, m1) == HS.new_eternal_region m0 r0 (Some c)))
let ralloc_post (#a:Type) (#rel:preorder a) (i:rid) (init:a) (m0:mem)
(x:mreference a rel) (m1:mem) =
let region_i = get_hmap m0 `Map.sel` i in
as_ref x `Heap.unused_in` region_i /\
i `is_in` get_hmap m0 /\
i = frameOf x /\
m1 == upd m0 x init
val ralloc (#a:Type) (#rel:preorder a) (i:rid) (init:a)
:ST (mref a rel) (requires (fun m -> is_eternal_region i))
(ensures (ralloc_post i init))
val ralloc_mm (#a:Type) (#rel:preorder a) (i:rid) (init:a)
:ST (mmmref a rel) (requires (fun m -> is_eternal_region i))
(ensures (ralloc_post i init))
(*
* AR: 12/26: For a ref to be readable/writable/free-able,
* the client can either prove contains
* or give us enough so that we can use monotonicity to derive contains
*)
let is_live_for_rw_in (#a:Type) (#rel:preorder a) (r:mreference a rel) (m:mem) :Type0 =
(m `contains` r) \/
(let i = HS.frameOf r in
(is_eternal_region i \/ i `HS.is_above` get_tip m) /\
(not (is_mm r) \/ m `HS.contains_ref_in_its_region` r))
val rfree (#a:Type) (#rel:preorder a) (r:mreference a rel{HS.is_mm r /\ HS.is_heap_color (HS.color (HS.frameOf r))})
:ST unit (requires (fun m0 -> r `is_live_for_rw_in` m0))
(ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0))
unfold let assign_post (#a:Type) (#rel:preorder a) (r:mreference a rel) (v:a) (m0:mem) (_:unit) (m1:mem) =
m0 `contains` r /\ m1 == HyperStack.upd m0 r v
(**
* Assigns, provided that the reference exists.
* Guarantees the strongest low-level effect: Stack
*)
val op_Colon_Equals (#a:Type) (#rel:preorder a) (r:mreference a rel) (v:a)
:STL unit (requires (fun m -> r `is_live_for_rw_in` m /\ rel (HS.sel m r) v))
(ensures (assign_post r v))
unfold let deref_post (#a:Type) (#rel:preorder a) (r:mreference a rel) (m0:mem) (x:a) (m1:mem) =
m1 == m0 /\ m0 `contains` r /\ x == HyperStack.sel m0 r
(**
* Dereferences, provided that the reference exists.
* Guarantees the strongest low-level effect: Stack
*)
val op_Bang (#a:Type) (#rel:preorder a) (r:mreference a rel)
:Stack a (requires (fun m -> r `is_live_for_rw_in` m))
(ensures (deref_post r))
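(*
 * Illustrative sketch, added for exposition only (hypothetical name, trivial spec):
 * read-modify-write through a live reference, combining op_Bang and op_Colon_Equals.
 * The trivial preorder on `ref` makes the rel precondition of the assignment vacuous.
 *)
let example_incr (r:ref int)
  : ST unit (requires (fun h -> h `contains` r)) (ensures (fun _ _ _ -> True))
  = r := !r + 1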
let modifies_none (h0:mem) (h1:mem) = modifies Set.empty h0 h1
// NS: This version is just fine; all the operations on mem are ghost
// and we can rig it so that mem just gets erased at the end
(**
* Returns the current stack of heaps --- it should be erased
*)
val get (_:unit)
:Stack mem (requires (fun m -> True))
(ensures (fun m0 x m1 -> m0 == x /\ m1 == m0))
(**
* We can only recall refs with mm bit unset, not stack refs
*)
val recall (#a:Type) (#rel:preorder a) (r:mreference a rel{not (HS.is_mm r)})
:Stack unit (requires (fun m -> is_eternal_region (HS.frameOf r) \/ m `contains_region` (HS.frameOf r)))
(ensures (fun m0 _ m1 -> m0 == m1 /\ m1 `contains` r))
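(*
 * Illustrative sketch, added for exposition only (hypothetical name, trivial spec):
 * recalling liveness. An eternal, non-manually-managed reference can be recalled,
 * after which the current memory is known to contain it and it can be read without
 * any liveness precondition on the caller.
 *)
let example_recall_and_read (#a:Type) (r:ref a)
  : ST a (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = recall r; !r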
(**
* We can only recall eternal regions, not stack regions
*)
val recall_region (i:rid{is_eternal_region i})
:Stack unit (requires (fun m -> True))
(ensures (fun m0 _ m1 -> m0 == m1 /\ i `is_in` get_hmap m1))
val witness_region (i:rid)
:Stack unit (requires (fun m0 -> HS.is_eternal_region_hs i ==> i `is_in` get_hmap m0))
(ensures (fun m0 _ m1 -> m0 == m1 /\ witnessed (region_contains_pred i)))
val witness_hsref (#a:Type) (#rel:preorder a) (r:HS.mreference a rel)
:ST unit (fun h0 -> h0 `HS.contains` r)
(fun h0 _ h1 -> h0 == h1 /\ witnessed (ref_contains_pred r))
(** MR witness etc. **)
type erid = r:rid{is_eternal_region r}
type m_rref (r:erid) (a:Type) (b:preorder a) = x:mref a b{HS.frameOf x = r}
(* states that p is preserved by any valid updates on r; note that h0 and h1 may differ arbitrarily elsewhere, hence proving stability usually requires that p depends only on r's content.
*)
unfold type stable_on (#a:Type0) (#rel:preorder a) (p:mem_predicate) (r:mreference a rel)
= forall (h0 h1:mem).{:pattern (p h0); rel (HS.sel h0 r) (HS.sel h1 r)}
(p h0 /\ rel (HS.sel h0 r) (HS.sel h1 r)) ==> p h1
(*
* The stable_on_t and mr_witness API is here for legacy reasons,
* the preferred API is stable_on and witness_p
*)
unfold type stable_on_t (#i:erid) (#a:Type) (#b:preorder a)
(r:m_rref i a b) (p:mem_predicate)
= stable_on p r
val mr_witness (#r:erid) (#a:Type) (#b:preorder a)
(m:m_rref r a b) (p:mem_predicate)
:ST unit (requires (fun h0 -> p h0 /\ stable_on_t m p))
(ensures (fun h0 _ h1 -> h0==h1 /\ witnessed p))
val weaken_witness (p q:mem_predicate)
:Lemma ((forall h. p h ==> q h) /\ witnessed p ==> witnessed q)
val testify (p:mem_predicate)
:ST unit (requires (fun _ -> witnessed p))
(ensures (fun h0 _ h1 -> h0==h1 /\ p h1))
val testify_forall (#c:Type) (#p:(c -> mem -> Type0))
($s:squash (forall (x:c). witnessed (p x)))
:ST unit (requires (fun h -> True))
(ensures (fun h0 _ h1 -> h0==h1 /\ (forall (x:c). p x h1)))
val testify_forall_region_contains_pred (#c:Type) (#p:(c -> GTot rid))
($s:squash (forall (x:c). witnessed (region_contains_pred (p x))))
:ST unit (requires (fun _ -> True))
(ensures (fun h0 _ h1 -> h0 == h1 /\
(forall (x:c). HS.is_eternal_region_hs (p x) ==> h1 `contains_region` (p x))))
(****** Begin: preferred API for witnessing and recalling predicates ******)
val token_p (#a:Type0) (#rel:preorder a) (r:mreference a rel) (p:mem_predicate) :Type0
val witness_p (#a:Type0) (#rel:preorder a) (r:mreference a rel) (p:mem_predicate)
:ST unit (fun h0 -> p h0 /\ p `stable_on` r)
(fun h0 _ h1 -> h0 == h1 /\ token_p r p)
val recall_p (#a:Type0) (#rel:preorder a) (r:mreference a rel) (p:mem_predicate)
:ST unit (fun h0 -> ((is_eternal_region (HS.frameOf r) /\ not (HS.is_mm r)) \/ h0 `HS.contains` r) /\ token_p r p)
(fun h0 _ h1 -> h0 == h1 /\ h0 `HS.contains` r /\ p h0)
val token_functoriality
(#a:Type0) (#rel:preorder a) (r:mreference a rel)
(p:mem_predicate{token_p r p}) (q:mem_predicate{forall (h:mem). p h ==> q h})
: Lemma (token_p r q)
(****** End: preferred API for witnessing and recalling predicates ******)
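(*
 * Illustrative sketch, added for exposition only (hypothetical names): witnessing a
 * stable fact with witness_p and recovering it with recall_p. Under a preorder that
 * only lets the contents grow, any lower bound on a counter is stable across valid
 * updates, so it can be turned into a state-independent token and recalled later.
 *)
let grows : preorder int = fun x y -> b2t (x <= y)
let at_least (r:mref int grows) (n:int) : mem_predicate = fun h -> b2t (n <= HS.sel h r)
let example_witness_lower_bound (r:mref int grows) (n:int)
  : ST unit (requires (fun h -> n <= HS.sel h r))
            (ensures  (fun h0 _ h1 -> h0 == h1 /\ token_p r (at_least r n)))
  = witness_p r (at_least r n)  // stability follows from grows: the contents never decrease
let example_recall_lower_bound (r:mref int grows) (n:int)
  : ST unit (requires (fun _ -> token_p r (at_least r n)))
            (ensures  (fun h0 _ h1 -> h0 == h1 /\ n <= HS.sel h1 r))
  = recall_p r (at_least r n)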
type ex_rid = erid
(****** logical properties of witnessed ******)
val lemma_witnessed_constant (p:Type0)
:Lemma (witnessed (fun (m:mem) -> p) <==> p)
val lemma_witnessed_nested (p:mem_predicate)
: Lemma (witnessed (fun (m:mem) -> witnessed p) <==> witnessed p)
val lemma_witnessed_and (p q:mem_predicate)
:Lemma (witnessed (fun s -> p s /\ q s) <==> (witnessed p /\ witnessed q))
val lemma_witnessed_or (p q:mem_predicate)
:Lemma ((witnessed p \/ witnessed q) ==> witnessed (fun s -> p s \/ q s))
val lemma_witnessed_impl (p q:mem_predicate)
:Lemma ((witnessed (fun s -> p s ==> q s) /\ witnessed p) ==> witnessed q)
val lemma_witnessed_forall (#t:Type) (p:(t -> mem_predicate))
:Lemma ((witnessed (fun s -> forall x. p x s)) <==> (forall x. witnessed (p x)))
val lemma_witnessed_exists (#t:Type) (p:(t -> mem_predicate))
:Lemma ((exists x. witnessed (p x)) ==> witnessed (fun s -> exists x. p x s))
(*** Support for dynamic regions ***) | false | true | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val is_freeable_heap_region (r: rid) : Type0 | [] | FStar.HyperStack.ST.is_freeable_heap_region | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Monotonic.HyperHeap.rid -> Type0 | {
"end_col": 87,
"end_line": 564,
"start_col": 2,
"start_line": 564
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ralloc_post (#a:Type) (#rel:preorder a) (i:rid) (init:a) (m0:mem)
(x:mreference a rel) (m1:mem) =
let region_i = get_hmap m0 `Map.sel` i in
as_ref x `Heap.unused_in` region_i /\
i `is_in` get_hmap m0 /\
i = frameOf x /\
m1 == upd m0 x init | let ralloc_post
(#a: Type)
(#rel: preorder a)
(i: rid)
(init: a)
(m0: mem)
(x: mreference a rel)
(m1: mem)
= | false | null | false | let region_i = (get_hmap m0) `Map.sel` i in
(as_ref x) `Heap.unused_in` region_i /\ i `is_in` (get_hmap m0) /\ i = frameOf x /\
m1 == upd m0 x init | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.mreference",
"Prims.l_and",
"FStar.Monotonic.Heap.unused_in",
"FStar.Monotonic.HyperStack.as_ref",
"Prims.b2t",
"FStar.Monotonic.HyperStack.is_in",
"FStar.Monotonic.HyperStack.get_hmap",
"Prims.op_Equality",
"FStar.Monotonic.HyperStack.frameOf",
"Prims.eq2",
"FStar.Monotonic.HyperStack.upd",
"FStar.Monotonic.Heap.heap",
"FStar.Map.sel",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
// * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
// * the advantage is that, the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
// * marking these opaque, since we expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) =
forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1
(* predicates *)
val same_refs_in_all_regions (m0 m1:mem) :Type0
val same_refs_in_stack_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_stack_regions (m0 m1:mem) :Type0
(* intro and elim forms *)
val lemma_same_refs_in_all_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_region m0 m1)) (ensures (same_refs_in_all_regions m0 m1))
[SMTPat (same_refs_in_all_regions m0 m1)]
val lemma_same_refs_in_all_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_all_regions m0 m1 /\ contained_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_stack_region m0 m1)) (ensures (same_refs_in_stack_regions m0 m1))
[SMTPat (same_refs_in_stack_regions m0 m1)]
val lemma_same_refs_in_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_stack_regions m0 m1 /\ contained_stack_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_region m0 m1)) (ensures (same_refs_in_non_tip_regions m0 m1))
[SMTPat (same_refs_in_non_tip_regions m0 m1)]
val lemma_same_refs_in_non_tip_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_regions m0 m1 /\ contained_non_tip_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_stack_region m0 m1)) (ensures (same_refs_in_non_tip_stack_regions m0 m1))
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1)]
val lemma_same_refs_in_non_tip_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_stack_regions m0 m1 /\ contained_non_tip_stack_region m0 m1 r))
(ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r);];
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
(******)
let equal_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\
same_refs_in_all_regions m0 m1
val lemma_equal_domains_trans (m0 m1 m2:mem)
:Lemma (requires (equal_domains m0 m1 /\ equal_domains m1 m2))
(ensures (equal_domains m0 m2))
[SMTPat (equal_domains m0 m1); SMTPat (equal_domains m1 m2)]
(**
* Effect of stack-based code: the 'equal_domains' clause enforces that
* - both mem have the same tip
* - both mem reference the same heaps (their map: rid -> heap have the same domain)
* - in each region id, the corresponding heaps contain the same references on both sides
*)
effect Stack (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_domains h h1) ==> p a h1)) (* WP *)
(**
* Effect of heap-based code.
* - assumes that the stack is empty (tip = root)
* - corresponds to the HyperHeap ST effect
* - can call to Stack and ST code freely
* - respects the stack invariant: the stack has to be empty when returning
*)
effect Heap (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ get_tip h = HS.root /\ get_tip h1 = HS.root ) ==> p a h1)) (* WP *)
let equal_stack_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
same_refs_in_stack_regions m0 m1
(**
* Effect of low-level code:
* - maintains the allocation invariant on the stack: no allocation unless in a new frame that has to be popped before returning
* - no constraints on heap allocation
*)
effect ST (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_stack_domains h h1) ==> p a h1)) (* WP *)
effect St (a:Type) = ST a (fun _ -> True) (fun _ _ _ -> True)
let inline_stack_inv h h' : GTot Type0 =
(* The frame invariant is enforced *)
get_tip h == get_tip h' /\
(* The heap structure is unchanged *)
Map.domain (get_hmap h) == Map.domain (get_hmap h') /\
(* Any region that is not the tip has not seen any allocation *)
same_refs_in_non_tip_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other terms, the backend has to unfold the body into the caller's body.
* This effect maintains the stack AND the heap invariant: it can be inlined in the Stack effect
* function body as well as in a Heap effect function body
*)
effect StackInline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ is_stack_region (get_tip h) /\ (forall a h1. (pre h /\ post h a h1 /\ inline_stack_inv h h1) ==> p a h1)) (* WP *)
let inline_inv h h' : GTot Type0 =
(* The stack invariant is enforced *)
get_tip h == get_tip h' /\
(* No frame may have received an allocation but the tip *)
same_refs_in_non_tip_stack_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other terms, the backend has to unfold the body into the caller's body.
* This effect only maintains the stack invariant: the tip is left unchanged and no allocation
* may occur in the stack lower than the tip.
* Region allocation is not constrained.
* Heap allocation is not constrained.
*)
effect Inline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ inline_inv h h1) ==> p a h1)) (* WP *)
(**
* TODO:
* REMOVE AS SOON AS CONSENSUS IS REACHED ON NEW LOW EFFECT NAMES
*)
effect STL (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = Stack a pre post
sub_effect
DIV ~> STATE = fun (a:Type) (wp:pure_wp a) (p:st_post a) (h:mem) -> wp (fun a -> p a h)
(*
* AR: The clients should open HyperStack.ST after the memory model files (as with Heap and FStar.ST)
*)
type mreference (a:Type) (rel:preorder a) =
r:HS.mreference a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mstackref (a:Type) (rel:preorder a) =
r:HS.mstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mref (a:Type) (rel:preorder a) =
r:HS.mref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmstackref (a:Type) (rel:preorder a) =
r:HS.mmmstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmref (a:Type) (rel:preorder a) =
r:HS.mmmref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type s_mref (i:rid) (a:Type) (rel:preorder a) =
r:HS.s_mref i a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type reference (a:Type) = mreference a (Heap.trivial_preorder a)
type stackref (a:Type) = mstackref a (Heap.trivial_preorder a)
type ref (a:Type) = mref a (Heap.trivial_preorder a)
type mmstackref (a:Type) = mmmstackref a (Heap.trivial_preorder a)
type mmref (a:Type) = mmmref a (Heap.trivial_preorder a)
type s_ref (i:rid) (a:Type) = s_mref i a (Heap.trivial_preorder a)
let is_eternal_region (r:rid) :Type0
= HS.is_eternal_region_hs r /\ (r == HS.root \/ witnessed (region_contains_pred r))
(*
* AR: The change to using ST.rid may not be that bad itself,
* since subtyping should take care of most instances in the client usage.
* However, one case where it could be an issue is modifies clauses that use
* Set.set rid.
*)
(** Pushes a new empty frame on the stack **)
val push_frame (_:unit) :Unsafe unit (requires (fun m -> True)) (ensures (fun (m0:mem) _ (m1:mem) -> fresh_frame m0 m1))
(** Removes old frame from the stack **)
val pop_frame (_:unit)
:Unsafe unit (requires (fun m -> poppable m))
(ensures (fun (m0:mem) _ (m1:mem) -> poppable m0 /\ m1 == pop m0 /\ popped m0 m1))
#push-options "--z3rlimit 40"
let salloc_post (#a:Type) (#rel:preorder a) (init:a) (m0:mem)
(s:mreference a rel{is_stack_region (frameOf s)}) (m1:mem)
= is_stack_region (get_tip m0) /\
Map.domain (get_hmap m0) == Map.domain (get_hmap m1) /\
get_tip m0 == get_tip m1 /\
frameOf s = get_tip m1 /\
HS.fresh_ref s m0 m1 /\ //it's a fresh reference in the top frame
m1 == HyperStack.upd m0 s init //and it's been initialized
#pop-options
(**
* Allocates on the top-most stack frame
*)
val salloc (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
// JP, AR: these are not supported in C, and `salloc` already benefits from
// automatic memory management.
[@@ (deprecated "Use salloc instead") ]
val salloc_mm (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mmmstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
[@@ (deprecated "Use salloc instead") ]
val sfree (#a:Type) (#rel:preorder a) (r:mmmstackref a rel)
:StackInline unit (requires (fun m0 -> frameOf r = get_tip m0 /\ m0 `contains` r))
(ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0))
unfold
let new_region_post_common (r0 r1:rid) (m0 m1:mem) =
r1 `HS.extends` r0 /\
HS.fresh_region r1 m0 m1 /\
get_hmap m1 == Map.upd (get_hmap m0) r1 Heap.emp /\
get_tip m1 == get_tip m0 /\
HS.live_region m0 r0
val new_region (r0:rid)
:ST rid
(requires (fun m -> is_eternal_region r0))
(ensures (fun m0 r1 m1 ->
new_region_post_common r0 r1 m0 m1 /\
HS.color r1 = HS.color r0 /\
is_eternal_region r1 /\
(r1, m1) == HS.new_eternal_region m0 r0 None))
val new_colored_region (r0:rid) (c:int)
:ST rid
(requires (fun m -> HS.is_heap_color c /\ is_eternal_region r0))
(ensures (fun m0 r1 m1 ->
new_region_post_common r0 r1 m0 m1 /\
HS.color r1 = c /\
is_eternal_region r1 /\
(r1, m1) == HS.new_eternal_region m0 r0 (Some c))) | false | false | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ralloc_post : i: FStar.Monotonic.HyperHeap.rid ->
init: a ->
m0: FStar.Monotonic.HyperStack.mem ->
x: FStar.HyperStack.ST.mreference a rel ->
m1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | [] | FStar.HyperStack.ST.ralloc_post | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
i: FStar.Monotonic.HyperHeap.rid ->
init: a ->
m0: FStar.Monotonic.HyperStack.mem ->
x: FStar.HyperStack.ST.mreference a rel ->
m1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | {
"end_col": 21,
"end_line": 386,
"start_col": 54,
"start_line": 381
} |
|
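The row above defines `ralloc_post`, the postcondition of heap allocation. As an illustrative sketch (not part of the dataset, and assuming `FStar.HyperStack` and `FStar.HyperStack.ST` are open as in the file context above), a thin wrapper around `ralloc` can re-state that postcondition verbatim:
(* Hypothetical sketch, not from the source file: allocate a reference
   holding 0 in an eternal region i; the ensures clause is exactly
   ralloc's own postcondition, ralloc_post i 0. *)
let alloc_zero (i:rid) : ST (ref int)
    (requires (fun _ -> is_eternal_region i))
    (ensures  (fun m0 r m1 -> ralloc_post i 0 m0 r m1))
  = ralloc i 0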
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let salloc_post (#a:Type) (#rel:preorder a) (init:a) (m0:mem)
(s:mreference a rel{is_stack_region (frameOf s)}) (m1:mem)
= is_stack_region (get_tip m0) /\
Map.domain (get_hmap m0) == Map.domain (get_hmap m1) /\
get_tip m0 == get_tip m1 /\
frameOf s = get_tip m1 /\
HS.fresh_ref s m0 m1 /\ //it's a fresh reference in the top frame
m1 == HyperStack.upd m0 s init | let salloc_post
(#a: Type)
(#rel: preorder a)
(init: a)
(m0: mem)
(s: mreference a rel {is_stack_region (frameOf s)})
(m1: mem)
= | false | null | false | is_stack_region (get_tip m0) /\ Map.domain (get_hmap m0) == Map.domain (get_hmap m1) /\
get_tip m0 == get_tip m1 /\ frameOf s = get_tip m1 /\ HS.fresh_ref s m0 m1 /\
m1 == HyperStack.upd m0 s init | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.mreference",
"Prims.b2t",
"FStar.Monotonic.HyperStack.is_stack_region",
"FStar.Monotonic.HyperStack.frameOf",
"Prims.l_and",
"FStar.Monotonic.HyperStack.get_tip",
"Prims.eq2",
"FStar.Set.set",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Map.domain",
"FStar.Monotonic.Heap.heap",
"FStar.Monotonic.HyperStack.get_hmap",
"Prims.op_Equality",
"FStar.Monotonic.HyperStack.fresh_ref",
"FStar.Monotonic.HyperStack.upd",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
// * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
// * the advantage is that, the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
// * marking these opaque, since we expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) =
forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1
(* predicates *)
val same_refs_in_all_regions (m0 m1:mem) :Type0
val same_refs_in_stack_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_stack_regions (m0 m1:mem) :Type0
(* intro and elim forms *)
val lemma_same_refs_in_all_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_region m0 m1)) (ensures (same_refs_in_all_regions m0 m1))
[SMTPat (same_refs_in_all_regions m0 m1)]
val lemma_same_refs_in_all_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_all_regions m0 m1 /\ contained_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_stack_region m0 m1)) (ensures (same_refs_in_stack_regions m0 m1))
[SMTPat (same_refs_in_stack_regions m0 m1)]
val lemma_same_refs_in_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_stack_regions m0 m1 /\ contained_stack_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_region m0 m1)) (ensures (same_refs_in_non_tip_regions m0 m1))
[SMTPat (same_refs_in_non_tip_regions m0 m1)]
val lemma_same_refs_in_non_tip_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_regions m0 m1 /\ contained_non_tip_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_stack_region m0 m1)) (ensures (same_refs_in_non_tip_stack_regions m0 m1))
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1)]
val lemma_same_refs_in_non_tip_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_stack_regions m0 m1 /\ contained_non_tip_stack_region m0 m1 r))
(ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r);];
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
(******)
let equal_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\
same_refs_in_all_regions m0 m1
val lemma_equal_domains_trans (m0 m1 m2:mem)
:Lemma (requires (equal_domains m0 m1 /\ equal_domains m1 m2))
(ensures (equal_domains m0 m2))
[SMTPat (equal_domains m0 m1); SMTPat (equal_domains m1 m2)]
(**
* Effect of stack-based code: the 'equal_domains' clause enforces that
* - both mem have the same tip
* - both mem reference the same heaps (their map: rid -> heap have the same domain)
* - in each region id, the corresponding heaps contain the same references on both sides
*)
effect Stack (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_domains h h1) ==> p a h1)) (* WP *)
(**
* Effect of heap-based code.
* - assumes that the stack is empty (tip = root)
* - corresponds to the HyperHeap ST effect
* - can call to Stack and ST code freely
* - respects the stack invariant: the stack has to be empty when returning
*)
effect Heap (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ get_tip h = HS.root /\ get_tip h1 = HS.root ) ==> p a h1)) (* WP *)
let equal_stack_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
same_refs_in_stack_regions m0 m1
(**
* Effect of low-level code:
* - maintains the allocation invariant on the stack: no allocation unless in a new frame that has to be popped before returning
* - no constraints on heap allocation
*)
effect ST (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_stack_domains h h1) ==> p a h1)) (* WP *)
effect St (a:Type) = ST a (fun _ -> True) (fun _ _ _ -> True)
let inline_stack_inv h h' : GTot Type0 =
(* The frame invariant is enforced *)
get_tip h == get_tip h' /\
(* The heap structure is unchanged *)
Map.domain (get_hmap h) == Map.domain (get_hmap h') /\
(* Any region that is not the tip has not seen any allocation *)
same_refs_in_non_tip_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other terms, the backend has to unfold the body into the caller's body.
* This effect maintains the stack AND the heap invariant: it can be inlined in the Stack effect
* function body as well as in a Heap effect function body
*)
effect StackInline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ is_stack_region (get_tip h) /\ (forall a h1. (pre h /\ post h a h1 /\ inline_stack_inv h h1) ==> p a h1)) (* WP *)
let inline_inv h h' : GTot Type0 =
(* The stack invariant is enforced *)
get_tip h == get_tip h' /\
(* No frame may have received an allocation but the tip *)
same_refs_in_non_tip_stack_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other terms, the backend has to unfold the body into the caller's body.
* This effect only maintains the stack invariant: the tip is left unchanged and no allocation
* may occur in the stack lower than the tip.
* Region allocation is not constrained.
* Heap allocation is not constrained.
*)
effect Inline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ inline_inv h h1) ==> p a h1)) (* WP *)
(**
* TODO:
* REMOVE AS SOON AS CONSENSUS IS REACHED ON NEW LOW EFFECT NAMES
*)
effect STL (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = Stack a pre post
sub_effect
DIV ~> STATE = fun (a:Type) (wp:pure_wp a) (p:st_post a) (h:mem) -> wp (fun a -> p a h)
(*
* AR: The clients should open HyperStack.ST after the memory model files (as with Heap and FStar.ST)
*)
type mreference (a:Type) (rel:preorder a) =
r:HS.mreference a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mstackref (a:Type) (rel:preorder a) =
r:HS.mstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mref (a:Type) (rel:preorder a) =
r:HS.mref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmstackref (a:Type) (rel:preorder a) =
r:HS.mmmstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmref (a:Type) (rel:preorder a) =
r:HS.mmmref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type s_mref (i:rid) (a:Type) (rel:preorder a) =
r:HS.s_mref i a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type reference (a:Type) = mreference a (Heap.trivial_preorder a)
type stackref (a:Type) = mstackref a (Heap.trivial_preorder a)
type ref (a:Type) = mref a (Heap.trivial_preorder a)
type mmstackref (a:Type) = mmmstackref a (Heap.trivial_preorder a)
type mmref (a:Type) = mmmref a (Heap.trivial_preorder a)
type s_ref (i:rid) (a:Type) = s_mref i a (Heap.trivial_preorder a)
let is_eternal_region (r:rid) :Type0
= HS.is_eternal_region_hs r /\ (r == HS.root \/ witnessed (region_contains_pred r))
(*
* AR: The change to using ST.rid may not be that bad itself,
* since subtyping should take care of most instances in the client usage.
* However, one case where it could be an issue is modifies clauses that use
* Set.set rid.
*)
(** Pushes a new empty frame on the stack **)
val push_frame (_:unit) :Unsafe unit (requires (fun m -> True)) (ensures (fun (m0:mem) _ (m1:mem) -> fresh_frame m0 m1))
(** Removes old frame from the stack **)
val pop_frame (_:unit)
:Unsafe unit (requires (fun m -> poppable m))
(ensures (fun (m0:mem) _ (m1:mem) -> poppable m0 /\ m1 == pop m0 /\ popped m0 m1))
#push-options "--z3rlimit 40"
let salloc_post (#a:Type) (#rel:preorder a) (init:a) (m0:mem) | false | false | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 40,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val salloc_post : init: a ->
m0: FStar.Monotonic.HyperStack.mem ->
s:
FStar.HyperStack.ST.mreference a rel
{FStar.Monotonic.HyperStack.is_stack_region (FStar.Monotonic.HyperStack.frameOf s)} ->
m1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | [] | FStar.HyperStack.ST.salloc_post | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
init: a ->
m0: FStar.Monotonic.HyperStack.mem ->
s:
FStar.HyperStack.ST.mreference a rel
{FStar.Monotonic.HyperStack.is_stack_region (FStar.Monotonic.HyperStack.frameOf s)} ->
m1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | {
"end_col": 34,
"end_line": 332,
"start_col": 4,
"start_line": 327
} |
|
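The row above defines `salloc_post`, the postcondition of stack allocation. The canonical usage pattern, sketched below purely for illustration (it is not part of the dataset and assumes the same opens as the file context above), pushes a fresh frame, allocates with `salloc`, mutates and reads the reference, and pops the frame before returning:
(* Hypothetical sketch, not from the source file.  After salloc 0 the new
   stack reference holds 0; it is incremented once and read back, so the
   returned value is 1 (the trivial ensures keeps proof obligations small). *)
let incr_on_stack () : Stack int
    (requires (fun _ -> True))
    (ensures  (fun _ _ _ -> True))
  = push_frame ();
    let r : stackref int = salloc 0 in
    r := !r + 1;
    let v = !r in
    pop_frame ();
    v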
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Preorder",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let assign_post (#a:Type) (#rel:preorder a) (r:mreference a rel) (v:a) (m0:mem) (_:unit) (m1:mem) =
m0 `contains` r /\ m1 == HyperStack.upd m0 r v | let assign_post
(#a: Type)
(#rel: preorder a)
(r: mreference a rel)
(v: a)
(m0: mem)
(_: unit)
(m1: mem)
= | false | null | false | m0 `contains` r /\ m1 == HyperStack.upd m0 r v | {
"checked_file": "FStar.HyperStack.ST.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked"
],
"interface_file": false,
"source_file": "FStar.HyperStack.ST.fsti"
} | [
"total"
] | [
"FStar.Preorder.preorder",
"FStar.HyperStack.ST.mreference",
"FStar.Monotonic.HyperStack.mem",
"Prims.unit",
"Prims.l_and",
"FStar.Monotonic.HyperStack.contains",
"Prims.eq2",
"FStar.Monotonic.HyperStack.upd",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.HyperStack.ST
open FStar.HyperStack
module HS = FStar.HyperStack
open FStar.Preorder
(* Setting up the preorder for mem *)
(* Starting the predicates that constitute the preorder *)
[@@"opaque_to_smt"]
private unfold let contains_region (m:mem) (r:rid) = get_hmap m `Map.contains` r
(* The preorder is the conjunction of above predicates *)
val mem_rel :preorder mem
type mem_predicate = mem -> Type0
(* Predicates that we will witness with regions and refs *)
val region_contains_pred (r:HS.rid) :mem_predicate
val ref_contains_pred (#a:Type) (#rel:preorder a) (r:HS.mreference a rel) :mem_predicate
(***** Global ST (GST) effect with put, get, witness, and recall *****)
new_effect GST = STATE_h mem
let gst_pre = st_pre_h mem
let gst_post' (a:Type) (pre:Type) = st_post_h' mem a pre
let gst_post (a:Type) = st_post_h mem a
let gst_wp (a:Type) = st_wp_h mem a
unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:mem) = wp (fun a -> p a h)
sub_effect DIV ~> GST = lift_div_gst
(*
* AR: A few notes about the interface:
* - The interface closely mimics the interface we formalized in our POPL'18 paper
* - Specifically, `witnessed` is defined for any mem_predicate (not necessarily stable ones)
* - `stable p` is a precondition for `gst_witness`
* - `gst_recall` does not have a precondition for `stable p`, since `gst_witness` is the only way
* clients would have obtained `witnessed p`, and so, `p` should already be stable
* - `lemma_functoriality` does not require stability for either `p` or `q`
* Our metatheory ensures that this is sound (without requiring stability of `q`)
* This form is useful in defining the MRRef interface (see mr_witness)
*)
val stable (p:mem_predicate) :Type0
val witnessed (p:mem_predicate) :Type0
(* TODO: we should derive these using DM4F *)
private val gst_get: unit -> GST mem (fun p h0 -> p h0 h0)
private val gst_put: h1:mem -> GST unit (fun p h0 -> mem_rel h0 h1 /\ p () h1)
private val gst_witness: p:mem_predicate -> GST unit (fun post h0 -> p h0 /\ stable p /\ (witnessed p ==> post () h0))
private val gst_recall: p:mem_predicate -> GST unit (fun post h0 -> witnessed p /\ (p h0 ==> post () h0))
val lemma_functoriality (p:mem_predicate{witnessed p}) (q:mem_predicate{(forall (h:mem). p h ==> q h)})
: Lemma (witnessed q)
let st_pre = gst_pre
let st_post' = gst_post'
let st_post = gst_post
let st_wp = gst_wp
new_effect STATE = GST
unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp
sub_effect GST ~> STATE = lift_gst_state
(* effect State (a:Type) (wp:st_wp a) = *)
(* STATE a wp *)
(**
WARNING: this effect is unsafe, for C/C++ extraction it shall only be used by
code that would later extract to OCaml or by library functions
*)
effect Unsafe (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. pre h /\ post h a h1 ==> p a h1)) (* WP *)
(****** defining predicates for equal refs in some regions ******)
(*
// * AR: (maybe this is overkill)
// * various effects below talk about refs being equal in some regions (all regions, stack regions, etc.)
// * this was done by defining, for example, an equal_dom predicate with a (forall (r:rid)) quantifier
// * this quantifier was only guarded with Map.contains (HS.get_hmap m) r
// * which meant it could fire for all the contained regions
// *
// * instead now we define abstract predicates, e.g. same_refs_in_all_regions, and provide intro and elim forms
// * the advantage is that, the (lemma) quantifiers are now guarded additionally by same_refs_in_all_regions kind
// * of predicates, and hence should fire more contextually
// * should profile the queries to see if it actually helps
// *)
(*
// * marking these opaque, since we expect them to be unfolded away beforehand
// *)
[@@"opaque_to_smt"]
unfold private let equal_heap_dom (r:rid) (m0 m1:mem) :Type0
= Heap.equal_dom (get_hmap m0 `Map.sel` r) (get_hmap m1 `Map.sel` r)
[@@"opaque_to_smt"]
unfold private let contained_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> m0 `contains_region` r /\ m1 `contains_region` r
[@@"opaque_to_smt"]
unfold private let contained_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> r =!= get_tip m0 /\ r =!= get_tip m1 /\ contained_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let contained_non_tip_stack_region :mem -> mem -> rid -> Type0
= fun m0 m1 r -> is_stack_region r /\ contained_non_tip_region m0 m1 r
[@@"opaque_to_smt"]
unfold private let same_refs_common (p:mem -> mem -> rid -> Type0) (m0 m1:mem) =
forall (r:rid). p m0 m1 r ==> equal_heap_dom r m0 m1
(* predicates *)
val same_refs_in_all_regions (m0 m1:mem) :Type0
val same_refs_in_stack_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_regions (m0 m1:mem) :Type0
val same_refs_in_non_tip_stack_regions (m0 m1:mem) :Type0
(* intro and elim forms *)
val lemma_same_refs_in_all_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_region m0 m1)) (ensures (same_refs_in_all_regions m0 m1))
[SMTPat (same_refs_in_all_regions m0 m1)]
val lemma_same_refs_in_all_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_all_regions m0 m1 /\ contained_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_all_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_stack_region m0 m1)) (ensures (same_refs_in_stack_regions m0 m1))
[SMTPat (same_refs_in_stack_regions m0 m1)]
val lemma_same_refs_in_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_stack_regions m0 m1 /\ contained_stack_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_region m0 m1)) (ensures (same_refs_in_non_tip_regions m0 m1))
[SMTPat (same_refs_in_non_tip_regions m0 m1)]
val lemma_same_refs_in_non_tip_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_regions m0 m1 /\ contained_non_tip_region m0 m1 r)) (ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m0 `contains_region` r)];
[SMTPat (same_refs_in_non_tip_regions m0 m1); SMTPat (m1 `contains_region` r)]]]
val lemma_same_refs_in_non_tip_stack_regions_intro (m0 m1:mem)
:Lemma (requires (same_refs_common contained_non_tip_stack_region m0 m1)) (ensures (same_refs_in_non_tip_stack_regions m0 m1))
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1)]
val lemma_same_refs_in_non_tip_stack_regions_elim (m0 m1:mem) (r:rid)
:Lemma (requires (same_refs_in_non_tip_stack_regions m0 m1 /\ contained_non_tip_stack_region m0 m1 r))
(ensures (equal_heap_dom r m0 m1))
[SMTPatOr [[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m0 `contains_region` r);];
[SMTPat (same_refs_in_non_tip_stack_regions m0 m1); SMTPat (is_stack_region r); SMTPat (m1 `contains_region` r)]]]
(******)
let equal_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
Set.equal (Map.domain (get_hmap m0)) (Map.domain (get_hmap m1)) /\
same_refs_in_all_regions m0 m1
val lemma_equal_domains_trans (m0 m1 m2:mem)
:Lemma (requires (equal_domains m0 m1 /\ equal_domains m1 m2))
(ensures (equal_domains m0 m2))
[SMTPat (equal_domains m0 m1); SMTPat (equal_domains m1 m2)]
(**
* Effect of stack-based code: the 'equal_domains' clause enforces that
* - both mem have the same tip
* - both mem reference the same heaps (their map: rid -> heap have the same domain)
* - in each region id, the corresponding heaps contain the same references on both sides
*)
effect Stack (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_domains h h1) ==> p a h1)) (* WP *)
(**
* Effect of heap-based code.
* - assumes that the stack is empty (tip = root)
* - corresponds to the HyperHeap ST effect
* - can call to Stack and ST code freely
* - respects the stack invariant: the stack has to be empty when returning
*)
effect Heap (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ get_tip h = HS.root /\ get_tip h1 = HS.root ) ==> p a h1)) (* WP *)
let equal_stack_domains (m0 m1:mem) =
get_tip m0 == get_tip m1 /\
same_refs_in_stack_regions m0 m1
(**
* Effect of low-level code:
* - maintains the allocation invariant on the stack: no allocation unless in a new frame that has to be popped before returning
* - no constraints on heap allocation
*)
effect ST (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ equal_stack_domains h h1) ==> p a h1)) (* WP *)
effect St (a:Type) = ST a (fun _ -> True) (fun _ _ _ -> True)
let inline_stack_inv h h' : GTot Type0 =
(* The frame invariant is enforced *)
get_tip h == get_tip h' /\
(* The heap structure is unchanged *)
Map.domain (get_hmap h) == Map.domain (get_hmap h') /\
(* Any region that is not the tip has not seen any allocation *)
same_refs_in_non_tip_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other terms, the backend has to unfold the body into the caller's body.
* This effect maintains the stack AND the heap invariant: it can be inlined in the Stack effect
* function body as well as in a Heap effect function body
*)
effect StackInline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ is_stack_region (get_tip h) /\ (forall a h1. (pre h /\ post h a h1 /\ inline_stack_inv h h1) ==> p a h1)) (* WP *)
let inline_inv h h' : GTot Type0 =
(* The stack invariant is enforced *)
get_tip h == get_tip h' /\
(* No frame may have received an allocation but the tip *)
same_refs_in_non_tip_stack_regions h h'
(**
* Effect that indicates to the Karamel compiler that allocation may occur in the caller's frame.
* In other terms, the backend has to unfold the body into the caller's body.
* This effect only maintains the stack invariant: the tip is left unchanged and no allocation
* may occur in the stack lower than the tip.
* Region allocation is not constrained.
* Heap allocation is not constrained.
*)
effect Inline (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) =
STATE a
(fun (p:st_post a) (h:mem) -> pre h /\ (forall a h1. (pre h /\ post h a h1 /\ inline_inv h h1) ==> p a h1)) (* WP *)
(**
* TODO:
* REMOVE AS SOON AS CONSENSUS IS REACHED ON NEW LOW EFFECT NAMES
*)
effect STL (a:Type) (pre:st_pre) (post: (m0:mem -> Tot (st_post' a (pre m0)))) = Stack a pre post
sub_effect
DIV ~> STATE = fun (a:Type) (wp:pure_wp a) (p:st_post a) (h:mem) -> wp (fun a -> p a h)
(*
* AR: The clients should open HyperStack.ST after the memory model files (as with Heap and FStar.ST)
*)
type mreference (a:Type) (rel:preorder a) =
r:HS.mreference a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mstackref (a:Type) (rel:preorder a) =
r:HS.mstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mref (a:Type) (rel:preorder a) =
r:HS.mref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmstackref (a:Type) (rel:preorder a) =
r:HS.mmmstackref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type mmmref (a:Type) (rel:preorder a) =
r:HS.mmmref a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type s_mref (i:rid) (a:Type) (rel:preorder a) =
r:HS.s_mref i a rel{witnessed (ref_contains_pred r) /\
witnessed (region_contains_pred (HS.frameOf r))}
type reference (a:Type) = mreference a (Heap.trivial_preorder a)
type stackref (a:Type) = mstackref a (Heap.trivial_preorder a)
type ref (a:Type) = mref a (Heap.trivial_preorder a)
type mmstackref (a:Type) = mmmstackref a (Heap.trivial_preorder a)
type mmref (a:Type) = mmmref a (Heap.trivial_preorder a)
type s_ref (i:rid) (a:Type) = s_mref i a (Heap.trivial_preorder a)
let is_eternal_region (r:rid) :Type0
= HS.is_eternal_region_hs r /\ (r == HS.root \/ witnessed (region_contains_pred r))
(*
* AR: The change to using ST.rid may not be that bad itself,
* since subtyping should take care of most instances in the client usage.
* However, one case where it could be an issue is modifies clauses that use
* Set.set rid.
*)
(** Pushes a new empty frame on the stack **)
val push_frame (_:unit) :Unsafe unit (requires (fun m -> True)) (ensures (fun (m0:mem) _ (m1:mem) -> fresh_frame m0 m1))
(** Removes old frame from the stack **)
val pop_frame (_:unit)
:Unsafe unit (requires (fun m -> poppable m))
(ensures (fun (m0:mem) _ (m1:mem) -> poppable m0 /\ m1 == pop m0 /\ popped m0 m1))
#push-options "--z3rlimit 40"
let salloc_post (#a:Type) (#rel:preorder a) (init:a) (m0:mem)
(s:mreference a rel{is_stack_region (frameOf s)}) (m1:mem)
= is_stack_region (get_tip m0) /\
Map.domain (get_hmap m0) == Map.domain (get_hmap m1) /\
get_tip m0 == get_tip m1 /\
frameOf s = get_tip m1 /\
HS.fresh_ref s m0 m1 /\ //it's a fresh reference in the top frame
m1 == HyperStack.upd m0 s init //and it's been initialized
#pop-options
(**
* Allocates on the top-most stack frame
*)
val salloc (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
// JP, AR: these are not supported in C, and `salloc` already benefits from
// automatic memory management.
[@@ (deprecated "Use salloc instead") ]
val salloc_mm (#a:Type) (#rel:preorder a) (init:a)
:StackInline (mmmstackref a rel) (requires (fun m -> is_stack_region (get_tip m)))
(ensures salloc_post init)
[@@ (deprecated "Use salloc instead") ]
val sfree (#a:Type) (#rel:preorder a) (r:mmmstackref a rel)
:StackInline unit (requires (fun m0 -> frameOf r = get_tip m0 /\ m0 `contains` r))
(ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0))
unfold
let new_region_post_common (r0 r1:rid) (m0 m1:mem) =
r1 `HS.extends` r0 /\
HS.fresh_region r1 m0 m1 /\
get_hmap m1 == Map.upd (get_hmap m0) r1 Heap.emp /\
get_tip m1 == get_tip m0 /\
HS.live_region m0 r0
val new_region (r0:rid)
:ST rid
(requires (fun m -> is_eternal_region r0))
(ensures (fun m0 r1 m1 ->
new_region_post_common r0 r1 m0 m1 /\
HS.color r1 = HS.color r0 /\
is_eternal_region r1 /\
(r1, m1) == HS.new_eternal_region m0 r0 None))
val new_colored_region (r0:rid) (c:int)
:ST rid
(requires (fun m -> HS.is_heap_color c /\ is_eternal_region r0))
(ensures (fun m0 r1 m1 ->
new_region_post_common r0 r1 m0 m1 /\
HS.color r1 = c /\
is_eternal_region r1 /\
(r1, m1) == HS.new_eternal_region m0 r0 (Some c)))
let ralloc_post (#a:Type) (#rel:preorder a) (i:rid) (init:a) (m0:mem)
(x:mreference a rel) (m1:mem) =
let region_i = get_hmap m0 `Map.sel` i in
as_ref x `Heap.unused_in` region_i /\
i `is_in` get_hmap m0 /\
i = frameOf x /\
m1 == upd m0 x init
val ralloc (#a:Type) (#rel:preorder a) (i:rid) (init:a)
:ST (mref a rel) (requires (fun m -> is_eternal_region i))
(ensures (ralloc_post i init))
val ralloc_mm (#a:Type) (#rel:preorder a) (i:rid) (init:a)
:ST (mmmref a rel) (requires (fun m -> is_eternal_region i))
(ensures (ralloc_post i init))
(*
* AR: 12/26: For a ref to be readable/writable/free-able,
* the client can either prove contains
* or give us enough so that we can use monotonicity to derive contains
*)
let is_live_for_rw_in (#a:Type) (#rel:preorder a) (r:mreference a rel) (m:mem) :Type0 =
(m `contains` r) \/
(let i = HS.frameOf r in
(is_eternal_region i \/ i `HS.is_above` get_tip m) /\
(not (is_mm r) \/ m `HS.contains_ref_in_its_region` r))
val rfree (#a:Type) (#rel:preorder a) (r:mreference a rel{HS.is_mm r /\ HS.is_heap_color (HS.color (HS.frameOf r))})
:ST unit (requires (fun m0 -> r `is_live_for_rw_in` m0))
(ensures (fun m0 _ m1 -> m0 `contains` r /\ m1 == HS.free r m0)) | false | false | FStar.HyperStack.ST.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val assign_post : r: FStar.HyperStack.ST.mreference a rel ->
v: a ->
m0: FStar.Monotonic.HyperStack.mem ->
_: Prims.unit ->
m1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | [] | FStar.HyperStack.ST.assign_post | {
"file_name": "ulib/FStar.HyperStack.ST.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
r: FStar.HyperStack.ST.mreference a rel ->
v: a ->
m0: FStar.Monotonic.HyperStack.mem ->
_: Prims.unit ->
m1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | {
"end_col": 48,
"end_line": 412,
"start_col": 2,
"start_line": 412
} |
|
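The row above defines `assign_post`, the postcondition shape of the `:=` write. As an illustrative sketch (not part of the dataset; it assumes the same opens as the file context above), a wrapper in the `Stack` effect can expose it directly:
(* Hypothetical sketch, not from the source file: write 1 into a live
   reference; the ensures clause re-states the write's postcondition. *)
let write_one (r:ref int) : Stack unit
    (requires (fun m -> r `is_live_for_rw_in` m))
    (ensures  (fun m0 u m1 -> assign_post r 1 m0 u m1))
  = r := 1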
Prims.Tot | [
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax.Naming",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let set_minus (#a:eqtype) (s:Set.set a) (x:a) =
Set.intersect s (Set.complement (Set.singleton x)) | let set_minus (#a: eqtype) (s: Set.set a) (x: a) = | false | null | false | Set.intersect s (Set.complement (Set.singleton x)) | {
"checked_file": "Pulse.Typing.FV.fsti.checked",
"dependencies": [
"Pulse.Typing.fst.checked",
"Pulse.Syntax.Naming.fsti.checked",
"Pulse.Syntax.fst.checked",
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Pulse.Typing.FV.fsti"
} | [
"total"
] | [
"Prims.eqtype",
"FStar.Set.set",
"FStar.Set.intersect",
"FStar.Set.complement",
"FStar.Set.singleton"
] | [] | module Pulse.Typing.FV
module RT = FStar.Reflection.Typing
module R = FStar.Reflection.V2
module L = FStar.List.Tot
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Syntax.Naming
open Pulse.Typing
let mem_intension_pat (#a:eqtype) (x:a) (f:(a -> Tot bool))
: Lemma
(ensures FStar.Set.(mem x (intension f) = f x))
[SMTPat FStar.Set.(mem x (intension f))]
= Set.mem_intension x f
let contains (g:env) (x:var) = Some? (lookup g x)
let vars_of_env (g:env) = dom g | false | false | Pulse.Typing.FV.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val set_minus : s: FStar.Set.set a -> x: a -> FStar.Set.set a | [] | Pulse.Typing.FV.set_minus | {
"file_name": "lib/steel/pulse/Pulse.Typing.FV.fsti",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | s: FStar.Set.set a -> x: a -> FStar.Set.set a | {
"end_col": 52,
"end_line": 20,
"start_col": 2,
"start_line": 20
} |
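As a quick illustration of what `set_minus` computes, the sketch below characterizes membership after removing `x`. It is an illustrative addition, not part of the record: it assumes only the standard `FStar.Set` membership lemmas (`mem_intersect`, `mem_complement`, `mem_singleton`, all carrying SMT patterns), so the proof is expected to be discharged by SMT alone.

(* set_minus s x keeps every element of s except x *)
let set_minus_mem (#a:eqtype) (s:Set.set a) (x y:a)
  : Lemma (Set.mem y (set_minus s x) = (Set.mem y s && y <> x))
  = ()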
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax.Naming",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let contains (g:env) (x:var) = Some? (lookup g x) | let contains (g: env) (x: var) = | false | null | false | Some? (lookup g x) | {
"checked_file": "Pulse.Typing.FV.fsti.checked",
"dependencies": [
"Pulse.Typing.fst.checked",
"Pulse.Syntax.Naming.fsti.checked",
"Pulse.Syntax.fst.checked",
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Pulse.Typing.FV.fsti"
} | [
"total"
] | [
"Pulse.Typing.Env.env",
"Pulse.Syntax.Base.var",
"FStar.Pervasives.Native.uu___is_Some",
"Pulse.Syntax.Base.typ",
"Pulse.Typing.Env.lookup",
"Prims.bool"
] | [] | module Pulse.Typing.FV
module RT = FStar.Reflection.Typing
module R = FStar.Reflection.V2
module L = FStar.List.Tot
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Syntax.Naming
open Pulse.Typing
let mem_intension_pat (#a:eqtype) (x:a) (f:(a -> Tot bool))
: Lemma
(ensures FStar.Set.(mem x (intension f) = f x))
[SMTPat FStar.Set.(mem x (intension f))]
= Set.mem_intension x f | false | true | Pulse.Typing.FV.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val contains : g: Pulse.Typing.Env.env -> x: Pulse.Syntax.Base.var -> Prims.bool | [] | Pulse.Typing.FV.contains | {
"file_name": "lib/steel/pulse/Pulse.Typing.FV.fsti",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | g: Pulse.Typing.Env.env -> x: Pulse.Syntax.Base.var -> Prims.bool | {
"end_col": 49,
"end_line": 16,
"start_col": 31,
"start_line": 16
} |
|
FStar.Pervasives.Lemma | val mem_intension_pat (#a: eqtype) (x: a) (f: (a -> Tot bool))
: Lemma (ensures FStar.Set.(mem x (intension f) = f x)) [SMTPat FStar.Set.(mem x (intension f))] | [
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax.Naming",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mem_intension_pat (#a:eqtype) (x:a) (f:(a -> Tot bool))
: Lemma
(ensures FStar.Set.(mem x (intension f) = f x))
[SMTPat FStar.Set.(mem x (intension f))]
= Set.mem_intension x f | val mem_intension_pat (#a: eqtype) (x: a) (f: (a -> Tot bool))
: Lemma (ensures FStar.Set.(mem x (intension f) = f x)) [SMTPat FStar.Set.(mem x (intension f))]
let mem_intension_pat (#a: eqtype) (x: a) (f: (a -> Tot bool))
: Lemma (ensures FStar.Set.(mem x (intension f) = f x)) [SMTPat FStar.Set.(mem x (intension f))] = | false | null | true | Set.mem_intension x f | {
"checked_file": "Pulse.Typing.FV.fsti.checked",
"dependencies": [
"Pulse.Typing.fst.checked",
"Pulse.Syntax.Naming.fsti.checked",
"Pulse.Syntax.fst.checked",
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Pulse.Typing.FV.fsti"
} | [
"lemma"
] | [
"Prims.eqtype",
"Prims.bool",
"FStar.Set.mem_intension",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.b2t",
"Prims.op_Equality",
"FStar.Set.mem",
"FStar.Set.intension",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | module Pulse.Typing.FV
module RT = FStar.Reflection.Typing
module R = FStar.Reflection.V2
module L = FStar.List.Tot
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Syntax.Naming
open Pulse.Typing
let mem_intension_pat (#a:eqtype) (x:a) (f:(a -> Tot bool))
: Lemma
(ensures FStar.Set.(mem x (intension f) = f x)) | false | false | Pulse.Typing.FV.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mem_intension_pat (#a: eqtype) (x: a) (f: (a -> Tot bool))
: Lemma (ensures FStar.Set.(mem x (intension f) = f x)) [SMTPat FStar.Set.(mem x (intension f))] | [] | Pulse.Typing.FV.mem_intension_pat | {
"file_name": "lib/steel/pulse/Pulse.Typing.FV.fsti",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | x: a -> f: (_: a -> Prims.bool)
-> FStar.Pervasives.Lemma (ensures FStar.Set.mem x (FStar.Set.intension f) = f x)
[SMTPat (FStar.Set.mem x (FStar.Set.intension f))] | {
"end_col": 25,
"end_line": 14,
"start_col": 4,
"start_line": 14
} |
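The point of restating `Set.mem_intension` with an [SMTPat] is that membership in an intension-defined set then unfolds automatically wherever this module is in scope. A minimal illustrative sketch of the effect, assuming `Pulse.Typing.FV` (and hence the pattern above) is available:

let intension_example (f:(int -> bool)) (x:int)
  : Lemma (Set.mem x (Set.intension f) = f x)
  = ()  (* the pattern on mem_intension_pat fires; no explicit lemma call is needed *)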
Prims.Tot | [
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax.Naming",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let vars_of_env (g:env) = dom g | let vars_of_env (g: env) = | false | null | false | dom g | {
"checked_file": "Pulse.Typing.FV.fsti.checked",
"dependencies": [
"Pulse.Typing.fst.checked",
"Pulse.Syntax.Naming.fsti.checked",
"Pulse.Syntax.fst.checked",
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Pulse.Typing.FV.fsti"
} | [
"total"
] | [
"Pulse.Typing.Env.env",
"Pulse.Typing.Env.dom",
"FStar.Set.set",
"Pulse.Syntax.Base.var"
] | [] | module Pulse.Typing.FV
module RT = FStar.Reflection.Typing
module R = FStar.Reflection.V2
module L = FStar.List.Tot
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Syntax.Naming
open Pulse.Typing
let mem_intension_pat (#a:eqtype) (x:a) (f:(a -> Tot bool))
: Lemma
(ensures FStar.Set.(mem x (intension f) = f x))
[SMTPat FStar.Set.(mem x (intension f))]
= Set.mem_intension x f | false | true | Pulse.Typing.FV.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val vars_of_env : g: Pulse.Typing.Env.env -> FStar.Set.set Pulse.Syntax.Base.var | [] | Pulse.Typing.FV.vars_of_env | {
"file_name": "lib/steel/pulse/Pulse.Typing.FV.fsti",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | g: Pulse.Typing.Env.env -> FStar.Set.set Pulse.Syntax.Base.var | {
"end_col": 31,
"end_line": 17,
"start_col": 26,
"start_line": 17
} |
|
FStar.Pervasives.Lemma | val st_typing_freevars_inv (#g #t #c: _) (d: st_typing g t c) (x: var)
: Lemma (requires None? (lookup g x))
(ensures ~(x `Set.mem` (freevars_st t)) /\ ~(x `Set.mem` (freevars_comp c))) | [
{
"abbrev": false,
"full_module": "Pulse.Soundness.Common",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Elaborate",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax.Naming",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let st_typing_freevars_inv (#g:_) (#t:_) (#c:_)
(d:st_typing g t c)
(x:var)
: Lemma
(requires None? (lookup g x))
(ensures ~(x `Set.mem` freevars_st t) /\
~(x `Set.mem` freevars_comp c))
= st_typing_freevars d | val st_typing_freevars_inv (#g #t #c: _) (d: st_typing g t c) (x: var)
: Lemma (requires None? (lookup g x))
(ensures ~(x `Set.mem` (freevars_st t)) /\ ~(x `Set.mem` (freevars_comp c)))
let st_typing_freevars_inv (#g #t #c: _) (d: st_typing g t c) (x: var)
: Lemma (requires None? (lookup g x))
(ensures ~(x `Set.mem` (freevars_st t)) /\ ~(x `Set.mem` (freevars_comp c))) = | false | null | true | st_typing_freevars d | {
"checked_file": "Pulse.Typing.FV.fsti.checked",
"dependencies": [
"Pulse.Typing.fst.checked",
"Pulse.Syntax.Naming.fsti.checked",
"Pulse.Syntax.fst.checked",
"prims.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Pulse.Typing.FV.fsti"
} | [
"lemma"
] | [
"Pulse.Typing.Env.env",
"Pulse.Syntax.Base.st_term",
"Pulse.Syntax.Base.comp",
"Pulse.Typing.st_typing",
"Pulse.Syntax.Base.var",
"Pulse.Typing.FV.st_typing_freevars",
"Prims.unit",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_None",
"Pulse.Syntax.Base.typ",
"Pulse.Typing.Env.lookup",
"Prims.squash",
"Prims.l_and",
"Prims.l_not",
"FStar.Set.mem",
"Pulse.Syntax.Naming.freevars_st",
"Pulse.Syntax.Naming.freevars_comp",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module Pulse.Typing.FV
module RT = FStar.Reflection.Typing
module R = FStar.Reflection.V2
module L = FStar.List.Tot
open FStar.List.Tot
open Pulse.Syntax
open Pulse.Syntax.Naming
open Pulse.Typing
let mem_intension_pat (#a:eqtype) (x:a) (f:(a -> Tot bool))
: Lemma
(ensures FStar.Set.(mem x (intension f) = f x))
[SMTPat FStar.Set.(mem x (intension f))]
= Set.mem_intension x f
let contains (g:env) (x:var) = Some? (lookup g x)
let vars_of_env (g:env) = dom g
let set_minus (#a:eqtype) (s:Set.set a) (x:a) =
Set.intersect s (Set.complement (Set.singleton x))
val freevars_close_term (e:term) (x:var) (i:index)
: Lemma
(ensures freevars (close_term' e x i) ==
freevars e `set_minus` x)
[SMTPat (freevars (close_term' e x i))]
val freevars_close_st_term (e:st_term) (x:var) (i:index)
: Lemma
(ensures freevars_st (close_st_term' e x i) ==
freevars_st e `set_minus` x)
[SMTPat (freevars_st (close_st_term' e x i))]
val tot_typing_freevars (#g:_) (#t:_) (#ty:_)
(d:tot_typing g t ty)
: Lemma
(ensures freevars t `Set.subset` vars_of_env g /\
freevars ty `Set.subset` vars_of_env g)
val comp_typing_freevars (#g:_) (#c:_) (#u:_)
(d:comp_typing g c u)
: Lemma
(ensures freevars_comp c `Set.subset` vars_of_env g)
val st_typing_freevars (#g:_) (#t:_) (#c:_)
(d:st_typing g t c)
: Lemma
(ensures freevars_st t `Set.subset` vars_of_env g /\
freevars_comp c `Set.subset` vars_of_env g)
let st_typing_freevars_inv (#g:_) (#t:_) (#c:_)
(d:st_typing g t c)
(x:var)
: Lemma
(requires None? (lookup g x))
(ensures ~(x `Set.mem` freevars_st t) /\ | false | false | Pulse.Typing.FV.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val st_typing_freevars_inv (#g #t #c: _) (d: st_typing g t c) (x: var)
: Lemma (requires None? (lookup g x))
(ensures ~(x `Set.mem` (freevars_st t)) /\ ~(x `Set.mem` (freevars_comp c))) | [] | Pulse.Typing.FV.st_typing_freevars_inv | {
"file_name": "lib/steel/pulse/Pulse.Typing.FV.fsti",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | d: Pulse.Typing.st_typing g t c -> x: Pulse.Syntax.Base.var
-> FStar.Pervasives.Lemma (requires None? (Pulse.Typing.Env.lookup g x))
(ensures
~(FStar.Set.mem x (Pulse.Syntax.Naming.freevars_st t)) /\
~(FStar.Set.mem x (Pulse.Syntax.Naming.freevars_comp c))) | {
"end_col": 24,
"end_line": 59,
"start_col": 4,
"start_line": 59
} |
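A typical way this corollary is consumed: a variable that is not bound in the environment is fresh for any well-typed term. The sketch below is illustrative only and is just a direct call to `st_typing_freevars_inv`, whose postcondition includes the stated fact.

(* a variable absent from g cannot occur free in a term typed in g *)
let fresh_var_not_free (#g:_) (#t:_) (#c:_)
  (d:st_typing g t c) (x:var { None? (lookup g x) })
  : Lemma (~(x `Set.mem` freevars_st t))
  = st_typing_freevars_inv d x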
Prims.Tot | val cons (#a: Type) (x: a) (s: seq a) : Tot (seq a) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s | val cons (#a: Type) (x: a) (s: seq a) : Tot (seq a)
let cons (#a: Type) (x: a) (s: seq a) : Tot (seq a) = | false | null | false | append (create 1 x) s | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"FStar.Seq.Base.append",
"FStar.Seq.Base.create"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val cons (#a: Type) (x: a) (s: seq a) : Tot (seq a) | [] | FStar.Seq.Properties.cons | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | x: a -> s: FStar.Seq.Base.seq a -> FStar.Seq.Base.seq a | {
"end_col": 72,
"end_line": 52,
"start_col": 51,
"start_line": 52
} |
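For orientation, `cons x s` is just `create 1 x` appended in front of `s`, so its length and head are immediate. The following sketch is illustrative; it is expected to go through by SMT using the usual `FStar.Seq.Base` length and indexing lemmas, and if automation falls short, `lemma_head_append` (declared above) supplies the head fact explicitly.

(* cons adds exactly one element at the front *)
let cons_example (#a:Type) (x:a) (s:seq a)
  : Lemma (length (cons x s) = 1 + length s /\ head (cons x s) == x)
  = ()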
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s | let indexable (#a: Type) (s: Seq.seq a) (j: int) = | false | null | false | 0 <= j /\ j < Seq.length s | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"Prims.int",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"Prims.logical"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l } | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val indexable : s: FStar.Seq.Base.seq a -> j: Prims.int -> Prims.logical | [] | FStar.Seq.Properties.indexable | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s: FStar.Seq.Base.seq a -> j: Prims.int -> Prims.logical | {
"end_col": 74,
"end_line": 25,
"start_col": 48,
"start_line": 25
} |
|
Prims.Tot | val mem (#a: eqtype) (x: a) (l: seq a) : Tot bool | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0 | val mem (#a: eqtype) (x: a) (l: seq a) : Tot bool
let mem (#a: eqtype) (x: a) (l: seq a) : Tot bool = | false | null | false | count x l > 0 | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"Prims.eqtype",
"FStar.Seq.Base.seq",
"Prims.op_GreaterThan",
"FStar.Seq.Properties.count",
"Prims.bool"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mem (#a: eqtype) (x: a) (l: seq a) : Tot bool | [] | FStar.Seq.Properties.mem | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | x: a -> l: FStar.Seq.Base.seq a -> Prims.bool | {
"end_col": 62,
"end_line": 79,
"start_col": 49,
"start_line": 79
} |
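Since `mem x l` is defined as `count x l > 0`, membership distributes over `append`. FStar.Seq.Properties also declares `lemma_mem_append` with exactly this content, so the illustrative sketch below is a one-line application of it.

(* membership in an append is membership in either half *)
let mem_append_example (#a:eqtype) (x:a) (s1 s2:seq a)
  : Lemma (mem x (append s1 s2) <==> (mem x s1 \/ mem x s2))
  = lemma_mem_append s1 s2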
Prims.Tot | val snoc (#a: Type) (s: seq a) (x: a) : Tot (seq a) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x) | val snoc (#a: Type) (s: seq a) (x: a) : Tot (seq a)
let snoc (#a: Type) (s: seq a) (x: a) : Tot (seq a) = | false | null | false | Seq.append s (Seq.create 1 x) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"FStar.Seq.Base.append",
"FStar.Seq.Base.create"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val snoc (#a: Type) (s: seq a) (x: a) : Tot (seq a) | [] | FStar.Seq.Properties.snoc | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s: FStar.Seq.Base.seq a -> x: a -> FStar.Seq.Base.seq a | {
"end_col": 80,
"end_line": 318,
"start_col": 51,
"start_line": 318
} |
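Dually to `cons`, `snoc s x` appends a single element at the end. The sketch below is illustrative and assumes the `FStar.Seq.Base` indexing lemmas for `append` and `create` let SMT discharge it directly; if not, those lemmas can be invoked explicitly for the `last` conjunct.

(* snoc adds exactly one element at the back *)
let snoc_example (#a:Type) (s:seq a) (x:a)
  : Lemma (length (snoc s x) = length s + 1 /\ last (snoc s x) == x)
  = ()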
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let suffix_of
(#a: Type)
(s_suff s: seq a)
= exists s_pref . (s == append s_pref s_suff) | let suffix_of (#a: Type) (s_suff s: seq a) = | false | null | false | exists s_pref. (s == append s_pref s_suff) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"Prims.l_Exists",
"Prims.eq2",
"FStar.Seq.Base.append",
"Prims.logical"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace_subseq s i j sub: replaces the [i,j) sub-sequence of s with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
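(* Example (not part of the original file): splicing a sequence with itself
   over any window gives the sequence back; this just invokes splice_refl. *)
let example_splice_self (#a:Type) (s:seq a) (i:nat) (j:nat{i <= j /\ j <= length s})
  : Lemma (s == splice s i s j)
  = splice_refl s i j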
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
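(* Example (not part of the original file): un_snoc inverts snoc; this
   wrapper just invokes un_snoc_snoc. *)
let example_un_snoc_snoc (#a:Type) (s:seq a) (x:a)
  : Lemma (un_snoc (snoc s x) == (s, x))
  = un_snoc_snoc s x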
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l)
val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x})
: Lemma (requires (mem x s))
(ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s))))
let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True)
(ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true))))
= None? (seq_find (fun i -> not (f i)) l)
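(* Example (not part of the original file; the helper name is hypothetical):
   for_all decides a pointwise boolean property for a whole sequence. The
   refinement below mirrors for_all's postcondition and is expected to be
   discharged by the SMT solver with the default settings. *)
let example_all_nonneg (s: seq int)
  : b:bool{b == true <==> (forall (i:nat{i < length s}). index s i >= 0)}
  = for_all (fun x -> x >= 0) s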
val seq_mem_k: #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} ->
Lemma (requires True)
(ensures (mem (Seq.index s n) s))
[SMTPat (mem (Seq.index s n) s)]
module L = FStar.List.Tot
let rec seq_to_list (#a:Type) (s:seq a)
: Tot (l:list a{L.length l = length s})
(decreases (length s))
= if length s = 0 then []
else index s 0::seq_to_list (slice s 1 (length s))
[@@"opaque_to_smt"]
let rec seq_of_list (#a:Type) (l:list a) : Tot (s:seq a{L.length l = length s}) =
match l with
| [] -> Seq.empty #a
| hd::tl -> create 1 hd @| seq_of_list tl
val lemma_seq_of_list_induction (#a:Type) (l:list a)
:Lemma (requires True)
(ensures (let s = seq_of_list l in
match l with
| [] -> Seq.equal s empty
| hd::tl -> s == cons hd (seq_of_list tl) /\
head s == hd /\ tail s == (seq_of_list tl)))
val lemma_seq_list_bij: #a:Type -> s:seq a -> Lemma
(requires (True))
(ensures (seq_of_list (seq_to_list s) == s))
val lemma_list_seq_bij: #a:Type -> l:list a -> Lemma
(requires (True))
(ensures (seq_to_list (seq_of_list l) == l))
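(* Examples (not part of the original file): the two conversions are mutual
   inverses; these wrappers just invoke the bijection lemmas above. *)
let example_list_roundtrip (#a:Type) (l:list a)
  : Lemma (seq_to_list (seq_of_list l) == l)
  = lemma_list_seq_bij l

let example_seq_roundtrip (#a:Type) (s:seq a)
  : Lemma (seq_of_list (seq_to_list s) == s)
  = lemma_seq_list_bij s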
unfold let createL_post (#a:Type0) (l:list a) (s:seq a) : GTot Type0 =
normalize (L.length l = length s) /\ seq_to_list s == l /\ seq_of_list l == s
let createL (#a:Type0) (l:list a)
: Pure (seq a)
(requires True)
(ensures (fun s -> createL_post #a l s))
= let s = seq_of_list l in
lemma_list_seq_bij l;
s
val lemma_index_is_nth: #a:Type -> s:seq a -> i:nat{i < length s} -> Lemma
(requires True)
(ensures (L.index (seq_to_list s) i == index s i))
////////////////////////////////////////////////////////////////////////////////
//s `contains` x : Type0
// An undecidable version of `mem`,
// for when the sequence payload is not an eqtype
////////////////////////////////////////////////////////////////////////////////
[@@ remove_unused_type_parameters [0; 1; 2]]
val contains (#a:Type) (s:seq a) (x:a) : Tot Type0
val contains_intro (#a:Type) (s:seq a) (k:nat) (x:a)
: Lemma (k < Seq.length s /\ Seq.index s k == x
==>
s `contains` x)
val contains_elim (#a:Type) (s:seq a) (x:a)
: Lemma (s `contains` x
==>
(exists (k:nat). k < Seq.length s /\ Seq.index s k == x))
val lemma_contains_empty (#a:Type) : Lemma (forall (x:a). ~ (contains Seq.empty x))
val lemma_contains_singleton (#a:Type) (x:a) : Lemma (forall (y:a). contains (create 1 x) y ==> y == x)
val append_contains_equiv (#a:Type) (s1:seq a) (s2:seq a) (x:a)
: Lemma ((append s1 s2) `contains` x
<==>
(s1 `contains` x \/ s2 `contains` x))
val contains_snoc : #a:Type -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. (snoc s x) `contains` y <==> s `contains` y \/ x==y))
val lemma_find_l_contains (#a:Type) (f:a -> Tot bool) (l:seq a)
: Lemma (requires True) (ensures Some? (find_l f l) ==> l `contains` (Some?.v (find_l f l)))
val contains_cons (#a:Type) (hd:a) (tl:Seq.seq a) (x:a)
: Lemma ((cons hd tl) `contains` x
<==>
(x==hd \/ tl `contains` x))
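(* Example (not part of the original file): with contains_cons, membership
   of the head element is immediate. *)
let example_contains_head (#a:Type) (hd:a) (tl:seq a)
  : Lemma ((cons hd tl) `contains` hd)
  = contains_cons hd tl hd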
val append_cons_snoc (#a:Type) (u: Seq.seq a) (x:a) (v:Seq.seq a)
: Lemma (Seq.equal (Seq.append u (cons x v))
(Seq.append (snoc u x) v))
val append_slices (#a:Type) (s1:Seq.seq a) (s2:Seq.seq a)
: Lemma ( Seq.equal s1 (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) /\
Seq.equal s2 (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length s1 + Seq.length s2)) /\
(forall (i:nat) (j:nat).
i <= j /\ j <= Seq.length s2 ==>
Seq.equal (Seq.slice s2 i j)
(Seq.slice (Seq.append s1 s2) (Seq.length s1 + i) (Seq.length s1 + j))))
val find_l_none_no_index (#a:Type) (s:Seq.seq a) (f:(a -> Tot bool)) :
Lemma (requires (None? (find_l f s)))
(ensures (forall (i:nat{i < Seq.length s}). not (f (Seq.index s i))))
(decreases (Seq.length s))
(** More properties, with new naming conventions *)
let suffix_of
(#a: Type) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val suffix_of : s_suff: FStar.Seq.Base.seq a -> s: FStar.Seq.Base.seq a -> Prims.logical | [] | FStar.Seq.Properties.suffix_of | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s_suff: FStar.Seq.Base.seq a -> s: FStar.Seq.Base.seq a -> Prims.logical | {
"end_col": 45,
"end_line": 534,
"start_col": 2,
"start_line": 534
} |
|
Prims.Tot | val of_list (#a: Type) (l: list a) : seq a | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let of_list (#a:Type) (l:list a) :seq a = seq_of_list l | val of_list (#a: Type) (l: list a) : seq a
let of_list (#a: Type) (l: list a) : seq a = | false | null | false | seq_of_list l | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"Prims.list",
"FStar.Seq.Properties.seq_of_list",
"FStar.Seq.Base.seq"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
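(* Example (not part of the original file): mem is definitionally
   "count is positive"; the proof is expected to be immediate. *)
let example_mem_count (#a:eqtype) (x:a) (s:seq a)
  : Lemma (mem x s <==> count x s > 0)
  = ()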
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
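(* Example (not part of the original file; the wrapper name is hypothetical):
   index_mem produces a concrete witness index for a known member. *)
let example_first_index (#a:eqtype) (x:a) (s:seq a{mem x s})
  : i:nat{i < length s /\ index s i == x}
  = index_mem x s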
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
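(* Example (not part of the original file): sequences of length at most one
   are sorted for any comparison; one unfolding of `sorted` should suffice
   under the default fuel settings. *)
let example_sorted_short (#a:Type) (f:a -> a -> Tot bool) (s:seq a{length s <= 1})
  : Lemma (sorted f s)
  = ()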
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry and totality *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace_subseq s i j sub: replaces the [i,j) sub-sequence of s with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l)
val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x})
: Lemma (requires (mem x s))
(ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s))))
let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True)
(ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true))))
= None? (seq_find (fun i -> not (f i)) l)
val seq_mem_k: #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} ->
Lemma (requires True)
(ensures (mem (Seq.index s n) s))
[SMTPat (mem (Seq.index s n) s)]
module L = FStar.List.Tot
let rec seq_to_list (#a:Type) (s:seq a)
: Tot (l:list a{L.length l = length s})
(decreases (length s))
= if length s = 0 then []
else index s 0::seq_to_list (slice s 1 (length s))
[@@"opaque_to_smt"]
let rec seq_of_list (#a:Type) (l:list a) : Tot (s:seq a{L.length l = length s}) =
match l with
| [] -> Seq.empty #a
| hd::tl -> create 1 hd @| seq_of_list tl
val lemma_seq_of_list_induction (#a:Type) (l:list a)
:Lemma (requires True)
(ensures (let s = seq_of_list l in
match l with
| [] -> Seq.equal s empty
| hd::tl -> s == cons hd (seq_of_list tl) /\
head s == hd /\ tail s == (seq_of_list tl)))
val lemma_seq_list_bij: #a:Type -> s:seq a -> Lemma
(requires (True))
(ensures (seq_of_list (seq_to_list s) == s))
val lemma_list_seq_bij: #a:Type -> l:list a -> Lemma
(requires (True))
(ensures (seq_to_list (seq_of_list l) == l))
unfold let createL_post (#a:Type0) (l:list a) (s:seq a) : GTot Type0 =
normalize (L.length l = length s) /\ seq_to_list s == l /\ seq_of_list l == s
let createL (#a:Type0) (l:list a)
: Pure (seq a)
(requires True)
(ensures (fun s -> createL_post #a l s))
= let s = seq_of_list l in
lemma_list_seq_bij l;
s
val lemma_index_is_nth: #a:Type -> s:seq a -> i:nat{i < length s} -> Lemma
(requires True)
(ensures (L.index (seq_to_list s) i == index s i))
////////////////////////////////////////////////////////////////////////////////
//s `contains` x : Type0
// An undecidable version of `mem`,
// for when the sequence payload is not an eqtype
////////////////////////////////////////////////////////////////////////////////
[@@ remove_unused_type_parameters [0; 1; 2]]
val contains (#a:Type) (s:seq a) (x:a) : Tot Type0
val contains_intro (#a:Type) (s:seq a) (k:nat) (x:a)
: Lemma (k < Seq.length s /\ Seq.index s k == x
==>
s `contains` x)
val contains_elim (#a:Type) (s:seq a) (x:a)
: Lemma (s `contains` x
==>
(exists (k:nat). k < Seq.length s /\ Seq.index s k == x))
val lemma_contains_empty (#a:Type) : Lemma (forall (x:a). ~ (contains Seq.empty x))
val lemma_contains_singleton (#a:Type) (x:a) : Lemma (forall (y:a). contains (create 1 x) y ==> y == x)
val append_contains_equiv (#a:Type) (s1:seq a) (s2:seq a) (x:a)
: Lemma ((append s1 s2) `contains` x
<==>
(s1 `contains` x \/ s2 `contains` x))
val contains_snoc : #a:Type -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. (snoc s x) `contains` y <==> s `contains` y \/ x==y))
val lemma_find_l_contains (#a:Type) (f:a -> Tot bool) (l:seq a)
: Lemma (requires True) (ensures Some? (find_l f l) ==> l `contains` (Some?.v (find_l f l)))
val contains_cons (#a:Type) (hd:a) (tl:Seq.seq a) (x:a)
: Lemma ((cons hd tl) `contains` x
<==>
(x==hd \/ tl `contains` x))
val append_cons_snoc (#a:Type) (u: Seq.seq a) (x:a) (v:Seq.seq a)
: Lemma (Seq.equal (Seq.append u (cons x v))
(Seq.append (snoc u x) v))
val append_slices (#a:Type) (s1:Seq.seq a) (s2:Seq.seq a)
: Lemma ( Seq.equal s1 (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) /\
Seq.equal s2 (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length s1 + Seq.length s2)) /\
(forall (i:nat) (j:nat).
i <= j /\ j <= Seq.length s2 ==>
Seq.equal (Seq.slice s2 i j)
(Seq.slice (Seq.append s1 s2) (Seq.length s1 + i) (Seq.length s1 + j))))
val find_l_none_no_index (#a:Type) (s:Seq.seq a) (f:(a -> Tot bool)) :
Lemma (requires (None? (find_l f s)))
(ensures (forall (i:nat{i < Seq.length s}). not (f (Seq.index s i))))
(decreases (Seq.length s))
(** More properties, with new naming conventions *)
let suffix_of
(#a: Type)
(s_suff s: seq a)
= exists s_pref . (s == append s_pref s_suff)
val cons_head_tail
(#a: Type)
(s: seq a {length s > 0})
: Lemma
(requires True)
(ensures (s == cons (head s) (tail s)))
[SMTPat (cons (head s) (tail s))]
val head_cons
(#a: Type)
(x: a)
(s: seq a)
: Lemma
(ensures (head (cons x s) == x))
val suffix_of_tail
(#a: Type)
(s: seq a {length s > 0})
: Lemma
(requires True)
(ensures ((tail s) `suffix_of` s))
[SMTPat ((tail s) `suffix_of` s)]
val index_cons_l
(#a: Type)
(c: a)
(s: seq a)
: Lemma
(ensures (index (cons c s) 0 == c))
val index_cons_r
(#a: Type)
(c: a)
(s: seq a)
(i: nat {1 <= i /\ i <= length s})
: Lemma
(ensures (index (cons c s) i == index s (i - 1)))
val append_cons
(#a: Type)
(c: a)
(s1 s2: seq a)
: Lemma
(ensures (append (cons c s1) s2 == cons c (append s1 s2)))
val index_tail
(#a: Type)
(s: seq a {length s > 0})
(i: nat {i < length s - 1} )
: Lemma
(ensures (index (tail s) i == index s (i + 1)))
val mem_cons
(#a:eqtype)
(x:a)
(s:seq a)
: Lemma
(ensures (forall y. mem y (cons x s) <==> mem y s \/ x=y))
val snoc_slice_index
(#a: Type)
(s: seq a)
(i: nat)
(j: nat {i <= j /\ j < length s} )
: Lemma
(requires True)
(ensures (snoc (slice s i j) (index s j) == slice s i (j + 1)))
[SMTPat (snoc (slice s i j) (index s j))]
val cons_index_slice
(#a: Type)
(s: seq a)
(i: nat)
(j: nat {i < j /\ j <= length s} )
(k:nat{k == i+1})
: Lemma
(requires True)
(ensures (cons (index s i) (slice s k j) == slice s i j))
[SMTPat (cons (index s i) (slice s k j))]
val slice_is_empty
(#a: Type)
(s: seq a)
(i: nat {i <= length s})
: Lemma
(requires True)
(ensures (slice s i i == Seq.empty))
[SMTPat (slice s i i)]
val slice_length
(#a: Type)
(s: seq a)
: Lemma
(requires True)
(ensures (slice s 0 (length s) == s))
[SMTPat (slice s 0 (length s))]
val slice_slice
(#a: Type)
(s: seq a)
(i1: nat)
(j1: nat {i1 <= j1 /\ j1 <= length s} )
(i2: nat)
(j2: nat {i2 <= j2 /\ j2 <= j1 - i1} )
: Lemma
(requires True)
(ensures (slice (slice s i1 j1) i2 j2 == slice s (i1 + i2) (i1 + j2)))
[SMTPat (slice (slice s i1 j1) i2 j2)]
val lemma_seq_of_list_index (#a:Type) (l:list a) (i:nat{i < List.Tot.length l})
:Lemma (requires True)
(ensures (index (seq_of_list l) i == List.Tot.index l i))
[SMTPat (index (seq_of_list l) i)] | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val of_list (#a: Type) (l: list a) : seq a | [] | FStar.Seq.Properties.of_list | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | l: Prims.list a -> FStar.Seq.Base.seq a | {
"end_col": 55,
"end_line": 651,
"start_col": 42,
"start_line": 651
} |
Prims.Tot | val sortWith (#a: eqtype) (f: (a -> a -> Tot int)) (s: seq a) : Tot (seq a) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let sortWith (#a:eqtype) (f:a -> a -> Tot int) (s:seq a) :Tot (seq a)
= seq_of_list (List.Tot.Base.sortWith f (seq_to_list s)) | val sortWith (#a: eqtype) (f: (a -> a -> Tot int)) (s: seq a) : Tot (seq a)
let sortWith (#a: eqtype) (f: (a -> a -> Tot int)) (s: seq a) : Tot (seq a) = | false | null | false | seq_of_list (List.Tot.Base.sortWith f (seq_to_list s)) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"Prims.eqtype",
"Prims.int",
"FStar.Seq.Base.seq",
"FStar.Seq.Properties.seq_of_list",
"FStar.List.Tot.Base.sortWith",
"FStar.Seq.Properties.seq_to_list"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
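(* Example (not part of the original file): cons adds exactly one element;
   the length fact is expected to follow from the SMT-patterned length
   lemmas of FStar.Seq.Base. *)
let example_cons_length (#a:Type) (x:a) (s:seq a)
  : Lemma (length (cons x s) = 1 + length s)
  = ()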
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry and totality *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace_subseq s i j sub: replaces the [i,j) sub-sequence of s with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l)
val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x})
: Lemma (requires (mem x s))
(ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s))))
let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True)
(ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true))))
= None? (seq_find (fun i -> not (f i)) l)
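(* Illustrative sketch, not part of the original interface: `for_all f l` holds
   exactly when `seq_find` can find no counterexample to `f`. The binding below
   is an assumed example. *)
let example_for_all : bool =
  for_all (fun (x:int) -> x >= 0) (cons 0 (cons 1 (empty #int)))
(* Expected to be true: both elements are non-negative. *)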
val seq_mem_k: #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} ->
Lemma (requires True)
(ensures (mem (Seq.index s n) s))
[SMTPat (mem (Seq.index s n) s)]
module L = FStar.List.Tot
let rec seq_to_list (#a:Type) (s:seq a)
: Tot (l:list a{L.length l = length s})
(decreases (length s))
= if length s = 0 then []
else index s 0::seq_to_list (slice s 1 (length s))
[@@"opaque_to_smt"]
let rec seq_of_list (#a:Type) (l:list a) : Tot (s:seq a{L.length l = length s}) =
match l with
| [] -> Seq.empty #a
| hd::tl -> create 1 hd @| seq_of_list tl
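(* Illustrative note, assumed reading: `seq_to_list` and `seq_of_list` are
   mutually inverse, as the two bijection lemmas below state; for instance
   seq_to_list (seq_of_list [1; 2]) == [1; 2]. *)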
val lemma_seq_of_list_induction (#a:Type) (l:list a)
:Lemma (requires True)
(ensures (let s = seq_of_list l in
match l with
| [] -> Seq.equal s empty
| hd::tl -> s == cons hd (seq_of_list tl) /\
head s == hd /\ tail s == (seq_of_list tl)))
val lemma_seq_list_bij: #a:Type -> s:seq a -> Lemma
(requires (True))
(ensures (seq_of_list (seq_to_list s) == s))
val lemma_list_seq_bij: #a:Type -> l:list a -> Lemma
(requires (True))
(ensures (seq_to_list (seq_of_list l) == l))
unfold let createL_post (#a:Type0) (l:list a) (s:seq a) : GTot Type0 =
normalize (L.length l = length s) /\ seq_to_list s == l /\ seq_of_list l == s
let createL (#a:Type0) (l:list a)
: Pure (seq a)
(requires True)
(ensures (fun s -> createL_post #a l s))
= let s = seq_of_list l in
lemma_list_seq_bij l;
s
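(* Illustrative sketch, not part of the original interface: `createL` builds a
   sequence from a list literal and, through `createL_post`, records both
   round-trip equalities. The binding below is an assumed example. *)
let example_createL : seq int = createL [1; 2; 3]
(* length example_createL = 3 and seq_to_list example_createL == [1; 2; 3]. *)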
val lemma_index_is_nth: #a:Type -> s:seq a -> i:nat{i < length s} -> Lemma
(requires True)
(ensures (L.index (seq_to_list s) i == index s i))
////////////////////////////////////////////////////////////////////////////////
//s `contains` x : Type0
// An undecidable version of `mem`,
// for when the sequence payload is not an eqtype
////////////////////////////////////////////////////////////////////////////////
[@@ remove_unused_type_parameters [0; 1; 2]]
val contains (#a:Type) (s:seq a) (x:a) : Tot Type0
val contains_intro (#a:Type) (s:seq a) (k:nat) (x:a)
: Lemma (k < Seq.length s /\ Seq.index s k == x
==>
s `contains` x)
val contains_elim (#a:Type) (s:seq a) (x:a)
: Lemma (s `contains` x
==>
(exists (k:nat). k < Seq.length s /\ Seq.index s k == x))
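(* Illustrative note, assumed reading of the two lemmas above: a `contains`
   fact is always backed by a concrete index. For example, `contains_intro s 0 x`
   discharges `s `contains` x` whenever `0 < length s /\ index s 0 == x`, and
   `contains_elim` recovers such an index from a `contains` fact. *)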
val lemma_contains_empty (#a:Type) : Lemma (forall (x:a). ~ (contains Seq.empty x))
val lemma_contains_singleton (#a:Type) (x:a) : Lemma (forall (y:a). contains (create 1 x) y ==> y == x)
val append_contains_equiv (#a:Type) (s1:seq a) (s2:seq a) (x:a)
: Lemma ((append s1 s2) `contains` x
<==>
(s1 `contains` x \/ s2 `contains` x))
val contains_snoc : #a:Type -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. (snoc s x) `contains` y <==> s `contains` y \/ x==y))
val lemma_find_l_contains (#a:Type) (f:a -> Tot bool) (l:seq a)
: Lemma (requires True) (ensures Some? (find_l f l) ==> l `contains` (Some?.v (find_l f l)))
val contains_cons (#a:Type) (hd:a) (tl:Seq.seq a) (x:a)
: Lemma ((cons hd tl) `contains` x
<==>
(x==hd \/ tl `contains` x))
val append_cons_snoc (#a:Type) (u: Seq.seq a) (x:a) (v:Seq.seq a)
: Lemma (Seq.equal (Seq.append u (cons x v))
(Seq.append (snoc u x) v))
val append_slices (#a:Type) (s1:Seq.seq a) (s2:Seq.seq a)
: Lemma ( Seq.equal s1 (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) /\
Seq.equal s2 (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length s1 + Seq.length s2)) /\
(forall (i:nat) (j:nat).
i <= j /\ j <= Seq.length s2 ==>
Seq.equal (Seq.slice s2 i j)
(Seq.slice (Seq.append s1 s2) (Seq.length s1 + i) (Seq.length s1 + j))))
val find_l_none_no_index (#a:Type) (s:Seq.seq a) (f:(a -> Tot bool)) :
Lemma (requires (None? (find_l f s)))
(ensures (forall (i:nat{i < Seq.length s}). not (f (Seq.index s i))))
(decreases (Seq.length s))
(** More properties, with new naming conventions *)
let suffix_of
(#a: Type)
(s_suff s: seq a)
= exists s_pref . (s == append s_pref s_suff)
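(* Illustrative note, assumed reading: `s_suff` is a suffix of `s` when some
   (possibly empty) prefix can be prepended to recover `s`; taking
   `s_pref = empty` shows that every sequence is a suffix of itself. *)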
val cons_head_tail
(#a: Type)
(s: seq a {length s > 0})
: Lemma
(requires True)
(ensures (s == cons (head s) (tail s)))
[SMTPat (cons (head s) (tail s))]
val head_cons
(#a: Type)
(x: a)
(s: seq a)
: Lemma
(ensures (head (cons x s) == x))
val suffix_of_tail
(#a: Type)
(s: seq a {length s > 0})
: Lemma
(requires True)
(ensures ((tail s) `suffix_of` s))
[SMTPat ((tail s) `suffix_of` s)]
val index_cons_l
(#a: Type)
(c: a)
(s: seq a)
: Lemma
(ensures (index (cons c s) 0 == c))
val index_cons_r
(#a: Type)
(c: a)
(s: seq a)
(i: nat {1 <= i /\ i <= length s})
: Lemma
(ensures (index (cons c s) i == index s (i - 1)))
val append_cons
(#a: Type)
(c: a)
(s1 s2: seq a)
: Lemma
(ensures (append (cons c s1) s2 == cons c (append s1 s2)))
val index_tail
(#a: Type)
(s: seq a {length s > 0})
(i: nat {i < length s - 1} )
: Lemma
(ensures (index (tail s) i == index s (i + 1)))
val mem_cons
(#a:eqtype)
(x:a)
(s:seq a)
: Lemma
(ensures (forall y. mem y (cons x s) <==> mem y s \/ x=y))
val snoc_slice_index
(#a: Type)
(s: seq a)
(i: nat)
(j: nat {i <= j /\ j < length s} )
: Lemma
(requires True)
(ensures (snoc (slice s i j) (index s j) == slice s i (j + 1)))
[SMTPat (snoc (slice s i j) (index s j))]
val cons_index_slice
(#a: Type)
(s: seq a)
(i: nat)
(j: nat {i < j /\ j <= length s} )
(k:nat{k == i+1})
: Lemma
(requires True)
(ensures (cons (index s i) (slice s k j) == slice s i j))
[SMTPat (cons (index s i) (slice s k j))]
val slice_is_empty
(#a: Type)
(s: seq a)
(i: nat {i <= length s})
: Lemma
(requires True)
(ensures (slice s i i == Seq.empty))
[SMTPat (slice s i i)]
val slice_length
(#a: Type)
(s: seq a)
: Lemma
(requires True)
(ensures (slice s 0 (length s) == s))
[SMTPat (slice s 0 (length s))]
val slice_slice
(#a: Type)
(s: seq a)
(i1: nat)
(j1: nat {i1 <= j1 /\ j1 <= length s} )
(i2: nat)
(j2: nat {i2 <= j2 /\ j2 <= j1 - i1} )
: Lemma
(requires True)
(ensures (slice (slice s i1 j1) i2 j2 == slice s (i1 + i2) (i1 + j2)))
[SMTPat (slice (slice s i1 j1) i2 j2)]
val lemma_seq_of_list_index (#a:Type) (l:list a) (i:nat{i < List.Tot.length l})
:Lemma (requires True)
(ensures (index (seq_of_list l) i == List.Tot.index l i))
[SMTPat (index (seq_of_list l) i)]
[@@(deprecated "seq_of_list")]
let of_list (#a:Type) (l:list a) :seq a = seq_of_list l
val seq_of_list_tl
(#a: Type)
(l: list a { List.Tot.length l > 0 } )
: Lemma
(requires True)
(ensures (seq_of_list (List.Tot.tl l) == tail (seq_of_list l)))
val mem_seq_of_list
(#a: eqtype)
(x: a)
(l: list a)
: Lemma
(requires True)
(ensures (mem x (seq_of_list l) == List.Tot.mem x l))
[SMTPat (mem x (seq_of_list l))]
(** Dealing efficiently with `seq_of_list` by meta-evaluating conjunctions over
an entire list. *)
let rec explode_and (#a: Type)
(i: nat)
(s: seq a { i <= length s })
(l: list a { List.Tot.length l + i = length s }):
Tot Type
(decreases (List.Tot.length l))
= match l with
| [] -> True
| hd :: tl -> index s i == hd /\ explode_and (i + 1) s tl
unfold
let pointwise_and s l =
norm [ iota; zeta; primops; delta_only [ `%(explode_and) ] ] (explode_and 0 s l)
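(* Illustrative sketch of the intended unfolding, not part of the original
   interface: for a three-element list the normalizer reduces
     pointwise_and s [x; y; z]
   to the plain conjunction
     index s 0 == x /\ (index s 1 == y /\ (index s 2 == z /\ True))
   so `intro_of_list` and `elim_of_list` below only ever deal with indexing
   facts, never with the recursive `explode_and` itself. *)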
val intro_of_list': #a:Type ->
i:nat ->
s:seq a ->
l:list a ->
Lemma
(requires (
List.Tot.length l + i = length s /\
i <= length s /\
explode_and i s l))
(ensures (
equal (seq_of_list l) (slice s i (length s))))
val intro_of_list (#a: Type) (s: seq a) (l: list a):
Lemma
(requires (
List.Tot.length l = length s /\
pointwise_and s l))
(ensures (
s == seq_of_list l))
val elim_of_list': #a:Type ->
i:nat ->
s:seq a ->
l:list a ->
Lemma
(requires (
List.Tot.length l + i = length s /\
i <= length s /\
slice s i (length s) == seq_of_list l))
(ensures (
explode_and i s l))
val elim_of_list (#a: Type) (l: list a):
Lemma
(ensures (
let s = seq_of_list l in
pointwise_and s l))
(****** sortWith ******) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val sortWith (#a: eqtype) (f: (a -> a -> Tot int)) (s: seq a) : Tot (seq a) | [] | FStar.Seq.Properties.sortWith | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: (_: a -> _: a -> Prims.int) -> s: FStar.Seq.Base.seq a -> FStar.Seq.Base.seq a | {
"end_col": 58,
"end_line": 726,
"start_col": 4,
"start_line": 726
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let pointwise_and s l =
norm [ iota; zeta; primops; delta_only [ `%(explode_and) ] ] (explode_and 0 s l) | let pointwise_and s l = | false | null | false | norm [iota; zeta; primops; delta_only [`%(explode_and)]] (explode_and 0 s l) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.length",
"Prims.list",
"Prims.op_Equality",
"Prims.int",
"Prims.op_Addition",
"FStar.List.Tot.Base.length",
"FStar.Pervasives.norm",
"Prims.Cons",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.iota",
"FStar.Pervasives.zeta",
"FStar.Pervasives.primops",
"FStar.Pervasives.delta_only",
"Prims.string",
"Prims.Nil",
"FStar.Seq.Properties.explode_and"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
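(* Illustrative sketch, not part of the original interface: `count` tallies the
   occurrences of `x` and `mem` tests whether that tally is positive. The
   binding below is an assumed example. *)
let example_count : nat = count 1 (cons 1 (cons 1 (empty #int)))
(* example_count is 2, hence mem 1 (cons 1 (cons 1 (empty #int))) is true. *)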
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
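(* Illustrative note, assumed reading: `splice s1 i s2 j` keeps `s1` outside
   [i, j) and takes `s2` inside it, so `splice s1 0 s2 (length s1)` is
   extensionally just `s2`, while `splice s1 i s1 j` gives back `s1` itself,
   as `splice_refl` below states. *)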
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l)
val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x})
: Lemma (requires (mem x s))
(ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s))))
let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True)
(ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true))))
= None? (seq_find (fun i -> not (f i)) l)
val seq_mem_k: #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} ->
Lemma (requires True)
(ensures (mem (Seq.index s n) s))
[SMTPat (mem (Seq.index s n) s)]
module L = FStar.List.Tot
let rec seq_to_list (#a:Type) (s:seq a)
: Tot (l:list a{L.length l = length s})
(decreases (length s))
= if length s = 0 then []
else index s 0::seq_to_list (slice s 1 (length s))
[@@"opaque_to_smt"]
let rec seq_of_list (#a:Type) (l:list a) : Tot (s:seq a{L.length l = length s}) =
match l with
| [] -> Seq.empty #a
| hd::tl -> create 1 hd @| seq_of_list tl
val lemma_seq_of_list_induction (#a:Type) (l:list a)
:Lemma (requires True)
(ensures (let s = seq_of_list l in
match l with
| [] -> Seq.equal s empty
| hd::tl -> s == cons hd (seq_of_list tl) /\
head s == hd /\ tail s == (seq_of_list tl)))
val lemma_seq_list_bij: #a:Type -> s:seq a -> Lemma
(requires (True))
(ensures (seq_of_list (seq_to_list s) == s))
val lemma_list_seq_bij: #a:Type -> l:list a -> Lemma
(requires (True))
(ensures (seq_to_list (seq_of_list l) == l))
unfold let createL_post (#a:Type0) (l:list a) (s:seq a) : GTot Type0 =
normalize (L.length l = length s) /\ seq_to_list s == l /\ seq_of_list l == s
let createL (#a:Type0) (l:list a)
: Pure (seq a)
(requires True)
(ensures (fun s -> createL_post #a l s))
= let s = seq_of_list l in
lemma_list_seq_bij l;
s
val lemma_index_is_nth: #a:Type -> s:seq a -> i:nat{i < length s} -> Lemma
(requires True)
(ensures (L.index (seq_to_list s) i == index s i))
////////////////////////////////////////////////////////////////////////////////
//s `contains` x : Type0
// An undecidable version of `mem`,
// for when the sequence payload is not an eqtype
////////////////////////////////////////////////////////////////////////////////
[@@ remove_unused_type_parameters [0; 1; 2]]
val contains (#a:Type) (s:seq a) (x:a) : Tot Type0
val contains_intro (#a:Type) (s:seq a) (k:nat) (x:a)
: Lemma (k < Seq.length s /\ Seq.index s k == x
==>
s `contains` x)
val contains_elim (#a:Type) (s:seq a) (x:a)
: Lemma (s `contains` x
==>
(exists (k:nat). k < Seq.length s /\ Seq.index s k == x))
val lemma_contains_empty (#a:Type) : Lemma (forall (x:a). ~ (contains Seq.empty x))
val lemma_contains_singleton (#a:Type) (x:a) : Lemma (forall (y:a). contains (create 1 x) y ==> y == x)
val append_contains_equiv (#a:Type) (s1:seq a) (s2:seq a) (x:a)
: Lemma ((append s1 s2) `contains` x
<==>
(s1 `contains` x \/ s2 `contains` x))
val contains_snoc : #a:Type -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. (snoc s x) `contains` y <==> s `contains` y \/ x==y))
val lemma_find_l_contains (#a:Type) (f:a -> Tot bool) (l:seq a)
: Lemma (requires True) (ensures Some? (find_l f l) ==> l `contains` (Some?.v (find_l f l)))
val contains_cons (#a:Type) (hd:a) (tl:Seq.seq a) (x:a)
: Lemma ((cons hd tl) `contains` x
<==>
(x==hd \/ tl `contains` x))
val append_cons_snoc (#a:Type) (u: Seq.seq a) (x:a) (v:Seq.seq a)
: Lemma (Seq.equal (Seq.append u (cons x v))
(Seq.append (snoc u x) v))
val append_slices (#a:Type) (s1:Seq.seq a) (s2:Seq.seq a)
: Lemma ( Seq.equal s1 (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) /\
Seq.equal s2 (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length s1 + Seq.length s2)) /\
(forall (i:nat) (j:nat).
i <= j /\ j <= Seq.length s2 ==>
Seq.equal (Seq.slice s2 i j)
(Seq.slice (Seq.append s1 s2) (Seq.length s1 + i) (Seq.length s1 + j))))
val find_l_none_no_index (#a:Type) (s:Seq.seq a) (f:(a -> Tot bool)) :
Lemma (requires (None? (find_l f s)))
(ensures (forall (i:nat{i < Seq.length s}). not (f (Seq.index s i))))
(decreases (Seq.length s))
(** More properties, with new naming conventions *)
let suffix_of
(#a: Type)
(s_suff s: seq a)
= exists s_pref . (s == append s_pref s_suff)
val cons_head_tail
(#a: Type)
(s: seq a {length s > 0})
: Lemma
(requires True)
(ensures (s == cons (head s) (tail s)))
[SMTPat (cons (head s) (tail s))]
val head_cons
(#a: Type)
(x: a)
(s: seq a)
: Lemma
(ensures (head (cons x s) == x))
val suffix_of_tail
(#a: Type)
(s: seq a {length s > 0})
: Lemma
(requires True)
(ensures ((tail s) `suffix_of` s))
[SMTPat ((tail s) `suffix_of` s)]
val index_cons_l
(#a: Type)
(c: a)
(s: seq a)
: Lemma
(ensures (index (cons c s) 0 == c))
val index_cons_r
(#a: Type)
(c: a)
(s: seq a)
(i: nat {1 <= i /\ i <= length s})
: Lemma
(ensures (index (cons c s) i == index s (i - 1)))
val append_cons
(#a: Type)
(c: a)
(s1 s2: seq a)
: Lemma
(ensures (append (cons c s1) s2 == cons c (append s1 s2)))
val index_tail
(#a: Type)
(s: seq a {length s > 0})
(i: nat {i < length s - 1} )
: Lemma
(ensures (index (tail s) i == index s (i + 1)))
val mem_cons
(#a:eqtype)
(x:a)
(s:seq a)
: Lemma
(ensures (forall y. mem y (cons x s) <==> mem y s \/ x=y))
val snoc_slice_index
(#a: Type)
(s: seq a)
(i: nat)
(j: nat {i <= j /\ j < length s} )
: Lemma
(requires True)
(ensures (snoc (slice s i j) (index s j) == slice s i (j + 1)))
[SMTPat (snoc (slice s i j) (index s j))]
val cons_index_slice
(#a: Type)
(s: seq a)
(i: nat)
(j: nat {i < j /\ j <= length s} )
(k:nat{k == i+1})
: Lemma
(requires True)
(ensures (cons (index s i) (slice s k j) == slice s i j))
[SMTPat (cons (index s i) (slice s k j))]
val slice_is_empty
(#a: Type)
(s: seq a)
(i: nat {i <= length s})
: Lemma
(requires True)
(ensures (slice s i i == Seq.empty))
[SMTPat (slice s i i)]
val slice_length
(#a: Type)
(s: seq a)
: Lemma
(requires True)
(ensures (slice s 0 (length s) == s))
[SMTPat (slice s 0 (length s))]
val slice_slice
(#a: Type)
(s: seq a)
(i1: nat)
(j1: nat {i1 <= j1 /\ j1 <= length s} )
(i2: nat)
(j2: nat {i2 <= j2 /\ j2 <= j1 - i1} )
: Lemma
(requires True)
(ensures (slice (slice s i1 j1) i2 j2 == slice s (i1 + i2) (i1 + j2)))
[SMTPat (slice (slice s i1 j1) i2 j2)]
val lemma_seq_of_list_index (#a:Type) (l:list a) (i:nat{i < List.Tot.length l})
:Lemma (requires True)
(ensures (index (seq_of_list l) i == List.Tot.index l i))
[SMTPat (index (seq_of_list l) i)]
[@@(deprecated "seq_of_list")]
let of_list (#a:Type) (l:list a) :seq a = seq_of_list l
val seq_of_list_tl
(#a: Type)
(l: list a { List.Tot.length l > 0 } )
: Lemma
(requires True)
(ensures (seq_of_list (List.Tot.tl l) == tail (seq_of_list l)))
val mem_seq_of_list
(#a: eqtype)
(x: a)
(l: list a)
: Lemma
(requires True)
(ensures (mem x (seq_of_list l) == List.Tot.mem x l))
[SMTPat (mem x (seq_of_list l))]
(** Dealing efficiently with `seq_of_list` by meta-evaluating conjunctions over
an entire list. *)
let rec explode_and (#a: Type)
(i: nat)
(s: seq a { i <= length s })
(l: list a { List.Tot.length l + i = length s }):
Tot Type
(decreases (List.Tot.length l))
= match l with
| [] -> True
| hd :: tl -> index s i == hd /\ explode_and (i + 1) s tl
unfold | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val pointwise_and : s: FStar.Seq.Base.seq _ {0 <= FStar.Seq.Base.length s} ->
l: Prims.list _ {FStar.List.Tot.Base.length l + 0 = FStar.Seq.Base.length s}
-> Type0 | [] | FStar.Seq.Properties.pointwise_and | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
s: FStar.Seq.Base.seq _ {0 <= FStar.Seq.Base.length s} ->
l: Prims.list _ {FStar.List.Tot.Base.length l + 0 = FStar.Seq.Base.length s}
-> Type0 | {
"end_col": 82,
"end_line": 684,
"start_col": 2,
"start_line": 684
} |
|
Prims.Tot | val head (#a: Type) (s: seq a {length s > 0}) : Tot a | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0 | val head (#a: Type) (s: seq a {length s > 0}) : Tot a
let head (#a: Type) (s: seq a {length s > 0}) : Tot a = | false | null | false | index s 0 | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"Prims.b2t",
"Prims.op_GreaterThan",
"FStar.Seq.Base.length",
"FStar.Seq.Base.index"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2)) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val head (#a: Type) (s: seq a {length s > 0}) : Tot a | [] | FStar.Seq.Properties.head | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s: FStar.Seq.Base.seq a {FStar.Seq.Base.length s > 0} -> a | {
"end_col": 62,
"end_line": 40,
"start_col": 53,
"start_line": 40
} |
Prims.Tot | val tail (#a: Type) (s: seq a {length s > 0}) : Tot (seq a) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s) | val tail (#a: Type) (s: seq a {length s > 0}) : Tot (seq a)
let tail (#a: Type) (s: seq a {length s > 0}) : Tot (seq a) = | false | null | false | slice s 1 (length s) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"Prims.b2t",
"Prims.op_GreaterThan",
"FStar.Seq.Base.length",
"FStar.Seq.Base.slice"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0 | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tail (#a: Type) (s: seq a {length s > 0}) : Tot (seq a) | [] | FStar.Seq.Properties.tail | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s: FStar.Seq.Base.seq a {FStar.Seq.Base.length s > 0} -> FStar.Seq.Base.seq a | {
"end_col": 79,
"end_line": 42,
"start_col": 59,
"start_line": 42
} |
Prims.Tot | val last (#a: Type) (s: seq a {length s > 0}) : Tot a | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1) | val last (#a: Type) (s: seq a {length s > 0}) : Tot a
let last (#a: Type) (s: seq a {length s > 0}) : Tot a = | false | null | false | index s (length s - 1) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"Prims.b2t",
"Prims.op_GreaterThan",
"FStar.Seq.Base.length",
"FStar.Seq.Base.index",
"Prims.op_Subtraction"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val last (#a: Type) (s: seq a {length s > 0}) : Tot a | [] | FStar.Seq.Properties.last | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s: FStar.Seq.Base.seq a {FStar.Seq.Base.length s > 0} -> a | {
"end_col": 75,
"end_line": 50,
"start_col": 53,
"start_line": 50
} |
Prims.Tot | val swap (#a: Type) (s: seq a) (i: nat{i < length s}) (j: nat{j < length s}) : Tot (seq a) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j) | val swap (#a: Type) (s: seq a) (i: nat{i < length s}) (j: nat{j < length s}) : Tot (seq a)
let swap (#a: Type) (s: seq a) (i: nat{i < length s}) (j: nat{j < length s}) : Tot (seq a) = | false | null | false | upd (upd s j (index s i)) i (index s j) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"FStar.Seq.Base.upd",
"FStar.Seq.Base.index"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val swap (#a: Type) (s: seq a) (i: nat{i < length s}) (j: nat{j < length s}) : Tot (seq a) | [] | FStar.Seq.Properties.swap | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
s: FStar.Seq.Base.seq a ->
i: Prims.nat{i < FStar.Seq.Base.length s} ->
j: Prims.nat{j < FStar.Seq.Base.length s}
-> FStar.Seq.Base.seq a | {
"end_col": 41,
"end_line": 97,
"start_col": 2,
"start_line": 97
} |
Prims.Pure | val split_eq (#a: Type) (s: seq a) (i: nat{(0 <= i /\ i <= length s)})
: Pure (seq a * seq a) (requires True) (ensures (fun x -> (append (fst x) (snd x) == s))) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x | val split_eq (#a: Type) (s: seq a) (i: nat{(0 <= i /\ i <= length s)})
: Pure (seq a * seq a) (requires True) (ensures (fun x -> (append (fst x) (snd x) == s)))
let split_eq (#a: Type) (s: seq a) (i: nat{(0 <= i /\ i <= length s)})
: Pure (seq a * seq a) (requires True) (ensures (fun x -> (append (fst x) (snd x) == s))) = | false | null | false | let x = split s i in
lemma_split s i;
x | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [] | [
"FStar.Seq.Base.seq",
"Prims.nat",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.length",
"Prims.unit",
"FStar.Seq.Properties.lemma_split",
"FStar.Pervasives.Native.tuple2",
"FStar.Seq.Properties.split",
"Prims.l_True",
"Prims.eq2",
"FStar.Seq.Base.append",
"FStar.Pervasives.Native.fst",
"FStar.Pervasives.Native.snd"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val split_eq (#a: Type) (s: seq a) (i: nat{(0 <= i /\ i <= length s)})
: Pure (seq a * seq a) (requires True) (ensures (fun x -> (append (fst x) (snd x) == s))) | [] | FStar.Seq.Properties.split_eq | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s: FStar.Seq.Base.seq a -> i: Prims.nat{0 <= i /\ i <= FStar.Seq.Base.length s}
-> Prims.Pure (FStar.Seq.Base.seq a * FStar.Seq.Base.seq a) | {
"end_col": 3,
"end_line": 71,
"start_col": 1,
"start_line": 69
} |
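A small sketch of how `split_eq` packages the split together with its round-trip property; names and the refinement on `s` are assumptions made for illustration, and the snippet is not machine-checked here.

module SplitEqSketch
open FStar.Seq

(* split_eq returns the two halves of s at index 1; its ensures clause
   states that appending the halves recovers the original sequence. *)
let halves (s:seq int{length s >= 1}) : seq int * seq int =
  let (lo, hi) = split_eq s 1 in
  assert (append lo hi == s);
  (lo, hi)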
Prims.Tot | val sorted (#a: Type) (f: (a -> a -> Tot bool)) (s: seq a) : Tot bool (decreases (length s)) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s) | val sorted (#a: Type) (f: (a -> a -> Tot bool)) (s: seq a) : Tot bool (decreases (length s))
let rec sorted (#a: Type) (f: (a -> a -> Tot bool)) (s: seq a) : Tot bool (decreases (length s)) = | false | null | false | if length s <= 1
then true
else
let hd = head s in
f hd (index s 1) && sorted f (tail s) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total",
""
] | [
"Prims.bool",
"FStar.Seq.Base.seq",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.length",
"Prims.op_AmpAmp",
"FStar.Seq.Base.index",
"FStar.Seq.Properties.sorted",
"FStar.Seq.Properties.tail",
"FStar.Seq.Properties.head"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val sorted (#a: Type) (f: (a -> a -> Tot bool)) (s: seq a) : Tot bool (decreases (length s)) | [
"recursion"
] | FStar.Seq.Properties.sorted | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: (_: a -> _: a -> Prims.bool) -> s: FStar.Seq.Base.seq a -> Prims.Tot Prims.bool | {
"end_col": 44,
"end_line": 130,
"start_col": 2,
"start_line": 127
} |
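An illustrative check that a one-element sequence is `sorted` under any ordering; the module name is invented, and since the proof relies on unfolding the recursive definition under default fuel, treat it as a sketch rather than a verified artifact.

module SortedSketch
open FStar.Seq

let leq (x y : int) : bool = x <= y

(* sorted leq s checks adjacent pairs; for a one-element sequence only
   the base case (length s <= 1) applies, so a single unfolding suffices. *)
let singleton_sorted (x:int) : Lemma (sorted leq (create 1 x)) = ()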
Prims.Tot | val split (#a: Type) (s: seq a) (i: nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s) | val split (#a: Type) (s: seq a) (i: nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
let split (#a: Type) (s: seq a) (i: nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a) = | false | null | false | slice s 0 i, slice s i (length s) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"Prims.nat",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.length",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Seq.Base.slice",
"FStar.Pervasives.Native.tuple2"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2)) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val split (#a: Type) (s: seq a) (i: nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a) | [] | FStar.Seq.Properties.split | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s: FStar.Seq.Base.seq a -> i: Prims.nat{0 <= i /\ i <= FStar.Seq.Base.length s}
-> FStar.Seq.Base.seq a * FStar.Seq.Base.seq a | {
"end_col": 37,
"end_line": 59,
"start_col": 4,
"start_line": 59
} |
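A sketch (assumed module name, not machine-checked here) of the length bookkeeping behind `split`, which is defined as a pair of slices.

module SplitSketch
open FStar.Seq

(* split s i = (slice s 0 i, slice s i (length s)), so the component
   lengths are i and length s - i respectively. *)
let split_lengths (#a:Type) (s:seq a) (i:nat{i <= length s})
  : Lemma (length (fst (split s i)) = i /\
           length (snd (split s i)) = length s - i)
  = ()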
Prims.Tot | val count (#a: eqtype) (x: a) (s: seq a) : Tot nat (decreases (length s)) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s) | val count (#a: eqtype) (x: a) (s: seq a) : Tot nat (decreases (length s))
let rec count (#a: eqtype) (x: a) (s: seq a) : Tot nat (decreases (length s)) = | false | null | false | if length s = 0 then 0 else if head s = x then 1 + count x (tail s) else count x (tail s) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total",
""
] | [
"Prims.eqtype",
"FStar.Seq.Base.seq",
"Prims.op_Equality",
"Prims.int",
"FStar.Seq.Base.length",
"Prims.bool",
"FStar.Seq.Properties.head",
"Prims.op_Addition",
"FStar.Seq.Properties.count",
"FStar.Seq.Properties.tail",
"Prims.nat"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val count (#a: eqtype) (x: a) (s: seq a) : Tot nat (decreases (length s)) | [
"recursion"
] | FStar.Seq.Properties.count | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | x: a -> s: FStar.Seq.Base.seq a -> Prims.Tot Prims.nat | {
"end_col": 23,
"end_line": 77,
"start_col": 2,
"start_line": 74
} |
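A short sketch (assumed module name, illustrative only) relating `count` and `mem`, the latter being defined directly as `count x l > 0`.

module CountSketch
open FStar.Seq

(* mem x s unfolds to count x s > 0, so a positive count is exactly membership. *)
let mem_of_count (#a:eqtype) (x:a) (s:seq a)
  : Lemma (requires count x s > 0) (ensures mem x s)
  = ()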
Prims.Tot | val replace_subseq
(#a: Type0)
(s: Seq.seq a)
(i: nat)
(j: nat{i <= j /\ j <= length s})
(sub: Seq.seq a {length sub == j - i})
: Tot (Seq.seq a) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s))) | val replace_subseq
(#a: Type0)
(s: Seq.seq a)
(i: nat)
(j: nat{i <= j /\ j <= length s})
(sub: Seq.seq a {length sub == j - i})
: Tot (Seq.seq a)
let replace_subseq
(#a: Type0)
(s: Seq.seq a)
(i: nat)
(j: nat{i <= j /\ j <= length s})
(sub: Seq.seq a {length sub == j - i})
: Tot (Seq.seq a) = | false | null | false | Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s))) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"Prims.nat",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.length",
"Prims.eq2",
"Prims.int",
"Prims.op_Subtraction",
"FStar.Seq.Base.append",
"FStar.Seq.Base.slice"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace with sub *) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val replace_subseq
(#a: Type0)
(s: Seq.seq a)
(i: nat)
(j: nat{i <= j /\ j <= length s})
(sub: Seq.seq a {length sub == j - i})
: Tot (Seq.seq a) | [] | FStar.Seq.Properties.replace_subseq | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
s: FStar.Seq.Base.seq a ->
i: Prims.nat ->
j: Prims.nat{i <= j /\ j <= FStar.Seq.Base.length s} ->
sub: FStar.Seq.Base.seq a {FStar.Seq.Base.length sub == j - i}
-> FStar.Seq.Base.seq a | {
"end_col": 80,
"end_line": 258,
"start_col": 4,
"start_line": 258
} |
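A sketch (assumed module name, not machine-checked here) of the length accounting behind `replace_subseq`: the prefix, the replacement, and the suffix add back up to `length s`.

module ReplaceSubseqSketch
open FStar.Seq

(* replace_subseq s i j sub appends slice s 0 i, sub, and slice s j (length s);
   their lengths are i, j - i, and length s - j, which sum to length s. *)
let replace_keeps_length (#a:Type) (s:seq a) (i:nat) (j:nat{i <= j /\ j <= length s})
                         (sub:seq a{length sub == j - i})
  : Lemma (length (replace_subseq s i j sub) = length s)
  = ()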
Prims.Pure | val index_mem (#a: eqtype) (x: a) (s: seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s)) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s) | val index_mem (#a: eqtype) (x: a) (s: seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
let rec index_mem (#a: eqtype) (x: a) (s: seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s)) = | false | null | false | if head s = x then 0 else 1 + index_mem x (tail s) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
""
] | [
"Prims.eqtype",
"FStar.Seq.Base.seq",
"Prims.op_Equality",
"FStar.Seq.Properties.head",
"Prims.bool",
"Prims.op_Addition",
"FStar.Seq.Properties.index_mem",
"FStar.Seq.Properties.tail",
"Prims.nat",
"Prims.b2t",
"FStar.Seq.Properties.mem",
"Prims.l_and",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"Prims.eq2",
"FStar.Seq.Base.index"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x)) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val index_mem (#a: eqtype) (x: a) (s: seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s)) | [
"recursion"
] | FStar.Seq.Properties.index_mem | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | x: a -> s: FStar.Seq.Base.seq a -> Prims.Pure Prims.nat | {
"end_col": 35,
"end_line": 94,
"start_col": 6,
"start_line": 93
} |
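A usage sketch (assumed names, illustrative only) for `index_mem`, whose postcondition hands back both the bound on the returned index and the fact that it hits `x`.

module IndexMemSketch
open FStar.Seq

(* Given mem x s, index_mem returns the first index at which x occurs;
   its ensures clause justifies the assertion below. *)
let first_occurrence (#a:eqtype) (x:a) (s:seq a{mem x s}) : nat =
  let i = index_mem x s in
  assert (i < length s /\ index s i == x);
  i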
Prims.GTot | val ghost_find_l (#a: Type) (f: (a -> GTot bool)) (l: seq a)
: GTot (o: option a {Some? o ==> f (Some?.v o)}) (decreases (Seq.length l)) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l) | val ghost_find_l (#a: Type) (f: (a -> GTot bool)) (l: seq a)
: GTot (o: option a {Some? o ==> f (Some?.v o)}) (decreases (Seq.length l))
let rec ghost_find_l (#a: Type) (f: (a -> GTot bool)) (l: seq a)
: GTot (o: option a {Some? o ==> f (Some?.v o)}) (decreases (Seq.length l)) = | false | null | false | if Seq.length l = 0 then None else if f (head l) then Some (head l) else ghost_find_l f (tail l) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"sometrivial",
""
] | [
"Prims.bool",
"FStar.Seq.Base.seq",
"Prims.op_Equality",
"Prims.int",
"FStar.Seq.Base.length",
"FStar.Pervasives.Native.None",
"FStar.Seq.Properties.head",
"FStar.Pervasives.Native.Some",
"FStar.Seq.Properties.ghost_find_l",
"FStar.Seq.Properties.tail",
"FStar.Pervasives.Native.option",
"Prims.l_imp",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.Pervasives.Native.__proj__Some__item__v"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)}) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ghost_find_l (#a: Type) (f: (a -> GTot bool)) (l: seq a)
: GTot (o: option a {Some? o ==> f (Some?.v o)}) (decreases (Seq.length l)) | [
"recursion"
] | FStar.Seq.Properties.ghost_find_l | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: (_: a -> Prims.GTot Prims.bool) -> l: FStar.Seq.Base.seq a
-> Prims.GTot (o: FStar.Pervasives.Native.option a {Some? o ==> f (Some?.v o)}) | {
"end_col": 30,
"end_line": 347,
"start_col": 2,
"start_line": 345
} |
Prims.Pure | val createL (#a: Type0) (l: list a)
: Pure (seq a) (requires True) (ensures (fun s -> createL_post #a l s)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let createL (#a:Type0) (l:list a)
: Pure (seq a)
(requires True)
(ensures (fun s -> createL_post #a l s))
= let s = seq_of_list l in
lemma_list_seq_bij l;
s | val createL (#a: Type0) (l: list a)
: Pure (seq a) (requires True) (ensures (fun s -> createL_post #a l s))
let createL (#a: Type0) (l: list a)
: Pure (seq a) (requires True) (ensures (fun s -> createL_post #a l s)) = | false | null | false | let s = seq_of_list l in
lemma_list_seq_bij l;
s | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [] | [
"Prims.list",
"Prims.unit",
"FStar.Seq.Properties.lemma_list_seq_bij",
"FStar.Seq.Base.seq",
"Prims.b2t",
"Prims.op_Equality",
"Prims.nat",
"FStar.List.Tot.Base.length",
"FStar.Seq.Base.length",
"FStar.Seq.Properties.seq_of_list",
"Prims.l_True",
"FStar.Seq.Properties.createL_post"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
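(* Usage sketch, not part of the library: index_mem's postcondition already
   packages the round-trip with index; index_mem_example is a hypothetical
   wrapper exposing it as a refined result. *)
let index_mem_example (#a:eqtype) (x:a) (s:seq a{mem x s})
  : Tot (i:nat{i < length s /\ index s i == x})
  = index_mem x s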
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace_subseq: replaces the [i, j) sub-sequence of s with the given sub-sequence sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
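(* Usage sketch, not part of the library: invoking un_snoc_snoc to undo a
   snoc; the name un_snoc_of_snoc is hypothetical. *)
let un_snoc_of_snoc (#a:Type) (s:seq a) (x:a)
  : Lemma (un_snoc (snoc s x) == (s, x))
  = un_snoc_snoc s x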
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l)
val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x})
: Lemma (requires (mem x s))
(ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s))))
let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True)
(ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true))))
= None? (seq_find (fun i -> not (f i)) l)
val seq_mem_k: #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} ->
Lemma (requires True)
(ensures (mem (Seq.index s n) s))
[SMTPat (mem (Seq.index s n) s)]
module L = FStar.List.Tot
let rec seq_to_list (#a:Type) (s:seq a)
: Tot (l:list a{L.length l = length s})
(decreases (length s))
= if length s = 0 then []
else index s 0::seq_to_list (slice s 1 (length s))
[@@"opaque_to_smt"]
let rec seq_of_list (#a:Type) (l:list a) : Tot (s:seq a{L.length l = length s}) =
match l with
| [] -> Seq.empty #a
| hd::tl -> create 1 hd @| seq_of_list tl
val lemma_seq_of_list_induction (#a:Type) (l:list a)
:Lemma (requires True)
(ensures (let s = seq_of_list l in
match l with
| [] -> Seq.equal s empty
| hd::tl -> s == cons hd (seq_of_list tl) /\
head s == hd /\ tail s == (seq_of_list tl)))
val lemma_seq_list_bij: #a:Type -> s:seq a -> Lemma
(requires (True))
(ensures (seq_of_list (seq_to_list s) == s))
val lemma_list_seq_bij: #a:Type -> l:list a -> Lemma
(requires (True))
(ensures (seq_to_list (seq_of_list l) == l))
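(* Usage sketch, not part of the library: because seq_to_list inverts
   seq_of_list, seq_of_list is injective; the name below is hypothetical. *)
let seq_of_list_injective (#a:Type) (l1 l2:list a)
  : Lemma (requires seq_of_list l1 == seq_of_list l2) (ensures l1 == l2)
  = lemma_list_seq_bij l1; lemma_list_seq_bij l2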
unfold let createL_post (#a:Type0) (l:list a) (s:seq a) : GTot Type0 =
normalize (L.length l = length s) /\ seq_to_list s == l /\ seq_of_list l == s
let createL (#a:Type0) (l:list a)
: Pure (seq a)
(requires True) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val createL (#a: Type0) (l: list a)
: Pure (seq a) (requires True) (ensures (fun s -> createL_post #a l s)) | [] | FStar.Seq.Properties.createL | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | l: Prims.list a -> Prims.Pure (FStar.Seq.Base.seq a) | {
"end_col": 3,
"end_line": 467,
"start_col": 1,
"start_line": 465
} |
Prims.GTot | val createL_post (#a: Type0) (l: list a) (s: seq a) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let createL_post (#a:Type0) (l:list a) (s:seq a) : GTot Type0 =
normalize (L.length l = length s) /\ seq_to_list s == l /\ seq_of_list l == s | val createL_post (#a: Type0) (l: list a) (s: seq a) : GTot Type0
let createL_post (#a: Type0) (l: list a) (s: seq a) : GTot Type0 = | false | null | false | normalize (L.length l = length s) /\ seq_to_list s == l /\ seq_of_list l == s | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"sometrivial"
] | [
"Prims.list",
"FStar.Seq.Base.seq",
"Prims.l_and",
"FStar.Pervasives.normalize",
"Prims.b2t",
"Prims.op_Equality",
"Prims.nat",
"FStar.List.Tot.Base.length",
"FStar.Seq.Base.length",
"Prims.eq2",
"FStar.Seq.Properties.seq_to_list",
"FStar.Seq.Properties.seq_of_list"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
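(* Usage sketch, not part of the library: mem is literally a positivity test
   on count, so a count bound gives membership immediately. *)
let count_pos_implies_mem (#a:eqtype) (x:a) (s:seq a)
  : Lemma (requires count x s > 0) (ensures mem x s)
  = ()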
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
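(* Usage sketch, not part of the library: the aux lemma gives the count of one
   element across an append directly; the wrapper name is hypothetical. *)
let count_append (#a:eqtype) (x:a) (lo hi:seq a)
  : Lemma (count x (append lo hi) = count x lo + count x hi)
  = lemma_append_count_aux x lo hi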
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
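(* Illustration only, not part of the library: integer <= satisfies this
   notion of total order; discharging the three conjuncts is left to the SMT
   solver. *)
let int_leq_total_order () : Lemma (total_order int (fun x y -> x <= y)) = ()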
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace_subseq: replaces the [i, j) sub-sequence of s with the given sub-sequence sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l)
val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x})
: Lemma (requires (mem x s))
(ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s))))
let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True)
(ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true))))
= None? (seq_find (fun i -> not (f i)) l)
val seq_mem_k: #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} ->
Lemma (requires True)
(ensures (mem (Seq.index s n) s))
[SMTPat (mem (Seq.index s n) s)]
module L = FStar.List.Tot
let rec seq_to_list (#a:Type) (s:seq a)
: Tot (l:list a{L.length l = length s})
(decreases (length s))
= if length s = 0 then []
else index s 0::seq_to_list (slice s 1 (length s))
[@@"opaque_to_smt"]
let rec seq_of_list (#a:Type) (l:list a) : Tot (s:seq a{L.length l = length s}) =
match l with
| [] -> Seq.empty #a
| hd::tl -> create 1 hd @| seq_of_list tl
val lemma_seq_of_list_induction (#a:Type) (l:list a)
:Lemma (requires True)
(ensures (let s = seq_of_list l in
match l with
| [] -> Seq.equal s empty
| hd::tl -> s == cons hd (seq_of_list tl) /\
head s == hd /\ tail s == (seq_of_list tl)))
val lemma_seq_list_bij: #a:Type -> s:seq a -> Lemma
(requires (True))
(ensures (seq_of_list (seq_to_list s) == s))
val lemma_list_seq_bij: #a:Type -> l:list a -> Lemma
(requires (True))
(ensures (seq_to_list (seq_of_list l) == l)) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val createL_post (#a: Type0) (l: list a) (s: seq a) : GTot Type0 | [] | FStar.Seq.Properties.createL_post | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | l: Prims.list a -> s: FStar.Seq.Base.seq a -> Prims.GTot Type0 | {
"end_col": 79,
"end_line": 459,
"start_col": 2,
"start_line": 459
} |
Prims.Tot | val find_r (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Tot (o: option a {Some? o ==> f (Some?.v o)}) (decreases (Seq.length l)) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix | val find_r (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Tot (o: option a {Some? o ==> f (Some?.v o)}) (decreases (Seq.length l))
let rec find_r (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Tot (o: option a {Some? o ==> f (Some?.v o)}) (decreases (Seq.length l)) = | false | null | false | if Seq.length l = 0
then None
else
let prefix, last = un_snoc l in
if f last then Some last else find_r f prefix | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total",
""
] | [
"Prims.bool",
"FStar.Seq.Base.seq",
"Prims.op_Equality",
"Prims.int",
"FStar.Seq.Base.length",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.Some",
"FStar.Seq.Properties.find_r",
"FStar.Pervasives.Native.option",
"Prims.l_imp",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.Pervasives.Native.__proj__Some__item__v",
"FStar.Pervasives.Native.tuple2",
"Prims.eq2",
"FStar.Seq.Properties.snoc",
"FStar.Pervasives.Native.fst",
"FStar.Pervasives.Native.snd",
"FStar.Seq.Properties.un_snoc"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
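(* Sketch only, not part of the library: the head of a cons is the consed
   element, by lemma_head_append plus the create/index axioms of
   FStar.Seq.Base; the name head_cons is hypothetical. *)
let head_cons (#a:Type) (x:a) (s:seq a)
  : Lemma (head (cons x s) == x)
  = lemma_head_append (create 1 x) s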
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
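(* Editor's sketch (hypothetical name, not part of the library): splicing over the
   whole index range keeps nothing of s1, so the result is pointwise equal to s2. *)
let splice_all_sketch (#a:Type) (s1:seq a) (s2:seq a{length s1 = length s2}) : seq a
  = splice s1 0 s2 (length s2)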
(* replace_subseq: replaces the [i,j) sub-sequence of s with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(* New additions, please review *)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
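(* Editor's sketch (hypothetical name, not part of the library): repeated snocs
   extend a sequence on the right, one element at a time. *)
let snoc_twice_sketch (s:seq int) : seq int = snoc (snoc s 1) 2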
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
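(* Editor's sketch (hypothetical name, not part of the library): find_l scans from
   the left and returns the first element satisfying the predicate, if any. *)
let find_even_sketch (s:seq int) : option int = find_l (fun x -> x % 2 = 0) s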
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
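(* Editor's sketch (hypothetical name, not part of the library): un_snoc inverts
   snoc; fst gives the prefix and snd the last element. *)
let last_via_un_snoc_sketch (s:seq int{length s > 0}) : int = snd (un_snoc s)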
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)}) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val find_r (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Tot (o: option a {Some? o ==> f (Some?.v o)}) (decreases (Seq.length l)) | [
"recursion"
] | FStar.Seq.Properties.find_r | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: (_: a -> Prims.bool) -> l: FStar.Seq.Base.seq a
-> Prims.Tot (o: FStar.Pervasives.Native.option a {Some? o ==> f (Some?.v o)}) | {
"end_col": 27,
"end_line": 380,
"start_col": 2,
"start_line": 377
} |
Prims.Tot | val find_l (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Tot (o: option a {Some? o ==> f (Some?.v o)}) (decreases (Seq.length l)) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l) | val find_l (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Tot (o: option a {Some? o ==> f (Some?.v o)}) (decreases (Seq.length l))
let rec find_l (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Tot (o: option a {Some? o ==> f (Some?.v o)}) (decreases (Seq.length l)) = | false | null | false | if Seq.length l = 0 then None else if f (head l) then Some (head l) else find_l f (tail l) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total",
""
] | [
"Prims.bool",
"FStar.Seq.Base.seq",
"Prims.op_Equality",
"Prims.int",
"FStar.Seq.Base.length",
"FStar.Pervasives.Native.None",
"FStar.Seq.Properties.head",
"FStar.Pervasives.Native.Some",
"FStar.Seq.Properties.find_l",
"FStar.Seq.Properties.tail",
"FStar.Pervasives.Native.option",
"Prims.l_imp",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.Pervasives.Native.__proj__Some__item__v"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
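(* Editor's sketch (hypothetical name, not part of the library): mem is a boolean
   view of count, so membership coincides with a strictly positive count. *)
let mem_count_sketch (x:int) (s:seq int) : bool = mem x s && count x s >= 1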
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)}) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val find_l (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Tot (o: option a {Some? o ==> f (Some?.v o)}) (decreases (Seq.length l)) | [
"recursion"
] | FStar.Seq.Properties.find_l | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: (_: a -> Prims.bool) -> l: FStar.Seq.Base.seq a
-> Prims.Tot (o: FStar.Pervasives.Native.option a {Some? o ==> f (Some?.v o)}) | {
"end_col": 24,
"end_line": 340,
"start_col": 2,
"start_line": 338
} |
Prims.Tot | val un_snoc (#a: Type) (s: seq a {length s <> 0}) : Tot (r: (seq a * a){s == snoc (fst r) (snd r)}) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0 | val un_snoc (#a: Type) (s: seq a {length s <> 0}) : Tot (r: (seq a * a){s == snoc (fst r) (snd r)})
let un_snoc (#a: Type) (s: seq a {length s <> 0}) : Tot (r: (seq a * a){s == snoc (fst r) (snd r)}) = | false | null | false | let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0 | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"Prims.b2t",
"Prims.op_disEquality",
"Prims.int",
"FStar.Seq.Base.length",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Seq.Base.index",
"Prims.unit",
"Prims._assert",
"FStar.Seq.Base.equal",
"FStar.Seq.Properties.snoc",
"FStar.Pervasives.Native.tuple2",
"Prims.eq2",
"FStar.Pervasives.Native.fst",
"FStar.Pervasives.Native.snd",
"FStar.Seq.Properties.split",
"Prims.op_Subtraction"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y))) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val un_snoc (#a: Type) (s: seq a {length s <> 0}) : Tot (r: (seq a * a){s == snoc (fst r) (snd r)}) | [] | FStar.Seq.Properties.un_snoc | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s: FStar.Seq.Base.seq a {FStar.Seq.Base.length s <> 0}
-> r:
(FStar.Seq.Base.seq a * a)
{ s ==
FStar.Seq.Properties.snoc (FStar.Pervasives.Native.fst r) (FStar.Pervasives.Native.snd r) } | {
"end_col": 19,
"end_line": 370,
"start_col": 97,
"start_line": 367
} |
Prims.Pure | val for_all (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Pure bool
(requires True)
(ensures
(fun b -> (b == true <==> (forall (i: nat{i < Seq.length l}). f (index l i) == true)))) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True)
(ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true))))
= None? (seq_find (fun i -> not (f i)) l) | val for_all (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Pure bool
(requires True)
(ensures
(fun b -> (b == true <==> (forall (i: nat{i < Seq.length l}). f (index l i) == true))))
let for_all (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Pure bool
(requires True)
(ensures
(fun b -> (b == true <==> (forall (i: nat{i < Seq.length l}). f (index l i) == true)))) = | false | null | false | None? (seq_find (fun i -> not (f i)) l) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [] | [
"Prims.bool",
"FStar.Seq.Base.seq",
"FStar.Pervasives.Native.uu___is_None",
"FStar.Seq.Properties.seq_find",
"Prims.op_Negation",
"Prims.l_True",
"Prims.l_iff",
"Prims.eq2",
"Prims.l_Forall",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"FStar.Seq.Base.index"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
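(* Illustrative use, not part of the original interface: once a permutation fact
   is established, e.g. via lemma_swap_permutes above, calling
     perm_len s (swap s i j)
   recovers that the two sequences have equal length. *)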
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
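(* Worked example, illustrative only (list notation used for readability): with
     s1 = [a;b;c;d], s2 = [w;x;y;z], i = 1, j = 3
   splice s1 1 s2 3 yields [a;x;y;d] (positions [1,3) are taken from s2), while
   replace_subseq s1 1 3 sub substitutes an explicit two-element sub in the same
   window instead. *)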
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
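(* Illustrative round trip, not part of the original interface: by un_snoc_snoc,
   un_snoc (snoc s x) == (s, x); e.g. for s = [1;2] (in list notation),
   un_snoc [1;2;3] == ([1;2], 3), and the refinement on un_snoc guarantees
   snoc [1;2] 3 == [1;2;3]. *)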
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l)
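(* Note added for clarity, not in the original: found i is a trivially true
   marker proposition. The call cut (found i) in seq_find_aux introduces it into
   the proof context so that the existential in the postcondition, whose
   quantifier pattern is {:pattern (found i)}, can be instantiated with the
   witness index i; callers of seq_find rely on the same trigger when they need
   to recover an index for the returned element. *)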
val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x})
: Lemma (requires (mem x s))
(ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s))))
let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val for_all (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Pure bool
(requires True)
(ensures
(fun b -> (b == true <==> (forall (i: nat{i < Seq.length l}). f (index l i) == true)))) | [] | FStar.Seq.Properties.for_all | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: (_: a -> Prims.bool) -> l: FStar.Seq.Base.seq a -> Prims.Pure Prims.bool | {
"end_col": 41,
"end_line": 421,
"start_col": 2,
"start_line": 421
} |
Prims.Tot | val foldr (#a #b: Type) (f: (b -> a -> Tot a)) (s: seq b) (init: a) : Tot a (decreases (length s)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec foldr (#a #b:Type) (f:b -> a -> Tot a) (s:seq b) (init:a)
: Tot a (decreases (length s))
= if length s = 0 then init
else f (head s) (foldr f (tail s) init) | val foldr (#a #b: Type) (f: (b -> a -> Tot a)) (s: seq b) (init: a) : Tot a (decreases (length s))
let rec foldr (#a #b: Type) (f: (b -> a -> Tot a)) (s: seq b) (init: a)
: Tot a (decreases (length s)) = | false | null | false | if length s = 0 then init else f (head s) (foldr f (tail s) init) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total",
""
] | [
"FStar.Seq.Base.seq",
"Prims.op_Equality",
"Prims.int",
"FStar.Seq.Base.length",
"Prims.bool",
"FStar.Seq.Properties.head",
"FStar.Seq.Properties.foldr",
"FStar.Seq.Properties.tail"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
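(* Example, illustrative only (list notation for readability): for s = [5;7;7],
   index_mem 7 s evaluates to 1, the first index at which 7 occurs, and the
   postcondition guarantees index s (index_mem 7 s) == 7. *)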
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l)
val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x})
: Lemma (requires (mem x s))
(ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s))))
let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True)
(ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true))))
= None? (seq_find (fun i -> not (f i)) l)
val seq_mem_k: #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} ->
Lemma (requires True)
(ensures (mem (Seq.index s n) s))
[SMTPat (mem (Seq.index s n) s)]
module L = FStar.List.Tot
let rec seq_to_list (#a:Type) (s:seq a)
: Tot (l:list a{L.length l = length s})
(decreases (length s))
= if length s = 0 then []
else index s 0::seq_to_list (slice s 1 (length s))
[@@"opaque_to_smt"]
let rec seq_of_list (#a:Type) (l:list a) : Tot (s:seq a{L.length l = length s}) =
match l with
| [] -> Seq.empty #a
| hd::tl -> create 1 hd @| seq_of_list tl
val lemma_seq_of_list_induction (#a:Type) (l:list a)
:Lemma (requires True)
(ensures (let s = seq_of_list l in
match l with
| [] -> Seq.equal s empty
| hd::tl -> s == cons hd (seq_of_list tl) /\
head s == hd /\ tail s == (seq_of_list tl)))
val lemma_seq_list_bij: #a:Type -> s:seq a -> Lemma
(requires (True))
(ensures (seq_of_list (seq_to_list s) == s))
val lemma_list_seq_bij: #a:Type -> l:list a -> Lemma
(requires (True))
(ensures (seq_to_list (seq_of_list l) == l))
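(* Illustrative consequence, not part of the original interface: the two lemmas
   above make seq_of_list and seq_to_list mutually inverse, so for instance
     seq_to_list (seq_of_list [1;2;3]) == [1;2;3]
   and, for any sequence s, seq_of_list (seq_to_list s) == s. *)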
unfold let createL_post (#a:Type0) (l:list a) (s:seq a) : GTot Type0 =
normalize (L.length l = length s) /\ seq_to_list s == l /\ seq_of_list l == s
let createL (#a:Type0) (l:list a)
: Pure (seq a)
(requires True)
(ensures (fun s -> createL_post #a l s))
= let s = seq_of_list l in
lemma_list_seq_bij l;
s
val lemma_index_is_nth: #a:Type -> s:seq a -> i:nat{i < length s} -> Lemma
(requires True)
(ensures (L.index (seq_to_list s) i == index s i))
////////////////////////////////////////////////////////////////////////////////
//s `contains` x : Type0
// An undecidable version of `mem`,
// for when the sequence payload is not an eqtype
////////////////////////////////////////////////////////////////////////////////
[@@ remove_unused_type_parameters [0; 1; 2]]
val contains (#a:Type) (s:seq a) (x:a) : Tot Type0
val contains_intro (#a:Type) (s:seq a) (k:nat) (x:a)
: Lemma (k < Seq.length s /\ Seq.index s k == x
==>
s `contains` x)
val contains_elim (#a:Type) (s:seq a) (x:a)
: Lemma (s `contains` x
==>
(exists (k:nat). k < Seq.length s /\ Seq.index s k == x))
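(* Note added for clarity, not in the original: contains is propositional, so it
   applies even when the element type is not an eqtype. For instance, for
   s : seq (int -> int) the proposition s `contains` f is well-formed, whereas
   mem f s would not typecheck because int -> int has no decidable equality. *)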
val lemma_contains_empty (#a:Type) : Lemma (forall (x:a). ~ (contains Seq.empty x))
val lemma_contains_singleton (#a:Type) (x:a) : Lemma (forall (y:a). contains (create 1 x) y ==> y == x)
val append_contains_equiv (#a:Type) (s1:seq a) (s2:seq a) (x:a)
: Lemma ((append s1 s2) `contains` x
<==>
(s1 `contains` x \/ s2 `contains` x))
val contains_snoc : #a:Type -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. (snoc s x) `contains` y <==> s `contains` y \/ x==y))
val lemma_find_l_contains (#a:Type) (f:a -> Tot bool) (l:seq a)
: Lemma (requires True) (ensures Some? (find_l f l) ==> l `contains` (Some?.v (find_l f l)))
val contains_cons (#a:Type) (hd:a) (tl:Seq.seq a) (x:a)
: Lemma ((cons hd tl) `contains` x
<==>
(x==hd \/ tl `contains` x))
val append_cons_snoc (#a:Type) (u: Seq.seq a) (x:a) (v:Seq.seq a)
: Lemma (Seq.equal (Seq.append u (cons x v))
(Seq.append (snoc u x) v))
val append_slices (#a:Type) (s1:Seq.seq a) (s2:Seq.seq a)
: Lemma ( Seq.equal s1 (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) /\
Seq.equal s2 (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length s1 + Seq.length s2)) /\
(forall (i:nat) (j:nat).
i <= j /\ j <= Seq.length s2 ==>
Seq.equal (Seq.slice s2 i j)
(Seq.slice (Seq.append s1 s2) (Seq.length s1 + i) (Seq.length s1 + j))))
val find_l_none_no_index (#a:Type) (s:Seq.seq a) (f:(a -> Tot bool)) :
Lemma (requires (None? (find_l f s)))
(ensures (forall (i:nat{i < Seq.length s}). not (f (Seq.index s i))))
(decreases (Seq.length s))
(** More properties, with new naming conventions *)
let suffix_of
(#a: Type)
(s_suff s: seq a)
= exists s_pref . (s == append s_pref s_suff)
val cons_head_tail
(#a: Type)
(s: seq a {length s > 0})
: Lemma
(requires True)
(ensures (s == cons (head s) (tail s)))
[SMTPat (cons (head s) (tail s))]
val head_cons
(#a: Type)
(x: a)
(s: seq a)
: Lemma
(ensures (head (cons x s) == x))
val suffix_of_tail
(#a: Type)
(s: seq a {length s > 0})
: Lemma
(requires True)
(ensures ((tail s) `suffix_of` s))
[SMTPat ((tail s) `suffix_of` s)]
val index_cons_l
(#a: Type)
(c: a)
(s: seq a)
: Lemma
(ensures (index (cons c s) 0 == c))
val index_cons_r
(#a: Type)
(c: a)
(s: seq a)
(i: nat {1 <= i /\ i <= length s})
: Lemma
(ensures (index (cons c s) i == index s (i - 1)))
val append_cons
(#a: Type)
(c: a)
(s1 s2: seq a)
: Lemma
(ensures (append (cons c s1) s2 == cons c (append s1 s2)))
val index_tail
(#a: Type)
(s: seq a {length s > 0})
(i: nat {i < length s - 1} )
: Lemma
(ensures (index (tail s) i == index s (i + 1)))
val mem_cons
(#a:eqtype)
(x:a)
(s:seq a)
: Lemma
(ensures (forall y. mem y (cons x s) <==> mem y s \/ x=y))
val snoc_slice_index
(#a: Type)
(s: seq a)
(i: nat)
(j: nat {i <= j /\ j < length s} )
: Lemma
(requires True)
(ensures (snoc (slice s i j) (index s j) == slice s i (j + 1)))
[SMTPat (snoc (slice s i j) (index s j))]
val cons_index_slice
(#a: Type)
(s: seq a)
(i: nat)
(j: nat {i < j /\ j <= length s} )
(k:nat{k == i+1})
: Lemma
(requires True)
(ensures (cons (index s i) (slice s k j) == slice s i j))
[SMTPat (cons (index s i) (slice s k j))]
val slice_is_empty
(#a: Type)
(s: seq a)
(i: nat {i <= length s})
: Lemma
(requires True)
(ensures (slice s i i == Seq.empty))
[SMTPat (slice s i i)]
val slice_length
(#a: Type)
(s: seq a)
: Lemma
(requires True)
(ensures (slice s 0 (length s) == s))
[SMTPat (slice s 0 (length s))]
val slice_slice
(#a: Type)
(s: seq a)
(i1: nat)
(j1: nat {i1 <= j1 /\ j1 <= length s} )
(i2: nat)
(j2: nat {i2 <= j2 /\ j2 <= j1 - i1} )
: Lemma
(requires True)
(ensures (slice (slice s i1 j1) i2 j2 == slice s (i1 + i2) (i1 + j2)))
[SMTPat (slice (slice s i1 j1) i2 j2)]
val lemma_seq_of_list_index (#a:Type) (l:list a) (i:nat{i < List.Tot.length l})
:Lemma (requires True)
(ensures (index (seq_of_list l) i == List.Tot.index l i))
[SMTPat (index (seq_of_list l) i)]
[@@(deprecated "seq_of_list")]
let of_list (#a:Type) (l:list a) :seq a = seq_of_list l
val seq_of_list_tl
(#a: Type)
(l: list a { List.Tot.length l > 0 } )
: Lemma
(requires True)
(ensures (seq_of_list (List.Tot.tl l) == tail (seq_of_list l)))
val mem_seq_of_list
(#a: eqtype)
(x: a)
(l: list a)
: Lemma
(requires True)
(ensures (mem x (seq_of_list l) == List.Tot.mem x l))
[SMTPat (mem x (seq_of_list l))]
(** Dealing efficiently with `seq_of_list` by meta-evaluating conjunctions over
an entire list. *)
let rec explode_and (#a: Type)
(i: nat)
(s: seq a { i <= length s })
(l: list a { List.Tot.length l + i = length s }):
Tot Type
(decreases (List.Tot.length l))
= match l with
| [] -> True
| hd :: tl -> index s i == hd /\ explode_and (i + 1) s tl
unfold
let pointwise_and s l =
norm [ iota; zeta; primops; delta_only [ `%(explode_and) ] ] (explode_and 0 s l)
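(* Sketch, illustrative only: for a concrete list, normalization unfolds
   pointwise_and into a plain conjunction of indexing facts, e.g.
     pointwise_and s [x; y]   normalizes to   index s 0 == x /\ (index s 1 == y /\ True)
   which is the shape that intro_of_list and elim_of_list below consume and
   produce. *)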
val intro_of_list': #a:Type ->
i:nat ->
s:seq a ->
l:list a ->
Lemma
(requires (
List.Tot.length l + i = length s /\
i <= length s /\
explode_and i s l))
(ensures (
equal (seq_of_list l) (slice s i (length s))))
val intro_of_list (#a: Type) (s: seq a) (l: list a):
Lemma
(requires (
List.Tot.length l = length s /\
pointwise_and s l))
(ensures (
s == seq_of_list l))
val elim_of_list': #a:Type ->
i:nat ->
s:seq a ->
l:list a ->
Lemma
(requires (
List.Tot.length l + i = length s /\
i <= length s /\
slice s i (length s) == seq_of_list l))
(ensures (
explode_and i s l))
val elim_of_list (#a: Type) (l: list a):
Lemma
(ensures (
let s = seq_of_list l in
pointwise_and s l))
(****** sortWith ******)
let sortWith (#a:eqtype) (f:a -> a -> Tot int) (s:seq a) :Tot (seq a)
= seq_of_list (List.Tot.Base.sortWith f (seq_to_list s))
val lemma_seq_to_list_permutation (#a:eqtype) (s:seq a)
:Lemma (requires True) (ensures (forall x. count x s == List.Tot.Base.count x (seq_to_list s))) (decreases (length s))
val lemma_seq_of_list_permutation (#a:eqtype) (l:list a)
:Lemma (forall x. List.Tot.Base.count x l == count x (seq_of_list l))
val lemma_seq_of_list_sorted (#a:Type) (f:a -> a -> Tot bool) (l:list a)
:Lemma (requires (List.Tot.Properties.sorted f l)) (ensures (sorted f (seq_of_list l)))
val lemma_seq_sortwith_correctness (#a:eqtype) (f:a -> a -> Tot int) (s:seq a)
:Lemma (requires (total_order a (List.Tot.Base.bool_of_compare f)))
(ensures (let s' = sortWith f s in sorted (List.Tot.Base.bool_of_compare f) s' /\ permutation a s s'))
(* sort_lseq:
   A wrapper of Seq.sortWith which proves that the output sequence
   is a sorted permutation of the input sequence with the same length
*)
let sort_lseq (#a:eqtype) #n (f:tot_ord a) (s:lseq a n)
: s':lseq a n{sorted f s' /\ permutation a s s'} =
lemma_seq_sortwith_correctness (L.compare_of_bool f) s;
let s' = sortWith (L.compare_of_bool f) s in
perm_len s s';
sorted_feq f (L.bool_of_compare (L.compare_of_bool f)) s';
s'
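(* Illustrative usage, hypothetical names, not part of the original interface:
   given some leq : tot_ord nat and s : lseq nat 3,
     sort_lseq leq s
   has type s':lseq nat 3{sorted leq s' /\ permutation nat s s'}, i.e. the
   result is sorted, has the same length, and is a permutation of s. *)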
let rec foldr (#a #b:Type) (f:b -> a -> Tot a) (s:seq b) (init:a) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val foldr (#a #b: Type) (f: (b -> a -> Tot a)) (s: seq b) (init: a) : Tot a (decreases (length s)) | [
"recursion"
] | FStar.Seq.Properties.foldr | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: (_: b -> _: a -> a) -> s: FStar.Seq.Base.seq b -> init: a -> Prims.Tot a | {
"end_col": 43,
"end_line": 756,
"start_col": 4,
"start_line": 755
} |
Prims.Tot | val splice
(#a: Type)
(s1: seq a)
(i: nat)
(s2: seq a {length s1 = length s2})
(j: nat{i <= j /\ j <= (length s2)})
: Tot (seq a) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1))) | val splice
(#a: Type)
(s1: seq a)
(i: nat)
(s2: seq a {length s1 = length s2})
(j: nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
let splice
(#a: Type)
(s1: seq a)
(i: nat)
(s2: seq a {length s1 = length s2})
(j: nat{i <= j /\ j <= (length s2)})
: Tot (seq a) = | false | null | false | Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1))) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"Prims.nat",
"Prims.b2t",
"Prims.op_Equality",
"FStar.Seq.Base.length",
"Prims.l_and",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.append",
"FStar.Seq.Base.slice"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know the `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)}) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val splice
(#a: Type)
(s1: seq a)
(i: nat)
(s2: seq a {length s1 = length s2})
(j: nat{i <= j /\ j <= (length s2)})
: Tot (seq a) | [] | FStar.Seq.Properties.splice | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
s1: FStar.Seq.Base.seq a ->
i: Prims.nat ->
s2: FStar.Seq.Base.seq a {FStar.Seq.Base.length s1 = FStar.Seq.Base.length s2} ->
j: Prims.nat{i <= j /\ j <= FStar.Seq.Base.length s2}
-> FStar.Seq.Base.seq a | {
"end_col": 80,
"end_line": 254,
"start_col": 2,
"start_line": 254
} |
Prims.Tot | val foldr_snoc (#a #b: Type) (f: (b -> a -> Tot a)) (s: seq b) (init: a)
: Tot a (decreases (length s)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec foldr_snoc (#a #b:Type) (f:b -> a -> Tot a) (s:seq b) (init:a)
: Tot a (decreases (length s))
= if length s = 0 then init
else let s, last = un_snoc s in
f last (foldr_snoc f s init) | val foldr_snoc (#a #b: Type) (f: (b -> a -> Tot a)) (s: seq b) (init: a)
: Tot a (decreases (length s))
let rec foldr_snoc (#a #b: Type) (f: (b -> a -> Tot a)) (s: seq b) (init: a)
: Tot a (decreases (length s)) = | false | null | false | if length s = 0
then init
else
let s, last = un_snoc s in
f last (foldr_snoc f s init) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total",
""
] | [
"FStar.Seq.Base.seq",
"Prims.op_Equality",
"Prims.int",
"FStar.Seq.Base.length",
"Prims.bool",
"FStar.Seq.Properties.foldr_snoc",
"FStar.Pervasives.Native.tuple2",
"Prims.eq2",
"FStar.Seq.Properties.snoc",
"FStar.Pervasives.Native.fst",
"FStar.Pervasives.Native.snd",
"FStar.Seq.Properties.un_snoc"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
   `x` in `s`, given that we know `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
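(* Editor's note (illustrative addition, not part of the original interface):
   since the recursion walks the sequence from the head, `index_mem` returns the
   *first* index at which `x` occurs.  A hedged sketch, not re-verified here:
     // index_mem 1 (cons 1 (cons 2 (cons 1 empty))) = 0
*)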
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
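(* Editor's note (illustrative addition): `sorted f s` holds iff every adjacent
   pair of elements is ordered by `f`.  For example, with integer `<=`,
     // sorted (fun (x y:int) -> x <= y) (cons 1 (cons 2 empty))
   is expected to evaluate to true (a sketch, not re-verified here). *)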
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
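(* Editor's note (illustrative addition): integer `<=` is the canonical instance
   of `total_order`/`tot_ord`.  A hedged sketch, assuming the solver discharges
   the refinement:
     // let int_le : tot_ord int = fun x y -> x <= y
*)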
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
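(* Editor's note (illustrative addition): concretely, `splice s1 i s2 j` keeps
   `s1` on [0, i) and [j, length s1) and takes `s2` on [i, j); with i = 1 and
   j = 2 it replaces exactly the element at index 1 of `s1` by that of `s2`. *)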
(* replace with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l)
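(* Editor's note (descriptive comment added for clarity): the trivial predicate
   `found i` above is used only as an SMT pattern, so the postconditions of
   `seq_find_aux` and `seq_find` can exhibit a witness index without creating a
   real proof obligation.  A hedged usage sketch, not re-verified here:
     // match seq_find (fun (x:int) -> x > 1) s with
     // | Some x -> () (* x > 1 and x occurs in s *)
     // | None   -> () (* no element of s exceeds 1 *)
*)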
val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x})
: Lemma (requires (mem x s))
(ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s))))
let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True)
(ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true))))
= None? (seq_find (fun i -> not (f i)) l)
val seq_mem_k: #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} ->
Lemma (requires True)
(ensures (mem (Seq.index s n) s))
[SMTPat (mem (Seq.index s n) s)]
module L = FStar.List.Tot
let rec seq_to_list (#a:Type) (s:seq a)
: Tot (l:list a{L.length l = length s})
(decreases (length s))
= if length s = 0 then []
else index s 0::seq_to_list (slice s 1 (length s))
[@@"opaque_to_smt"]
let rec seq_of_list (#a:Type) (l:list a) : Tot (s:seq a{L.length l = length s}) =
match l with
| [] -> Seq.empty #a
| hd::tl -> create 1 hd @| seq_of_list tl
val lemma_seq_of_list_induction (#a:Type) (l:list a)
:Lemma (requires True)
(ensures (let s = seq_of_list l in
match l with
| [] -> Seq.equal s empty
| hd::tl -> s == cons hd (seq_of_list tl) /\
head s == hd /\ tail s == (seq_of_list tl)))
val lemma_seq_list_bij: #a:Type -> s:seq a -> Lemma
(requires (True))
(ensures (seq_of_list (seq_to_list s) == s))
val lemma_list_seq_bij: #a:Type -> l:list a -> Lemma
(requires (True))
(ensures (seq_to_list (seq_of_list l) == l))
unfold let createL_post (#a:Type0) (l:list a) (s:seq a) : GTot Type0 =
normalize (L.length l = length s) /\ seq_to_list s == l /\ seq_of_list l == s
let createL (#a:Type0) (l:list a)
: Pure (seq a)
(requires True)
(ensures (fun s -> createL_post #a l s))
= let s = seq_of_list l in
lemma_list_seq_bij l;
s
val lemma_index_is_nth: #a:Type -> s:seq a -> i:nat{i < length s} -> Lemma
(requires True)
(ensures (L.index (seq_to_list s) i == index s i))
////////////////////////////////////////////////////////////////////////////////
//s `contains` x : Type0
// An undecidable version of `mem`,
// for when the sequence payload is not an eqtype
////////////////////////////////////////////////////////////////////////////////
[@@ remove_unused_type_parameters [0; 1; 2]]
val contains (#a:Type) (s:seq a) (x:a) : Tot Type0
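(* Editor's note (descriptive comment added for clarity): `contains` yields a
   proposition rather than a boolean because membership cannot be decided
   without an `eqtype` payload (e.g. for a sequence of functions); it is
   reasoned about through `contains_intro` / `contains_elim` below. *)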
val contains_intro (#a:Type) (s:seq a) (k:nat) (x:a)
: Lemma (k < Seq.length s /\ Seq.index s k == x
==>
s `contains` x)
val contains_elim (#a:Type) (s:seq a) (x:a)
: Lemma (s `contains` x
==>
(exists (k:nat). k < Seq.length s /\ Seq.index s k == x))
val lemma_contains_empty (#a:Type) : Lemma (forall (x:a). ~ (contains Seq.empty x))
val lemma_contains_singleton (#a:Type) (x:a) : Lemma (forall (y:a). contains (create 1 x) y ==> y == x)
val append_contains_equiv (#a:Type) (s1:seq a) (s2:seq a) (x:a)
: Lemma ((append s1 s2) `contains` x
<==>
(s1 `contains` x \/ s2 `contains` x))
val contains_snoc : #a:Type -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. (snoc s x) `contains` y <==> s `contains` y \/ x==y))
val lemma_find_l_contains (#a:Type) (f:a -> Tot bool) (l:seq a)
: Lemma (requires True) (ensures Some? (find_l f l) ==> l `contains` (Some?.v (find_l f l)))
val contains_cons (#a:Type) (hd:a) (tl:Seq.seq a) (x:a)
: Lemma ((cons hd tl) `contains` x
<==>
(x==hd \/ tl `contains` x))
val append_cons_snoc (#a:Type) (u: Seq.seq a) (x:a) (v:Seq.seq a)
: Lemma (Seq.equal (Seq.append u (cons x v))
(Seq.append (snoc u x) v))
val append_slices (#a:Type) (s1:Seq.seq a) (s2:Seq.seq a)
: Lemma ( Seq.equal s1 (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) /\
Seq.equal s2 (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length s1 + Seq.length s2)) /\
(forall (i:nat) (j:nat).
i <= j /\ j <= Seq.length s2 ==>
Seq.equal (Seq.slice s2 i j)
(Seq.slice (Seq.append s1 s2) (Seq.length s1 + i) (Seq.length s1 + j))))
val find_l_none_no_index (#a:Type) (s:Seq.seq a) (f:(a -> Tot bool)) :
Lemma (requires (None? (find_l f s)))
(ensures (forall (i:nat{i < Seq.length s}). not (f (Seq.index s i))))
(decreases (Seq.length s))
(** More properties, with new naming conventions *)
let suffix_of
(#a: Type)
(s_suff s: seq a)
= exists s_pref . (s == append s_pref s_suff)
val cons_head_tail
(#a: Type)
(s: seq a {length s > 0})
: Lemma
(requires True)
(ensures (s == cons (head s) (tail s)))
[SMTPat (cons (head s) (tail s))]
val head_cons
(#a: Type)
(x: a)
(s: seq a)
: Lemma
(ensures (head (cons x s) == x))
val suffix_of_tail
(#a: Type)
(s: seq a {length s > 0})
: Lemma
(requires True)
(ensures ((tail s) `suffix_of` s))
[SMTPat ((tail s) `suffix_of` s)]
val index_cons_l
(#a: Type)
(c: a)
(s: seq a)
: Lemma
(ensures (index (cons c s) 0 == c))
val index_cons_r
(#a: Type)
(c: a)
(s: seq a)
(i: nat {1 <= i /\ i <= length s})
: Lemma
(ensures (index (cons c s) i == index s (i - 1)))
val append_cons
(#a: Type)
(c: a)
(s1 s2: seq a)
: Lemma
(ensures (append (cons c s1) s2 == cons c (append s1 s2)))
val index_tail
(#a: Type)
(s: seq a {length s > 0})
(i: nat {i < length s - 1} )
: Lemma
(ensures (index (tail s) i == index s (i + 1)))
val mem_cons
(#a:eqtype)
(x:a)
(s:seq a)
: Lemma
(ensures (forall y. mem y (cons x s) <==> mem y s \/ x=y))
val snoc_slice_index
(#a: Type)
(s: seq a)
(i: nat)
(j: nat {i <= j /\ j < length s} )
: Lemma
(requires True)
(ensures (snoc (slice s i j) (index s j) == slice s i (j + 1)))
[SMTPat (snoc (slice s i j) (index s j))]
val cons_index_slice
(#a: Type)
(s: seq a)
(i: nat)
(j: nat {i < j /\ j <= length s} )
(k:nat{k == i+1})
: Lemma
(requires True)
(ensures (cons (index s i) (slice s k j) == slice s i j))
[SMTPat (cons (index s i) (slice s k j))]
val slice_is_empty
(#a: Type)
(s: seq a)
(i: nat {i <= length s})
: Lemma
(requires True)
(ensures (slice s i i == Seq.empty))
[SMTPat (slice s i i)]
val slice_length
(#a: Type)
(s: seq a)
: Lemma
(requires True)
(ensures (slice s 0 (length s) == s))
[SMTPat (slice s 0 (length s))]
val slice_slice
(#a: Type)
(s: seq a)
(i1: nat)
(j1: nat {i1 <= j1 /\ j1 <= length s} )
(i2: nat)
(j2: nat {i2 <= j2 /\ j2 <= j1 - i1} )
: Lemma
(requires True)
(ensures (slice (slice s i1 j1) i2 j2 == slice s (i1 + i2) (i1 + j2)))
[SMTPat (slice (slice s i1 j1) i2 j2)]
val lemma_seq_of_list_index (#a:Type) (l:list a) (i:nat{i < List.Tot.length l})
:Lemma (requires True)
(ensures (index (seq_of_list l) i == List.Tot.index l i))
[SMTPat (index (seq_of_list l) i)]
[@@(deprecated "seq_of_list")]
let of_list (#a:Type) (l:list a) :seq a = seq_of_list l
val seq_of_list_tl
(#a: Type)
(l: list a { List.Tot.length l > 0 } )
: Lemma
(requires True)
(ensures (seq_of_list (List.Tot.tl l) == tail (seq_of_list l)))
val mem_seq_of_list
(#a: eqtype)
(x: a)
(l: list a)
: Lemma
(requires True)
(ensures (mem x (seq_of_list l) == List.Tot.mem x l))
[SMTPat (mem x (seq_of_list l))]
(** Dealing efficiently with `seq_of_list` by meta-evaluating conjunctions over
an entire list. *)
let rec explode_and (#a: Type)
(i: nat)
(s: seq a { i <= length s })
(l: list a { List.Tot.length l + i = length s }):
Tot Type
(decreases (List.Tot.length l))
= match l with
| [] -> True
| hd :: tl -> index s i == hd /\ explode_and (i + 1) s tl
unfold
let pointwise_and s l =
norm [ iota; zeta; primops; delta_only [ `%(explode_and) ] ] (explode_and 0 s l)
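(* Editor's note (illustrative addition): for a two-element list, `pointwise_and
   s [x; y]` normalizes to `index s 0 == x /\ (index s 1 == y /\ True)`, so
   `intro_of_list` / `elim_of_list` below relate `s` and `seq_of_list l` one
   cell at a time, without unfolding `seq_of_list` by hand. *)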
val intro_of_list': #a:Type ->
i:nat ->
s:seq a ->
l:list a ->
Lemma
(requires (
List.Tot.length l + i = length s /\
i <= length s /\
explode_and i s l))
(ensures (
equal (seq_of_list l) (slice s i (length s))))
val intro_of_list (#a: Type) (s: seq a) (l: list a):
Lemma
(requires (
List.Tot.length l = length s /\
pointwise_and s l))
(ensures (
s == seq_of_list l))
val elim_of_list': #a:Type ->
i:nat ->
s:seq a ->
l:list a ->
Lemma
(requires (
List.Tot.length l + i = length s /\
i <= length s /\
slice s i (length s) == seq_of_list l))
(ensures (
explode_and i s l))
val elim_of_list (#a: Type) (l: list a):
Lemma
(ensures (
let s = seq_of_list l in
pointwise_and s l))
(****** sortWith ******)
let sortWith (#a:eqtype) (f:a -> a -> Tot int) (s:seq a) :Tot (seq a)
= seq_of_list (List.Tot.Base.sortWith f (seq_to_list s))
val lemma_seq_to_list_permutation (#a:eqtype) (s:seq a)
:Lemma (requires True) (ensures (forall x. count x s == List.Tot.Base.count x (seq_to_list s))) (decreases (length s))
val lemma_seq_of_list_permutation (#a:eqtype) (l:list a)
:Lemma (forall x. List.Tot.Base.count x l == count x (seq_of_list l))
val lemma_seq_of_list_sorted (#a:Type) (f:a -> a -> Tot bool) (l:list a)
:Lemma (requires (List.Tot.Properties.sorted f l)) (ensures (sorted f (seq_of_list l)))
val lemma_seq_sortwith_correctness (#a:eqtype) (f:a -> a -> Tot int) (s:seq a)
:Lemma (requires (total_order a (List.Tot.Base.bool_of_compare f)))
(ensures (let s' = sortWith f s in sorted (List.Tot.Base.bool_of_compare f) s' /\ permutation a s s'))
(* sort_lseq:
   A wrapper of Seq.sortWith which proves that the output sequence
is a sorted permutation of the input sequence with the same length
*)
let sort_lseq (#a:eqtype) #n (f:tot_ord a) (s:lseq a n)
: s':lseq a n{sorted f s' /\ permutation a s s'} =
lemma_seq_sortwith_correctness (L.compare_of_bool f) s;
let s' = sortWith (L.compare_of_bool f) s in
perm_len s s';
sorted_feq f (L.bool_of_compare (L.compare_of_bool f)) s';
s'
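(* Editor's note (illustrative addition): a hedged sketch of instantiating
   `sort_lseq` with the integer ordering, assuming the solver accepts `<=` as a
   `tot_ord int` (not re-verified here):
     // let sort_int (#n:nat) (s:lseq int n) = sort_lseq #int #n (fun x y -> x <= y) s
*)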
let rec foldr (#a #b:Type) (f:b -> a -> Tot a) (s:seq b) (init:a)
: Tot a (decreases (length s))
= if length s = 0 then init
else f (head s) (foldr f (tail s) init)
let rec foldr_snoc (#a #b:Type) (f:b -> a -> Tot a) (s:seq b) (init:a) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val foldr_snoc (#a #b: Type) (f: (b -> a -> Tot a)) (s: seq b) (init: a)
: Tot a (decreases (length s)) | [
"recursion"
] | FStar.Seq.Properties.foldr_snoc | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: (_: b -> _: a -> a) -> s: FStar.Seq.Base.seq b -> init: a -> Prims.Tot a | {
"end_col": 37,
"end_line": 762,
"start_col": 4,
"start_line": 760
} |
Prims.Pure | val seq_find (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Pure (option a)
(requires True)
(ensures
(function
| None -> forall (i: nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x ->
f x /\
(exists (i: nat{i < Seq.length l}). {:pattern (found i)} found i /\ x == Seq.index l i))
) | [
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l) | val seq_find (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Pure (option a)
(requires True)
(ensures
(function
| None -> forall (i: nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x ->
f x /\
(exists (i: nat{i < Seq.length l}). {:pattern (found i)} found i /\ x == Seq.index l i))
)
let seq_find (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Pure (option a)
(requires True)
(ensures
(function
| None -> forall (i: nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x ->
f x /\
(exists (i: nat{i < Seq.length l}). {:pattern (found i)} found i /\ x == Seq.index l i))
) = | false | null | false | seq_find_aux f l (Seq.length l) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [] | [
"Prims.bool",
"FStar.Seq.Base.seq",
"FStar.Seq.Properties.seq_find_aux",
"FStar.Seq.Base.length",
"FStar.Pervasives.Native.option",
"Prims.l_True",
"Prims.l_Forall",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"Prims.op_Negation",
"FStar.Seq.Base.index",
"Prims.l_and",
"Prims.l_Exists",
"FStar.Seq.Properties.found",
"Prims.eq2"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
   `x` in `s`, given that we know `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)} | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val seq_find (#a: Type) (f: (a -> Tot bool)) (l: seq a)
: Pure (option a)
(requires True)
(ensures
(function
| None -> forall (i: nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x ->
f x /\
(exists (i: nat{i < Seq.length l}). {:pattern (found i)} found i /\ x == Seq.index l i))
) | [] | FStar.Seq.Properties.seq_find | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: (_: a -> Prims.bool) -> l: FStar.Seq.Base.seq a -> Prims.Pure (FStar.Pervasives.Native.option a) | {
"end_col": 33,
"end_line": 408,
"start_col": 2,
"start_line": 408
} |
Prims.Tot | val sort_lseq (#a: eqtype) (#n: _) (f: tot_ord a) (s: lseq a n)
: s': lseq a n {sorted f s' /\ permutation a s s'} | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let sort_lseq (#a:eqtype) #n (f:tot_ord a) (s:lseq a n)
: s':lseq a n{sorted f s' /\ permutation a s s'} =
lemma_seq_sortwith_correctness (L.compare_of_bool f) s;
let s' = sortWith (L.compare_of_bool f) s in
perm_len s s';
sorted_feq f (L.bool_of_compare (L.compare_of_bool f)) s';
s' | val sort_lseq (#a: eqtype) (#n: _) (f: tot_ord a) (s: lseq a n)
: s': lseq a n {sorted f s' /\ permutation a s s'}
let sort_lseq (#a: eqtype) #n (f: tot_ord a) (s: lseq a n)
: s': lseq a n {sorted f s' /\ permutation a s s'} = | false | null | false | lemma_seq_sortwith_correctness (L.compare_of_bool f) s;
let s' = sortWith (L.compare_of_bool f) s in
perm_len s s';
sorted_feq f (L.bool_of_compare (L.compare_of_bool f)) s';
s' | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"Prims.eqtype",
"Prims.nat",
"FStar.Seq.Properties.tot_ord",
"FStar.Seq.Properties.lseq",
"Prims.unit",
"FStar.Seq.Properties.sorted_feq",
"FStar.List.Tot.Base.bool_of_compare",
"FStar.List.Tot.Base.compare_of_bool",
"FStar.Seq.Properties.perm_len",
"FStar.Seq.Base.seq",
"FStar.Seq.Properties.sortWith",
"FStar.Seq.Properties.lemma_seq_sortwith_correctness",
"Prims.l_and",
"Prims.b2t",
"FStar.Seq.Properties.sorted",
"FStar.Seq.Properties.permutation"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
   `x` in `s`, given that we know `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace_subseq s i j sub: replaces the [i, j) sub-sequence of s with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
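(* Illustrative sketch, not part of the original interface: with
s1 = seq_of_list [1;2;3;4] and s2 = seq_of_list [9;9;9;9],
splice s1 1 s2 3 keeps s1 outside [1, 3) and takes s2 inside it, giving a
sequence equal to seq_of_list [1;9;9;4]; likewise
replace_subseq s1 1 3 (seq_of_list [7;8]) replaces the [1, 3) range of s1
with the given sub-sequence, giving seq_of_list [1;7;8;4]. *)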
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
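(* Illustrative sketch, not part of the original interface: snoc appends one
element at the end and un_snoc splits it back off, e.g.
snoc (seq_of_list [1;2]) 3 == seq_of_list [1;2;3] and
un_snoc (seq_of_list [1;2;3]) == (seq_of_list [1;2], 3);
the latter round trip is exactly what un_snoc_snoc below states. *)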
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l)
val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x})
: Lemma (requires (mem x s))
(ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s))))
let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True)
(ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true))))
= None? (seq_find (fun i -> not (f i)) l)
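(* Illustrative sketch, not part of the original interface:
for_all (fun x -> x > 0) (seq_of_list [1;2;3]) evaluates to true, and
seq_find (fun x -> x > 2) (seq_of_list [1;2;3]) returns Some 3; by the
postcondition, any Some result satisfies the predicate and occurs in the
sequence. *)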
val seq_mem_k: #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} ->
Lemma (requires True)
(ensures (mem (Seq.index s n) s))
[SMTPat (mem (Seq.index s n) s)]
module L = FStar.List.Tot
let rec seq_to_list (#a:Type) (s:seq a)
: Tot (l:list a{L.length l = length s})
(decreases (length s))
= if length s = 0 then []
else index s 0::seq_to_list (slice s 1 (length s))
[@@"opaque_to_smt"]
let rec seq_of_list (#a:Type) (l:list a) : Tot (s:seq a{L.length l = length s}) =
match l with
| [] -> Seq.empty #a
| hd::tl -> create 1 hd @| seq_of_list tl
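(* Illustrative sketch, not part of the original interface:
seq_of_list [1;2;3] is a three-element sequence; lemma_seq_of_list_index
below gives index (seq_of_list [1;2;3]) 0 == 1, and lemma_list_seq_bij
gives seq_to_list (seq_of_list [1;2;3]) == [1;2;3]. *)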
val lemma_seq_of_list_induction (#a:Type) (l:list a)
:Lemma (requires True)
(ensures (let s = seq_of_list l in
match l with
| [] -> Seq.equal s empty
| hd::tl -> s == cons hd (seq_of_list tl) /\
head s == hd /\ tail s == (seq_of_list tl)))
val lemma_seq_list_bij: #a:Type -> s:seq a -> Lemma
(requires (True))
(ensures (seq_of_list (seq_to_list s) == s))
val lemma_list_seq_bij: #a:Type -> l:list a -> Lemma
(requires (True))
(ensures (seq_to_list (seq_of_list l) == l))
unfold let createL_post (#a:Type0) (l:list a) (s:seq a) : GTot Type0 =
normalize (L.length l = length s) /\ seq_to_list s == l /\ seq_of_list l == s
let createL (#a:Type0) (l:list a)
: Pure (seq a)
(requires True)
(ensures (fun s -> createL_post #a l s))
= let s = seq_of_list l in
lemma_list_seq_bij l;
s
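(* Illustrative note, not part of the original interface: createL [1;2;3]
builds the same sequence as seq_of_list [1;2;3]; the createL_post
refinement additionally records both round trips with seq_to_list and
seq_of_list. *)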
val lemma_index_is_nth: #a:Type -> s:seq a -> i:nat{i < length s} -> Lemma
(requires True)
(ensures (L.index (seq_to_list s) i == index s i))
////////////////////////////////////////////////////////////////////////////////
//s `contains` x : Type0
// An undecidable version of `mem`,
// for when the sequence payload is not an eqtype
////////////////////////////////////////////////////////////////////////////////
[@@ remove_unused_type_parameters [0; 1; 2]]
val contains (#a:Type) (s:seq a) (x:a) : Tot Type0
val contains_intro (#a:Type) (s:seq a) (k:nat) (x:a)
: Lemma (k < Seq.length s /\ Seq.index s k == x
==>
s `contains` x)
val contains_elim (#a:Type) (s:seq a) (x:a)
: Lemma (s `contains` x
==>
(exists (k:nat). k < Seq.length s /\ Seq.index s k == x))
val lemma_contains_empty (#a:Type) : Lemma (forall (x:a). ~ (contains Seq.empty x))
val lemma_contains_singleton (#a:Type) (x:a) : Lemma (forall (y:a). contains (create 1 x) y ==> y == x)
val append_contains_equiv (#a:Type) (s1:seq a) (s2:seq a) (x:a)
: Lemma ((append s1 s2) `contains` x
<==>
(s1 `contains` x \/ s2 `contains` x))
val contains_snoc : #a:Type -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. (snoc s x) `contains` y <==> s `contains` y \/ x==y))
val lemma_find_l_contains (#a:Type) (f:a -> Tot bool) (l:seq a)
: Lemma (requires True) (ensures Some? (find_l f l) ==> l `contains` (Some?.v (find_l f l)))
val contains_cons (#a:Type) (hd:a) (tl:Seq.seq a) (x:a)
: Lemma ((cons hd tl) `contains` x
<==>
(x==hd \/ tl `contains` x))
val append_cons_snoc (#a:Type) (u: Seq.seq a) (x:a) (v:Seq.seq a)
: Lemma (Seq.equal (Seq.append u (cons x v))
(Seq.append (snoc u x) v))
val append_slices (#a:Type) (s1:Seq.seq a) (s2:Seq.seq a)
: Lemma ( Seq.equal s1 (Seq.slice (Seq.append s1 s2) 0 (Seq.length s1)) /\
Seq.equal s2 (Seq.slice (Seq.append s1 s2) (Seq.length s1) (Seq.length s1 + Seq.length s2)) /\
(forall (i:nat) (j:nat).
i <= j /\ j <= Seq.length s2 ==>
Seq.equal (Seq.slice s2 i j)
(Seq.slice (Seq.append s1 s2) (Seq.length s1 + i) (Seq.length s1 + j))))
val find_l_none_no_index (#a:Type) (s:Seq.seq a) (f:(a -> Tot bool)) :
Lemma (requires (None? (find_l f s)))
(ensures (forall (i:nat{i < Seq.length s}). not (f (Seq.index s i))))
(decreases (Seq.length s))
(** More properties, with new naming conventions *)
let suffix_of
(#a: Type)
(s_suff s: seq a)
= exists s_pref . (s == append s_pref s_suff)
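(* Illustrative sketch, not part of the original interface:
seq_of_list [2;3] `suffix_of` seq_of_list [1;2;3] holds, with
seq_of_list [1] as the existential prefix witness; suffix_of_tail below
states the general fact for `tail s`. *)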
val cons_head_tail
(#a: Type)
(s: seq a {length s > 0})
: Lemma
(requires True)
(ensures (s == cons (head s) (tail s)))
[SMTPat (cons (head s) (tail s))]
val head_cons
(#a: Type)
(x: a)
(s: seq a)
: Lemma
(ensures (head (cons x s) == x))
val suffix_of_tail
(#a: Type)
(s: seq a {length s > 0})
: Lemma
(requires True)
(ensures ((tail s) `suffix_of` s))
[SMTPat ((tail s) `suffix_of` s)]
val index_cons_l
(#a: Type)
(c: a)
(s: seq a)
: Lemma
(ensures (index (cons c s) 0 == c))
val index_cons_r
(#a: Type)
(c: a)
(s: seq a)
(i: nat {1 <= i /\ i <= length s})
: Lemma
(ensures (index (cons c s) i == index s (i - 1)))
val append_cons
(#a: Type)
(c: a)
(s1 s2: seq a)
: Lemma
(ensures (append (cons c s1) s2 == cons c (append s1 s2)))
val index_tail
(#a: Type)
(s: seq a {length s > 0})
(i: nat {i < length s - 1} )
: Lemma
(ensures (index (tail s) i == index s (i + 1)))
val mem_cons
(#a:eqtype)
(x:a)
(s:seq a)
: Lemma
(ensures (forall y. mem y (cons x s) <==> mem y s \/ x=y))
val snoc_slice_index
(#a: Type)
(s: seq a)
(i: nat)
(j: nat {i <= j /\ j < length s} )
: Lemma
(requires True)
(ensures (snoc (slice s i j) (index s j) == slice s i (j + 1)))
[SMTPat (snoc (slice s i j) (index s j))]
val cons_index_slice
(#a: Type)
(s: seq a)
(i: nat)
(j: nat {i < j /\ j <= length s} )
(k:nat{k == i+1})
: Lemma
(requires True)
(ensures (cons (index s i) (slice s k j) == slice s i j))
[SMTPat (cons (index s i) (slice s k j))]
val slice_is_empty
(#a: Type)
(s: seq a)
(i: nat {i <= length s})
: Lemma
(requires True)
(ensures (slice s i i == Seq.empty))
[SMTPat (slice s i i)]
val slice_length
(#a: Type)
(s: seq a)
: Lemma
(requires True)
(ensures (slice s 0 (length s) == s))
[SMTPat (slice s 0 (length s))]
val slice_slice
(#a: Type)
(s: seq a)
(i1: nat)
(j1: nat {i1 <= j1 /\ j1 <= length s} )
(i2: nat)
(j2: nat {i2 <= j2 /\ j2 <= j1 - i1} )
: Lemma
(requires True)
(ensures (slice (slice s i1 j1) i2 j2 == slice s (i1 + i2) (i1 + j2)))
[SMTPat (slice (slice s i1 j1) i2 j2)]
val lemma_seq_of_list_index (#a:Type) (l:list a) (i:nat{i < List.Tot.length l})
:Lemma (requires True)
(ensures (index (seq_of_list l) i == List.Tot.index l i))
[SMTPat (index (seq_of_list l) i)]
[@@(deprecated "seq_of_list")]
let of_list (#a:Type) (l:list a) :seq a = seq_of_list l
val seq_of_list_tl
(#a: Type)
(l: list a { List.Tot.length l > 0 } )
: Lemma
(requires True)
(ensures (seq_of_list (List.Tot.tl l) == tail (seq_of_list l)))
val mem_seq_of_list
(#a: eqtype)
(x: a)
(l: list a)
: Lemma
(requires True)
(ensures (mem x (seq_of_list l) == List.Tot.mem x l))
[SMTPat (mem x (seq_of_list l))]
(** Dealing efficiently with `seq_of_list` by meta-evaluating conjunctions over
an entire list. *)
let rec explode_and (#a: Type)
(i: nat)
(s: seq a { i <= length s })
(l: list a { List.Tot.length l + i = length s }):
Tot Type
(decreases (List.Tot.length l))
= match l with
| [] -> True
| hd :: tl -> index s i == hd /\ explode_and (i + 1) s tl
unfold
let pointwise_and s l =
norm [ iota; zeta; primops; delta_only [ `%(explode_and) ] ] (explode_and 0 s l)
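(* Illustrative sketch, not part of the original interface: for a
two-element list, the normalizer unfolds pointwise_and s [x; y] to the
flat conjunction index s 0 == x /\ (index s 1 == y /\ True), which is the
shape that intro_of_list and elim_of_list below consume and produce. *)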
val intro_of_list': #a:Type ->
i:nat ->
s:seq a ->
l:list a ->
Lemma
(requires (
List.Tot.length l + i = length s /\
i <= length s /\
explode_and i s l))
(ensures (
equal (seq_of_list l) (slice s i (length s))))
val intro_of_list (#a: Type) (s: seq a) (l: list a):
Lemma
(requires (
List.Tot.length l = length s /\
pointwise_and s l))
(ensures (
s == seq_of_list l))
val elim_of_list': #a:Type ->
i:nat ->
s:seq a ->
l:list a ->
Lemma
(requires (
List.Tot.length l + i = length s /\
i <= length s /\
slice s i (length s) == seq_of_list l))
(ensures (
explode_and i s l))
val elim_of_list (#a: Type) (l: list a):
Lemma
(ensures (
let s = seq_of_list l in
pointwise_and s l))
(****** sortWith ******)
let sortWith (#a:eqtype) (f:a -> a -> Tot int) (s:seq a) :Tot (seq a)
= seq_of_list (List.Tot.Base.sortWith f (seq_to_list s))
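(* Illustrative note, not part of the original interface: sortWith reuses
List.Tot.Base.sortWith on seq_to_list s and converts back, so sorting
seq_of_list [3;1;2] with an integer comparison f (for which
List.Tot.Base.bool_of_compare f is a total order) yields a permutation of
the input that is sorted for that boolean order; this is exactly what
lemma_seq_sortwith_correctness below establishes. *)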
val lemma_seq_to_list_permutation (#a:eqtype) (s:seq a)
:Lemma (requires True) (ensures (forall x. count x s == List.Tot.Base.count x (seq_to_list s))) (decreases (length s))
val lemma_seq_of_list_permutation (#a:eqtype) (l:list a)
:Lemma (forall x. List.Tot.Base.count x l == count x (seq_of_list l))
val lemma_seq_of_list_sorted (#a:Type) (f:a -> a -> Tot bool) (l:list a)
:Lemma (requires (List.Tot.Properties.sorted f l)) (ensures (sorted f (seq_of_list l)))
val lemma_seq_sortwith_correctness (#a:eqtype) (f:a -> a -> Tot int) (s:seq a)
:Lemma (requires (total_order a (List.Tot.Base.bool_of_compare f)))
(ensures (let s' = sortWith f s in sorted (List.Tot.Base.bool_of_compare f) s' /\ permutation a s s'))
(* sort_lseq:
A wrapper of Seq.sortWith which proves that the output sequence
is a sorted permutation of the input sequence with the same length
*)
let sort_lseq (#a:eqtype) #n (f:tot_ord a) (s:lseq a n) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val sort_lseq (#a: eqtype) (#n: _) (f: tot_ord a) (s: lseq a n)
: s': lseq a n {sorted f s' /\ permutation a s s'} | [] | FStar.Seq.Properties.sort_lseq | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: FStar.Seq.Properties.tot_ord a -> s: FStar.Seq.Properties.lseq a n
-> s':
FStar.Seq.Properties.lseq a n
{FStar.Seq.Properties.sorted f s' /\ FStar.Seq.Properties.permutation a s s'} | {
"end_col": 4,
"end_line": 751,
"start_col": 2,
"start_line": 747
} |
Prims.Tot | val seq_to_list (#a: Type) (s: seq a)
: Tot (l: list a {L.length l = length s}) (decreases (length s)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec seq_to_list (#a:Type) (s:seq a)
: Tot (l:list a{L.length l = length s})
(decreases (length s))
= if length s = 0 then []
else index s 0::seq_to_list (slice s 1 (length s)) | val seq_to_list (#a: Type) (s: seq a)
: Tot (l: list a {L.length l = length s}) (decreases (length s))
let rec seq_to_list (#a: Type) (s: seq a)
: Tot (l: list a {L.length l = length s}) (decreases (length s)) = | false | null | false | if length s = 0 then [] else index s 0 :: seq_to_list (slice s 1 (length s)) | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total",
""
] | [
"FStar.Seq.Base.seq",
"Prims.op_Equality",
"Prims.int",
"FStar.Seq.Base.length",
"Prims.Nil",
"Prims.bool",
"Prims.Cons",
"FStar.Seq.Base.index",
"FStar.Seq.Properties.seq_to_list",
"FStar.Seq.Base.slice",
"Prims.list",
"Prims.b2t",
"Prims.nat",
"FStar.List.Tot.Base.length"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know that `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
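(* Illustrative sketch, not part of the original interface (sequences
written as lists for readability): count 2 [1;2;2] = 2, hence
mem 2 [1;2;2] = true and index_mem 2 [1;2;2] = 1 (the first matching
index); swap [1;2;3] 0 2 is pointwise equal to [3;2;1]. *)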
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace_subseq s i j sub: replaces the [i, j) sub-sequence of s with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l)
val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x})
: Lemma (requires (mem x s))
(ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s))))
let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True)
(ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true))))
= None? (seq_find (fun i -> not (f i)) l)
val seq_mem_k: #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} ->
Lemma (requires True)
(ensures (mem (Seq.index s n) s))
[SMTPat (mem (Seq.index s n) s)]
module L = FStar.List.Tot
let rec seq_to_list (#a:Type) (s:seq a)
: Tot (l:list a{L.length l = length s}) | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val seq_to_list (#a: Type) (s: seq a)
: Tot (l: list a {L.length l = length s}) (decreases (length s)) | [
"recursion"
] | FStar.Seq.Properties.seq_to_list | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s: FStar.Seq.Base.seq a
-> Prims.Tot (l: Prims.list a {FStar.List.Tot.Base.length l = FStar.Seq.Base.length s}) | {
"end_col": 52,
"end_line": 434,
"start_col": 2,
"start_line": 433
} |
Prims.Tot | val seq_of_list (#a: Type) (l: list a) : Tot (s: seq a {L.length l = length s}) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "FStar.Seq.Base",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "FStar.Seq.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec seq_of_list (#a:Type) (l:list a) : Tot (s:seq a{L.length l = length s}) =
match l with
| [] -> Seq.empty #a
| hd::tl -> create 1 hd @| seq_of_list tl | val seq_of_list (#a: Type) (l: list a) : Tot (s: seq a {L.length l = length s})
let rec seq_of_list (#a: Type) (l: list a) : Tot (s: seq a {L.length l = length s}) = | false | null | false | match l with
| [] -> Seq.empty #a
| hd :: tl -> create 1 hd @| seq_of_list tl | {
"checked_file": "FStar.Seq.Properties.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.Properties.fst.checked",
"FStar.List.Tot.Base.fst.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Properties.fsti"
} | [
"total"
] | [
"Prims.list",
"FStar.Seq.Base.empty",
"FStar.Seq.Base.op_At_Bar",
"FStar.Seq.Base.create",
"FStar.Seq.Properties.seq_of_list",
"FStar.Seq.Base.seq",
"Prims.b2t",
"Prims.op_Equality",
"Prims.nat",
"FStar.List.Tot.Base.length",
"FStar.Seq.Base.length"
] | [] | (*
Copyright 2008-2014 Nikhil Swamy and Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Seq.Properties
open FStar.Seq.Base
module Seq = FStar.Seq.Base
let lseq (a: Type) (l: nat) : Tot Type =
s: Seq.seq a { Seq.length s == l }
let indexable (#a:Type) (s:Seq.seq a) (j:int) = 0 <= j /\ j < Seq.length s
val lemma_append_inj_l: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s1}
-> Lemma (index s1 i == index t1 i)
val lemma_append_inj_r: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a{length s1 = length t1 /\ length s2 = length t2 /\ equal (append s1 s2) (append t1 t2)} -> i:nat{i < length s2}
-> Lemma (ensures (index s2 i == index t2 i))
val lemma_append_len_disj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {(length s1 = length t1 \/ length s2 = length t2) /\ (equal (append s1 s2) (append t1 t2))}
-> Lemma (ensures (length s1 = length t1 /\ length s2 = length t2))
val lemma_append_inj: #a:Type -> s1:seq a -> s2:seq a -> t1:seq a -> t2:seq a {length s1 = length t1 \/ length s2 = length t2}
-> Lemma (requires (equal (append s1 s2) (append t1 t2)))
(ensures (equal s1 t1 /\ equal s2 t2))
let head (#a:Type) (s:seq a{length s > 0}) : Tot a = index s 0
let tail (#a:Type) (s:seq a{length s > 0}) : Tot (seq a) = slice s 1 (length s)
val lemma_head_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(head (append s1 s2) == head s1)
val lemma_tail_append: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(tail (append s1 s2) == append (tail s1) s2)
let last (#a:Type) (s:seq a{length s > 0}) : Tot a = index s (length s - 1)
let cons (#a:Type) (x:a) (s:seq a) : Tot (seq a) = append (create 1 x) s
val lemma_cons_inj: #a:Type -> v1:a -> v2:a -> s1:seq a -> s2:seq a
-> Lemma (requires (equal (cons v1 s1) (cons v2 s2)))
(ensures (v1 == v2 /\ equal s1 s2))
let split (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)}) : Tot (seq a * seq a)
= slice s 0 i, slice s i (length s)
val lemma_split : #a:Type -> s:seq a -> i:nat{(0 <= i /\ i <= length s)} -> Lemma
(ensures (append (fst (split s i)) (snd (split s i)) == s))
let split_eq (#a:Type) (s:seq a) (i:nat{(0 <= i /\ i <= length s)})
: Pure
(seq a * seq a)
(requires True)
(ensures (fun x -> (append (fst x) (snd x) == s)))
= let x = split s i in
lemma_split s i;
x
let rec count (#a:eqtype) (x:a) (s:seq a) : Tot nat (decreases (length s))
= if length s = 0 then 0
else if head s = x
then 1 + count x (tail s)
else count x (tail s)
let mem (#a:eqtype) (x:a) (l:seq a) : Tot bool = count x l > 0
val mem_index (#a:eqtype) (x:a) (s:seq a)
: Lemma (requires (mem x s))
(ensures (exists i. index s i == x))
(* index_mem:
A utility function that finds the first index of
`x` in `s`, given that we know that `x` is actually contained in `s` *)
let rec index_mem (#a:eqtype) (x:a) (s:seq a)
: Pure nat
(requires (mem x s))
(ensures (fun i -> i < length s /\ index s i == x))
(decreases (length s))
= if head s = x then 0
else 1 + index_mem x (tail s)
let swap (#a:Type) (s:seq a) (i:nat{i<length s}) (j:nat{j<length s}) : Tot (seq a)
= upd (upd s j (index s i)) i (index s j)
val lemma_slice_append: #a:Type -> s1:seq a{length s1 >= 1} -> s2:seq a -> Lemma
(ensures (equal (append s1 s2) (append (slice s1 0 1) (append (slice s1 1 (length s1)) s2))))
val lemma_slice_first_in_append: #a:Type -> s1:seq a -> s2:seq a -> i:nat{i <= length s1} -> Lemma
(ensures (equal (slice (append s1 s2) i (length (append s1 s2))) (append (slice s1 i (length s1)) s2)))
val slice_upd: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < length s} -> v:a -> Lemma
(requires k < i \/ j <= k)
(ensures slice (upd s k v) i j == slice s i j)
[SMTPat (slice (upd s k v) i j)]
val upd_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i <= j /\ j <= length s}
-> k:nat{k < j - i} -> v:a -> Lemma
(requires i + k < j)
(ensures upd (slice s i j) k v == slice (upd s (i + k) v) i j)
[SMTPat (upd (slice s i j) k v)]
// TODO: should be renamed cons_head_append, or something like that (because it is NOT related to (append (cons _ _) _))
val lemma_append_cons: #a:Type -> s1:seq a{length s1 > 0} -> s2:seq a -> Lemma
(requires True)
(ensures (equal (append s1 s2) (cons (head s1) (append (tail s1) s2))))
val lemma_tl: #a:Type -> hd:a -> tl:seq a -> Lemma
(ensures (equal (tail (cons hd tl)) tl))
let rec sorted (#a:Type) (f:a -> a -> Tot bool) (s:seq a)
: Tot bool (decreases (length s))
= if length s <= 1
then true
else let hd = head s in
f hd (index s 1) && sorted f (tail s)
val sorted_feq (#a:Type)
(f g : (a -> a -> Tot bool))
(s:seq a{forall x y. f x y == g x y})
: Lemma (ensures (sorted f s <==> sorted g s))
val lemma_append_count: #a:eqtype -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (forall x. count x (append lo hi) = (count x lo + count x hi)))
val lemma_append_count_aux: #a:eqtype -> x:a -> lo:seq a -> hi:seq a -> Lemma
(requires True)
(ensures (count x (append lo hi) = (count x lo + count x hi)))
val lemma_mem_inversion: #a:eqtype -> s:seq a{length s > 0} -> Lemma
(ensures (forall x. mem x s = (x=head s || mem x (tail s))))
val lemma_mem_count: #a:eqtype -> s:seq a -> f:(a -> Tot bool) -> Lemma
(requires (forall (i:nat{i<length s}). f (index s i)))
(ensures (forall (x:a). mem x s ==> f x))
val lemma_count_slice: #a:eqtype -> s:seq a -> i:nat{i<=length s} -> Lemma
(requires True)
(ensures (forall x. count x s = count x (slice s 0 i) + count x (slice s i (length s))))
type total_order (a:eqtype) (f: (a -> a -> Tot bool)) =
(forall a. f a a) (* reflexivity *)
/\ (forall a1 a2. (f a1 a2 /\ a1<>a2) <==> not (f a2 a1)) (* anti-symmetry *)
/\ (forall a1 a2 a3. f a1 a2 /\ f a2 a3 ==> f a1 a3) (* transitivity *)
type tot_ord (a:eqtype) = f:(a -> a -> Tot bool){total_order a f}
val sorted_concat_lemma: #a:eqtype
-> f:(a -> a -> Tot bool){total_order a f}
-> lo:seq a{sorted f lo}
-> pivot:a
-> hi:seq a{sorted f hi}
-> Lemma (requires (forall y. (mem y lo ==> f y pivot)
/\ (mem y hi ==> f pivot y)))
(ensures (sorted f (append lo (cons pivot hi))))
val split_5 : #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j < length s} -> Pure (seq (seq a))
(requires True)
(ensures (fun x ->
(length x = 5
/\ equal s (append (index x 0) (append (index x 1) (append (index x 2) (append (index x 3) (index x 4)))))
/\ equal (index x 0) (slice s 0 i)
/\ equal (index x 1) (slice s i (i+1))
/\ equal (index x 2) (slice s (i+1) j)
/\ equal (index x 3) (slice s j (j + 1))
/\ equal (index x 4) (slice s (j + 1) (length s)))))
val lemma_swap_permutes_aux_frag_eq: #a:Type -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s}
-> i':nat -> j':nat{i' <= j' /\ j'<=length s /\
(j < i' //high slice
\/ j' <= i //low slice
\/ (i < i' /\ j' <= j)) //mid slice
}
-> Lemma (ensures (slice s i' j' == slice (swap s i j) i' j'
/\ slice s i (i + 1) == slice (swap s i j) j (j + 1)
/\ slice s j (j + 1) == slice (swap s i j) i (i + 1)))
val lemma_swap_permutes_aux: #a:eqtype -> s:seq a -> i:nat{i<length s} -> j:nat{i <= j && j<length s} -> x:a -> Lemma
(requires True)
(ensures (count x s = count x (swap s i j)))
type permutation (a:eqtype) (s1:seq a) (s2:seq a) =
(forall i. count i s1 = count i s2)
val append_permutations: #a:eqtype -> s1:seq a -> s2:seq a -> s1':seq a -> s2':seq a -> Lemma
(requires permutation a s1 s1' /\ permutation a s2 s2')
(ensures permutation a (append s1 s2) (append s1' s2'))
val lemma_swap_permutes (#a:eqtype) (s:seq a) (i:nat{i<length s}) (j:nat{i <= j && j<length s})
: Lemma (permutation a s (swap s i j))
(* perm_len:
A lemma that shows that two sequences that are permutations
of each other also have the same length
*)
val perm_len (#a:eqtype) (s1 s2: seq a)
: Lemma (requires (permutation a s1 s2))
(ensures (length s1 == length s2))
val cons_perm: #a:eqtype -> tl:seq a -> s:seq a{length s > 0} ->
Lemma (requires (permutation a tl (tail s)))
(ensures (permutation a (cons (head s) tl) s))
val lemma_mem_append : #a:eqtype -> s1:seq a -> s2:seq a
-> Lemma (ensures (forall x. mem x (append s1 s2) <==> (mem x s1 || mem x s2)))
val lemma_slice_cons: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s i || mem x (slice s (i + 1) j))))
val lemma_slice_snoc: #a:eqtype -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma (ensures (forall x. mem x (slice s i j) <==> (x = index s (j - 1) || mem x (slice s i (j - 1)))))
val lemma_ordering_lo_snoc: #a:eqtype -> f:tot_ord a -> s:seq a -> i:nat -> j:nat{i <= j && j < length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s i j) ==> f y pv) /\ f (index s j) pv))
(ensures ((forall y. mem y (slice s i (j + 1)) ==> f y pv)))
val lemma_ordering_hi_cons: #a:eqtype -> f:tot_ord a -> s:seq a -> back:nat -> len:nat{back < len && len <= length s} -> pv:a
-> Lemma (requires ((forall y. mem y (slice s (back + 1) len) ==> f pv y) /\ f pv (index s back)))
(ensures ((forall y. mem y (slice s back len) ==> f pv y)))
val swap_frame_lo : #a:Type -> s:seq a -> lo:nat -> i:nat{lo <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i == slice (swap s i j) lo i))
val swap_frame_lo' : #a:Type -> s:seq a -> lo:nat -> i':nat {lo <= i'} -> i:nat{i' <= i} -> j:nat{i <= j && j < length s}
-> Lemma (ensures (slice s lo i' == slice (swap s i j) lo i'))
val swap_frame_hi : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j} -> k:nat{j < k} -> hi:nat{k <= hi /\ hi <= length s}
-> Lemma (ensures (slice s k hi == slice (swap s i j) k hi))
val lemma_swap_slice_commute : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (slice (swap s i j) start len == (swap (slice s start len) (i - start) (j - start))))
val lemma_swap_permutes_slice : #a:eqtype -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma (ensures (permutation a (slice s start len) (slice (swap s i j) start len)))
(* replaces the [i,j) sub-sequence of s1 with the corresponding sub-sequence of s2 *)
let splice (#a:Type) (s1:seq a) (i:nat) (s2:seq a{length s1=length s2}) (j:nat{i <= j /\ j <= (length s2)})
: Tot (seq a)
= Seq.append (slice s1 0 i) (Seq.append (slice s2 i j) (slice s1 j (length s1)))
(* replace_subseq s i j sub: replaces the [i, j) sub-sequence of s with sub *)
let replace_subseq (#a:Type0) (s:Seq.seq a) (i:nat) (j:nat{i <= j /\ j <= length s}) (sub:Seq.seq a{length sub == j - i}) :Tot (Seq.seq a)
= Seq.append (Seq.slice s 0 i) (Seq.append sub (Seq.slice s j (Seq.length s)))
val splice_refl : #a:Type -> s:seq a -> i:nat -> j:nat{i <= j && j <= length s}
-> Lemma
(ensures (s == splice s i s j))
val lemma_swap_splice : #a:Type -> s:seq a -> start:nat -> i:nat{start <= i} -> j:nat{i <= j} -> len:nat{j < len && len <= length s}
-> Lemma
(ensures (swap s i j == splice s start (swap s i j) len))
val lemma_seq_frame_hi: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j <= m} -> n:nat{m < n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 i s1 j)))
(ensures ((slice s1 m n == slice s2 m n) /\ (index s1 m == index s2 m)))
val lemma_seq_frame_lo: #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat{i <= j} -> m:nat{j < m} -> n:nat{m <= n && n <= length s1}
-> Lemma
(requires (s1 == (splice s2 m s1 n)))
(ensures ((slice s1 i j == slice s2 i j) /\ (index s1 j == index s2 j)))
val lemma_tail_slice: #a:Type -> s:seq a -> i:nat -> j:nat{i < j && j <= length s}
-> Lemma
(requires True)
(ensures (tail (slice s i j) == slice s (i + 1) j))
[SMTPat (tail (slice s i j))]
val lemma_weaken_frame_right : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j))
(ensures (s1 == splice s2 i s1 k))
val lemma_weaken_frame_left : #a:Type -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j && j <= k && k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k))
(ensures (s1 == splice s2 i s1 k))
val lemma_trans_frame : #a:Type -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i <= j && j <= length s1}
-> Lemma
(requires ((s1 == splice s2 i s1 j) /\ s2 == splice s3 i s2 j))
(ensures (s1 == splice s3 i s1 j))
val lemma_weaken_perm_left: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 j s1 k /\ permutation a (slice s2 j k) (slice s1 j k)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_weaken_perm_right: #a:eqtype -> s1:seq a -> s2:seq a{length s1 = length s2} -> i:nat -> j:nat -> k:nat{i <= j /\ j <= k /\ k <= length s1}
-> Lemma
(requires (s1 == splice s2 i s1 j /\ permutation a (slice s2 i j) (slice s1 i j)))
(ensures (permutation a (slice s2 i k) (slice s1 i k)))
val lemma_trans_perm: #a:eqtype -> s1:seq a -> s2:seq a -> s3:seq a{length s1 = length s2 /\ length s2 = length s3} -> i:nat -> j:nat{i<=j && j <= length s1}
-> Lemma
(requires (permutation a (slice s1 i j) (slice s2 i j)
/\ permutation a (slice s2 i j) (slice s3 i j)))
(ensures (permutation a (slice s1 i j) (slice s3 i j)))
(*New additions, please review*)
let snoc (#a:Type) (s:seq a) (x:a) : Tot (seq a) = Seq.append s (Seq.create 1 x)
val lemma_cons_snoc (#a:Type) (hd:a) (s:Seq.seq a) (tl:a)
: Lemma (requires True)
(ensures (Seq.equal (cons hd (snoc s tl))
(snoc (cons hd s) tl)))
val lemma_tail_snoc: #a:Type -> s:Seq.seq a{Seq.length s > 0} -> x:a
-> Lemma (ensures (tail (snoc s x) == snoc (tail s) x))
val lemma_snoc_inj: #a:Type -> s1:seq a -> s2:seq a -> v1:a -> v2:a
-> Lemma (requires (equal (snoc s1 v1) (snoc s2 v2)))
(ensures (v1 == v2 /\ equal s1 s2))
val lemma_mem_snoc : #a:eqtype -> s:Seq.seq a -> x:a ->
Lemma (ensures (forall y. mem y (snoc s x) <==> mem y s \/ x=y))
let rec find_l (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else find_l f (tail l)
let rec ghost_find_l (#a:Type) (f:a -> GTot bool) (l:seq a)
: GTot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else if f (head l) then Some (head l)
else ghost_find_l f (tail l)
val find_append_some: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (Some? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_append_none: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s1)))
(ensures (find_l f (append s1 s2) == find_l f s2))
val find_append_none_s2: #a:Type -> s1:seq a -> s2:seq a -> f:(a -> Tot bool) -> Lemma
(requires (None? (find_l f s2)))
(ensures (find_l f (append s1 s2) == find_l f s1))
val find_snoc: #a:Type -> s:Seq.seq a -> x:a -> f:(a -> Tot bool)
-> Lemma (ensures (let res = find_l f (snoc s x) in
match res with
| None -> find_l f s == None /\ not (f x)
| Some y -> res == find_l f s \/ (f x /\ x==y)))
let un_snoc (#a:Type) (s:seq a{length s <> 0}) : Tot (r:(seq a * a){s == snoc (fst r) (snd r)}) =
let s', a = split s (length s - 1) in
assert (Seq.equal (snoc s' (Seq.index a 0)) s);
s', Seq.index a 0
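(* Illustrative sketch (hypothetical helper): the refinement on the result of
   [un_snoc] already records that re-appending the split-off element restores
   the original sequence. *)
let un_snoc_example (#a:Type) (s:seq a{length s <> 0})
  : Lemma (s == snoc (fst (un_snoc s)) (snd (un_snoc s)))
  = ()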
val un_snoc_snoc (#a:Type) (s:seq a) (x:a) : Lemma (un_snoc (snoc s x) == (s, x))
let rec find_r (#a:Type) (f:a -> Tot bool) (l:seq a)
: Tot (o:option a{Some? o ==> f (Some?.v o)})
(decreases (Seq.length l))
= if Seq.length l = 0 then None
else let prefix, last = un_snoc l in
if f last then Some last
else find_r f prefix
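(* Illustrative sketch (hypothetical helper), mirroring the left-to-right
   case: a right-to-left search of the empty sequence also fails, again by a
   single unfolding step. *)
let find_r_empty_example (#a:Type) (f:a -> Tot bool)
  : Lemma (find_r f (empty #a) == None)
  = ()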
type found (i:nat) = True
let rec seq_find_aux (#a:Type) (f:a -> Tot bool) (l:seq a) (ctr:nat{ctr <= Seq.length l})
: Pure (option a)
(requires (forall (i:nat{ i < Seq.length l /\ i >= ctr}).
not (f (Seq.index l i) )))
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}). {:pattern (found i)}
found i /\ x == Seq.index l i)))
= match ctr with
| 0 -> None
| _ -> let i = ctr - 1 in
if f (Seq.index l i)
then (
cut (found i);
Some (Seq.index l i))
else seq_find_aux f l i
let seq_find (#a:Type) (f:a -> Tot bool) (l:seq a)
: Pure (option a)
(requires True)
(ensures (function
| None -> forall (i:nat{i < Seq.length l}). not (f (Seq.index l i))
| Some x -> f x /\ (exists (i:nat{i < Seq.length l}).{:pattern (found i)}
found i /\ x == Seq.index l i)))
= seq_find_aux f l (Seq.length l)
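(* Illustrative usage sketch (hypothetical helper): the postcondition of
   [seq_find] guarantees that any returned witness satisfies the predicate,
   so the assertion below is expected to follow directly from its spec. *)
let seq_find_usage_example (#a:Type) (f:a -> Tot bool) (l:seq a) : Tot unit =
  match seq_find f l with
  | None -> ()
  | Some x -> assert (f x)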
val find_mem (#a:eqtype) (s:seq a) (f:a -> Tot bool) (x:a{f x})
: Lemma (requires (mem x s))
(ensures (Some? (seq_find f s) /\ f (Some?.v (seq_find f s))))
let for_all
(#a: Type)
(f: (a -> Tot bool))
(l: seq a)
: Pure bool
(requires True)
(ensures (fun b -> (b == true <==> (forall (i: nat {i < Seq.length l} ) . f (index l i) == true))))
= None? (seq_find (fun i -> not (f i)) l)
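(* Illustrative sketch (hypothetical helper): a [true] result from [for_all]
   can be reflected back into a quantified logical fact via the
   bi-implication in its postcondition. *)
let for_all_example (#a:Type) (f:a -> Tot bool) (l:seq a) : Tot unit =
  if for_all f l
  then assert (forall (i:nat{i < length l}). f (index l i))
  else ()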
val seq_mem_k: #a:eqtype -> s:seq a -> n:nat{n < Seq.length s} ->
Lemma (requires True)
(ensures (mem (Seq.index s n) s))
[SMTPat (mem (Seq.index s n) s)]
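(* Illustrative sketch (hypothetical helper): the SMT pattern on [seq_mem_k]
   should make membership of any in-bounds element available automatically. *)
let mem_index_example (#a:eqtype) (s:seq a{length s > 0}) : Tot unit =
  assert (mem (index s 0) s)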
module L = FStar.List.Tot
let rec seq_to_list (#a:Type) (s:seq a)
: Tot (l:list a{L.length l = length s})
(decreases (length s))
= if length s = 0 then []
else index s 0::seq_to_list (slice s 1 (length s))
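(* Illustrative sketch (hypothetical helper): converting the empty sequence
   yields the empty list; one unfolding of the recursive definition should
   suffice. *)
let seq_to_list_empty_example (#a:Type)
  : Lemma (seq_to_list (empty #a) == [])
  = ()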
[@@"opaque_to_smt"] | false | false | FStar.Seq.Properties.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val seq_of_list (#a: Type) (l: list a) : Tot (s: seq a {L.length l = length s}) | [
"recursion"
] | FStar.Seq.Properties.seq_of_list | {
"file_name": "ulib/FStar.Seq.Properties.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | l: Prims.list a -> s: FStar.Seq.Base.seq a {FStar.List.Tot.Base.length l = FStar.Seq.Base.length s} | {
"end_col": 43,
"end_line": 440,
"start_col": 2,
"start_line": 438
} |