hexsha
stringlengths 40
40
| size
int64 4
1.05M
| content
stringlengths 4
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
26e16cb32abb9fcd4ac59d5273b29109bbf92011 | 8,859 | use std::marker::PhantomData;
use rustc_hash::FxHashSet;
use super::{OpMut, OpOwned};
use crate::facts;
use crate::index::{EdgeIndex, IndexType, VertexIndex};
use crate::infra::CompactIndexMap;
use crate::marker::{Direction, Outgoing, Undirected};
use crate::traits::*;
use crate::{Vertices, VerticesMut};
/// Adapter presenting the complement of an undirected graph: same vertex set,
/// with an edge exactly where the underlying graph has none.
#[derive(Debug, Vertices, VerticesMut)]
pub struct Complement<V, E, G> {
    /// Underlying graph; `Vertices`/`VerticesMut` are delegated to it via the
    /// derive macros above.
    #[graph]
    graph: G,
    /// Attribute shared by every edge that exists only in the complement.
    edge: E,
    /// Carries the otherwise-unused vertex attribute type parameter `V`.
    ty: PhantomData<V>,
}
impl<V, E, G> Complement<V, E, G>
where
G: Vertices<V> + Edges<E, Undirected>,
{
pub fn new(graph: G, edge: E) -> Self {
Self {
graph,
edge,
ty: PhantomData,
}
}
pub fn into_unmodified(self) -> G {
self.graph
}
pub fn edge_count(&self) -> usize {
facts::complete_graph_edge_count::<Undirected>(self.vertex_count())
- self.graph.edge_count()
}
}
/// Materializes the complement into an existing mutable graph `result`.
impl<V, E, G1, G2> OpMut<G2> for Complement<V, E, G1>
where
    G1: Vertices<V> + Edges<E, Undirected>,
    G2: VerticesMut<V> + EdgesMut<E, Undirected>,
    V: Clone,
    E: Clone,
{
    fn apply_mut(self, result: &mut G2) {
        // Make sure that the result graph is initially empty.
        result.clear();
        let vertex_map = self.graph.vertex_index_map();
        let mut cur = 0;
        for v in self.graph.vertices() {
            let idx = result.add_vertex(v.data().clone());
            // Assumption: adding vertices to the result graph generates index
            // sequence going from zero with step 1.
            debug_assert!(idx.to_usize() == cur, "unexpected behavior of `add_vertex`");
            cur += 1;
        }
        // Add an edge for every unordered pair {u, v} (u < v guards against
        // visiting a pair twice) that is NOT connected in the source graph.
        for u in self.graph.vertex_indices() {
            for v in self.graph.vertex_indices() {
                if u.to_usize() < v.to_usize() && self.graph.edge_index(u, v).is_none() {
                    // NOTE(review): `virt` presumably maps a (possibly sparse)
                    // source index to a compact 0..n index matching the order
                    // vertices were added above — confirm in CompactIndexMap.
                    let u = vertex_map.virt(u).to_usize().into();
                    let v = vertex_map.virt(v).to_usize().into();
                    result.add_edge(u, v, self.edge.clone());
                }
            }
        }
    }
}
/// Builds a fresh graph holding the complement, sized up front.
impl<V, E, G> OpOwned<G> for Complement<V, E, G>
where
    G: VerticesMut<V> + EdgesMut<E, Undirected> + Create<V, E, Undirected>,
    V: Clone,
    E: Clone,
{
    fn apply(self) -> G {
        // XXX: Is it possible to do it in place in a way that would be more
        // efficient than the out-of-place approach? It would also have a nice
        // side effect of not changing the vertex indices when they are holes.
        let vertex_capacity = self.graph.vertex_count();
        let edge_capacity = self.edge_count();
        let mut result = G::with_capacity(vertex_capacity, edge_capacity);
        self.apply_mut(&mut result);
        result
    }
}
/// "Weak" edge queries answered by negating membership in the underlying
/// graph. Endpoints are not recoverable, so `endpoints_weak` always gives up.
impl<V, E, G> EdgesWeak<E, Undirected> for Complement<V, E, G>
where
    G: Vertices<V> + Edges<E, Undirected>,
{
    fn edge_count_hint(&self) -> Option<usize> {
        Some(self.edge_count())
    }
    fn edge_bound_hint(&self) -> Option<usize> {
        self.edge_count_hint()
    }
    fn edge_weak(&self, index: EdgeIndex) -> Option<WeakRef<'_, E>> {
        // An index present in the underlying graph is absent from the
        // complement; any other index maps to the shared edge attribute.
        if self.graph.contains_edge(index) {
            None
        } else {
            Some(WeakRef::borrowed(&self.edge))
        }
    }
    fn endpoints_weak(&self, _index: EdgeIndex) -> Option<(Self::VertexIndex, Self::VertexIndex)> {
        None
    }
    fn edge_index_weak(&self, src: Self::VertexIndex, dst: Self::VertexIndex) -> Option<EdgeIndex> {
        // Complement edges have no real index; report a null placeholder when
        // the pair is unconnected in the underlying graph.
        if self.graph.edge_index(src, dst).is_some() {
            None
        } else {
            Some(EdgeIndex::null())
        }
    }
}
/// Neighbor iteration over the complement: collect the real neighbors of
/// `src` into a set, then stream every other vertex that is not in it.
impl<V, E, G> Neighbors for Complement<V, E, G>
where
    G: Neighbors + Vertices<V>,
{
    type NeighborRef<'a> = (VertexIndex, EdgeIndex, VertexIndex, Direction);
    type NeighborsIter<'a>
    where
        Self: 'a,
    = NeighborsIter<'a, V, G>;
    fn neighbors(&self, src: VertexIndex) -> Self::NeighborsIter<'_> {
        NeighborsIter {
            src,
            // The graph is undirected, so a single direction suffices here.
            dir: Outgoing,
            neighbors: self.graph.neighbors(src).map(|n| n.index()).collect(),
            vertices: self.graph.vertex_indices(),
        }
    }
    fn neighbors_directed(&self, src: VertexIndex, dir: Direction) -> Self::NeighborsIter<'_> {
        NeighborsIter {
            src,
            dir,
            neighbors: self
                .graph
                .neighbors_directed(src, dir)
                .map(|n| n.index())
                .collect(),
            vertices: self.graph.vertex_indices(),
        }
    }
}
/// Iterator over the complement neighbors of `src`: yields all vertices of
/// the underlying graph except `src` and its real neighbors.
pub struct NeighborsIter<'a, V, G>
where
    G: Vertices<V> + 'a,
{
    /// Vertex whose complement neighbors are iterated.
    src: VertexIndex,
    /// Direction reported back in each yielded tuple.
    dir: Direction,
    /// Real neighbors of `src` in the underlying graph (excluded set).
    neighbors: FxHashSet<VertexIndex>,
    /// Iterator over all vertex indices of the underlying graph.
    vertices: G::VertexIndicesIter<'a>,
}
impl<'a, V, G> Iterator for NeighborsIter<'a, V, G>
where
    G: Vertices<V> + 'a,
{
    type Item = (VertexIndex, EdgeIndex, VertexIndex, Direction);

    fn next(&mut self) -> Option<Self::Item> {
        let src = self.src;
        let neighbors = &self.neighbors;
        // A complement neighbor is any vertex that is neither `src` itself
        // nor adjacent to it in the underlying graph. Complement edges have
        // no real identity, hence the null edge index.
        let v = self
            .vertices
            .find(|v| *v != src && !neighbors.contains(v))?;
        Some((v, EdgeIndex::null(), src, self.dir))
    }
}
#[cfg(test)]
mod tests {
    use std::collections::HashSet;
    use crate::storage::{AdjList, Stable};
    use super::*;
    // All tests use the same 4-vertex path-plus-chord graph:
    // v0-v1, v1-v2, v2-v3, v3-v1.
    #[test]
    fn edge_count() {
        let mut graph = AdjList::new();
        let v0 = graph.add_vertex(());
        let v1 = graph.add_vertex(());
        let v2 = graph.add_vertex(());
        let v3 = graph.add_vertex(());
        graph.add_edge(v0, v1, ());
        graph.add_edge(v1, v2, ());
        graph.add_edge(v2, v3, ());
        graph.add_edge(v3, v1, ());
        let complement = Complement::new(graph, ());
        // K4 has 6 edges; 4 are taken, so the complement holds 2.
        assert_eq!(complement.edge_count(), 2);
    }
    #[test]
    fn apply() {
        let mut graph = AdjList::new();
        let v0 = graph.add_vertex(());
        let v1 = graph.add_vertex(());
        let v2 = graph.add_vertex(());
        let v3 = graph.add_vertex(());
        graph.add_edge(v0, v1, ());
        graph.add_edge(v1, v2, ());
        graph.add_edge(v2, v3, ());
        graph.add_edge(v3, v1, ());
        let complement: AdjList<_, _, _> = Complement::new(graph, ()).apply();
        // Original edges disappear; the two missing pairs appear.
        assert!(complement.edge_index(v0, v1).is_none());
        assert!(complement.edge_index(v1, v2).is_none());
        assert!(complement.edge_index(v2, v3).is_none());
        assert!(complement.edge_index(v3, v1).is_none());
        assert!(complement.edge_index(v0, v2).is_some());
        assert!(complement.edge_index(v0, v3).is_some());
    }
    #[test]
    fn neighbors() {
        let mut graph = AdjList::new();
        let v0 = graph.add_vertex(());
        let v1 = graph.add_vertex(());
        let v2 = graph.add_vertex(());
        let v3 = graph.add_vertex(());
        graph.add_edge(v0, v1, ());
        graph.add_edge(v1, v2, ());
        graph.add_edge(v2, v3, ());
        graph.add_edge(v3, v1, ());
        let complement = Complement::new(graph, ());
        assert_eq!(
            complement
                .neighbors(v0)
                .map(|n| n.index())
                .collect::<HashSet<_>>(),
            vec![v2, v3].into_iter().collect()
        );
        // v1 is adjacent to everything, so it has no complement neighbors.
        assert_eq!(complement.neighbors(v1).count(), 0);
        assert_eq!(
            complement
                .neighbors(v2)
                .map(|n| n.index())
                .collect::<HashSet<_>>(),
            vec![v0].into_iter().collect()
        );
        assert_eq!(
            complement
                .neighbors(v3)
                .map(|n| n.index())
                .collect::<HashSet<_>>(),
            vec![v0].into_iter().collect()
        );
    }
    #[test]
    fn apply_holes() {
        let mut graph = Stable::new(AdjList::new());
        let v0 = graph.add_vertex(());
        let v1 = graph.add_vertex(());
        let v2 = graph.add_vertex(());
        let v3 = graph.add_vertex(());
        let v4 = graph.add_vertex(());
        graph.add_edge(v0, v1, ());
        graph.add_edge(v1, v2, ());
        graph.add_edge(v2, v3, ());
        graph.add_edge(v3, v4, ());
        graph.add_edge(v4, v1, ());
        graph.remove_vertex(v3);
        let complement: Stable<AdjList<_, _, _>> = Complement::new(graph, ()).apply();
        // XXX: Complement does not preserve the vertex indices when there are
        // holes. This would change if we implement an in-place algorithm.
        let v0 = VertexIndex::new(0);
        let v1 = VertexIndex::new(1);
        let v2 = VertexIndex::new(2);
        let v4 = VertexIndex::new(3);
        assert!(complement.edge_index(v0, v1).is_none());
        assert!(complement.edge_index(v1, v2).is_none());
        assert!(complement.edge_index(v4, v1).is_none());
        assert!(complement.edge_index(v0, v2).is_some());
        assert!(complement.edge_index(v0, v4).is_some());
        assert!(complement.edge_index(v2, v4).is_some());
    }
}
| 27.946372 | 100 | 0.543515 |
cc84cf0ea6bc1d6d23faa4602a24d2042ba59476 | 4,005 | //! This is the existing test case from
//! https://raw.githubusercontent.com/veddan/rust-introsort/master/benches/bench.rs
#![feature(test)]
#![feature(unboxed_closures)]
extern crate test;
use rand::{distributions::Standard, rngs::SmallRng, seq::SliceRandom, Rng, SeedableRng};
use std::mem;
use test::Bencher;
/// Benchmark RNG: small and fast, freshly seeded from system entropy.
fn rng() -> impl Rng {
    SmallRng::from_entropy()
}
/// Sorts the slice in place with the standard library's stable sort; every
/// benchmark below funnels through this one entry point.
#[inline]
fn sort<T>(l: &mut [T])
where
    T: Ord,
{
    l.sort();
}
// A deliberately bulky element type (32 bytes) to measure sorting cost when
// moves are expensive.
type BigSortable = (u64, u64, u64, u64);
// Generates a benchmark that sorts `$n` freshly sampled random values of
// `$typ` per iteration using `$sortfun`.
macro_rules! bench_random(
    ($name: ident, $sortfun: ident, $typ: ty, $n: expr) => (
        #[bench]
        fn $name(b: &mut Bencher) {
            let mut rng = rng();
            b.iter(|| {
                let mut v = (&mut rng).sample_iter(Standard).take($n).collect::<Vec<$typ>>();
                $sortfun(&mut v[..]);
            });
            b.bytes = $n * mem::size_of::<$typ>() as u64;
        }
    )
);
// Random-input benchmarks across element sizes (u8 / u64 / 32-byte tuple)
// and input lengths (5 / 100 / 10_000).
bench_random!(sort_tiny_random_small, sort, u8, 5);
bench_random!(sort_tiny_random_medium, sort, u8, 100);
bench_random!(sort_tiny_random_large, sort, u8, 10_000);
bench_random!(sort_random_small, sort, u64, 5);
bench_random!(sort_random_medium, sort, u64, 100);
bench_random!(sort_random_large, sort, u64, 10_000);
bench_random!(sort_big_random_small, sort, BigSortable, 5);
bench_random!(sort_big_random_medium, sort, BigSortable, 100);
bench_random!(sort_big_random_large, sort, BigSortable, 10_000);
/// Best case: the input is already sorted on every iteration.
#[bench]
fn sort_sorted(b: &mut Bencher) {
    let mut data: Vec<isize> = (0..10000).collect();
    b.iter(|| sort(&mut data[..]));
    b.bytes = (data.len() * mem::size_of_val(&data[0])) as u64;
}
/// Already-sorted input with a bulky (4×usize) element type.
#[bench]
fn sort_big_sorted(b: &mut Bencher) {
    let mut data: Vec<_> = (0..10000usize).map(|i| (i, i, i, i)).collect();
    b.iter(|| sort(&mut data[..]));
    b.bytes = (data.len() * mem::size_of_val(&data[0])) as u64;
}
/// Many duplicates: only 10 distinct values, each repeated 100 times,
/// reshuffled before every measured sort.
#[bench]
fn sort_few_unique(b: &mut Bencher) {
    let mut data: Vec<u32> = (0u32..10)
        .flat_map(|value| (0usize..100).map(move |_| value))
        .collect();
    let mut rng = rng();
    b.iter(|| {
        data.shuffle(&mut rng);
        sort(&mut data[..]);
    });
    b.bytes = (data.len() * mem::size_of_val(&data[0])) as u64;
}
/// Degenerate case: every element is equal.
#[bench]
fn sort_equals(b: &mut Bencher) {
    let mut data = vec![1u64; 1000];
    b.iter(|| sort(&mut data[..]));
    b.bytes = (data.len() * mem::size_of_val(&data[0])) as u64;
}
/// Large input: 100k random i64s, reshuffled before every measured sort so
/// each iteration pays the full cost.
#[bench]
fn sort_huge(b: &mut Bencher) {
    let mut rng = rng();
    let n = 100_000;
    let mut data: Vec<i64> = (&mut rng).sample_iter(Standard).take(n).collect();
    b.iter(|| {
        data.shuffle(&mut rng);
        sort(&mut data[..]);
    });
    b.bytes = (n * mem::size_of::<i64>()) as u64;
}
/// Partially sorted input: every other 1%-sized chunk is pre-sorted, the
/// rest left random, then the whole slice is sorted from a clone each time.
#[bench]
fn sort_partially_sorted(b: &mut Bencher) {
    // Sorts alternating chunks of ~1% of the slice, leaving the chunks in
    // between untouched.
    fn partially_sort<T: Ord + ::std::fmt::Display>(v: &mut [T]) {
        let s = v.len() / 100;
        if s == 0 {
            return;
        }
        let mut sorted = true;
        for c in v.chunks_mut(s) {
            if sorted {
                sort(&mut c[..]);
            }
            sorted = !sorted;
        }
    }
    let mut rng = rng();
    let n = 10_000;
    let mut v = (&mut rng)
        .sample_iter(Standard)
        .take(n)
        .collect::<Vec<i64>>();
    v.shuffle(&mut rng);
    partially_sort(&mut v[..]);
    b.iter(|| {
        // Clone so each iteration sorts the same partially-sorted input.
        let mut v2 = v.clone();
        sort(&mut v2[..]);
    });
    b.bytes = (n * mem::size_of::<i64>()) as u64;
}
/// String sorting: 10k random lowercase strings of length 0..60, reshuffled
/// before every measured sort.
#[bench]
fn sort_strings(b: &mut Bencher) {
    let mut rng = rng();
    let n = 10_000usize;
    let mut v = Vec::with_capacity(n);
    let mut bytes = 0;
    for _ in 0..n {
        // NOTE(review): two-arg `gen_range(lo, hi)` is the pre-0.8 rand API
        // with an exclusive upper bound, so lengths are 0..59 and `'z'`
        // itself never appears — confirm this is intended.
        let len = rng.gen_range(0, 60);
        bytes += len;
        let mut s = String::with_capacity(len);
        if len == 0 {
            v.push(s);
            continue;
        }
        for _ in 0..len {
            s.push(rng.gen_range(b'a', b'z') as char);
        }
        v.push(s);
    }
    b.iter(|| {
        v.shuffle(&mut rng);
        sort(&mut v[..]);
    });
    b.bytes = bytes as u64;
}
| 24.570552 | 93 | 0.523096 |
1e794d2471344db380b6b89d62174f8971cbbb3e | 39,292 | use crate::util::*;
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
/// One entry from `verbs.lei`: either a single fully spelled-out verb or a
/// set of regex patterns that recognize/produce a whole family of verbs.
#[derive(Debug)]
enum Verb {
    Simple(SimpleVerb),
    Patterns(VerbPatterns),
}
/// A concrete verb with explicitly listed forms; `None` means the data file
/// gave `_` (no such form) for that column.
#[derive(Debug)]
struct SimpleVerb {
    singular: String,
    plural: String,
    singular_past: Option<String>,
    plural_past: Option<String>,
    present_participle: Option<String>,
    past_participle: Option<String>,
}
/// A pattern family: each `*_recognizer` is a regex source string matching a
/// verb form, and the paired `*_replacement` is the expansion that produces
/// that form from a capture of another recognizer.
#[derive(Debug)]
struct VerbPatterns {
    singular_recognizer: String,
    singular_replacement: String,
    plural_recognizer: String,
    plural_replacement: String,
    past_recognizer: Option<String>,
    past_replacement: Option<String>,
    present_participle_recognizer: Option<String>,
    present_participle_replacement: Option<String>,
    past_participle_recognizer: Option<String>,
    past_participle_replacement: Option<String>,
}
/// Borrowed view of one whitespace-split data line. Each `*_re_str` is
/// `Some` only when that column carried a generic (pattern) marker.
#[derive(Debug)]
struct ParsedLine<'a> {
    singular_re_str: Option<&'a str>,
    singular: &'a str,
    plural_re_str: Option<&'a str>,
    plural: &'a str,
    past_re_str: Option<&'a str>,
    singular_past: Option<&'a str>,
    plural_past: Option<&'a str>,
    present_participle_re_str: Option<&'a str>,
    present_participle: Option<&'a str>,
    past_participle_re_str: Option<&'a str>,
    past_participle: Option<&'a str>,
}
impl<'a> ParsedLine<'a> {
    /// True when any column carried a generic (pattern) marker — i.e. the
    /// line describes a regex family rather than one concrete verb.
    fn is_generic(&self) -> bool {
        self.singular_re_str.is_some()
            || self.plural_re_str.is_some()
            || self.past_re_str.is_some()
            || self.present_participle_re_str.is_some()
            || self.past_participle_re_str.is_some()
    }
}
/// Generator entry point: returns the rendered `verbs.rs` source text and
/// the path it should be written to under `target_dir`.
pub(crate) fn generate(source_dir: PathBuf, target_dir: PathBuf) -> (String, PathBuf) {
    generate_verbs_rs_file(source_dir, target_dir)
}
// Skeleton of the generated file; its placeholder markers are filled in by
// the `insert_*` functions below.
const VERBS_TEMPLATE: &str = include_str!("verbs.rs.template");
/// Reads `verbs.lei`, fills the template in three passes (literal tables,
/// pattern transformations, embedded test data) and pairs the rendered
/// source with its output path.
fn generate_verbs_rs_file(source_dir: PathBuf, target_dir: PathBuf) -> (String, PathBuf) {
    let verbs = lines_to_verbs(&read_lines(&source_dir, "verbs.lei"));
    let rendered = insert_test_data(
        &source_dir,
        insert_pattern_data(insert_simple_data(&verbs), &verbs),
    );
    let verbs_rs_file = target_dir.join("verbs.rs");
    (rendered, verbs_rs_file)
}
/// Parses every data line into zero or more `Verb` entries; blank and
/// comment lines (which `line_to_verb` maps to `None`) are dropped.
fn lines_to_verbs(lines: &[String]) -> Vec<Verb> {
    lines
        .iter()
        // `filter_map` collapses the `Option` layer and `flatten` the inner
        // `Vec` — equivalent to the old `map(..).flatten().flatten()` chain
        // (clippy's `map_flatten` lint), but states the intent directly.
        .filter_map(|l| line_to_verb(l.as_str()))
        .flatten()
        .collect()
}
/// Converts one line of `verbs.lei` into its verb entries; blank and comment
/// lines yield `None`.
fn line_to_verb(line: &str) -> Option<Vec<Verb>> {
    if BLANK_LINE_RE.is_match(line) || COMMENT_LINE_RE.is_match(line) {
        return None;
    }
    let parsed = parse_data_from_line(line);
    // Pattern lines expand into regex families; everything else is a
    // concrete verb.
    let verbs = if parsed.is_generic() {
        parsed_to_patterns(parsed)
    } else {
        parsed_to_simple(parsed)
    };
    Some(verbs)
}
/// Splits a data line into its five (or six) columns and classifies each as
/// literal or generic. Panics on malformed lines — this runs at build time,
/// so failing loudly is the right behavior.
fn parse_data_from_line(line: &str) -> ParsedLine {
    let verbs = line.split_whitespace().collect::<Vec<_>>();
    if verbs.len() != 5 && verbs.len() != 6 {
        panic!(
            "Splitting line on whitespace produced a surprising number of elements ({}): [{}]",
            verbs.len(),
            line
        );
    }
    let (singular_re_str, singular) = split_generic(verbs[0]);
    if singular.is_none() {
        panic!("Splitting line did not produce a singular verb: [{}]", line);
    }
    let (plural_re_str, plural) = split_generic(verbs[1]);
    if plural.is_none() {
        panic!("Splitting line did not produce a plural verb: [{}]", line);
    }
    let (past_re_str, past) = split_generic(verbs[2]);
    // The past column may carry "singular|plural" (e.g. "was|were"); a plain
    // value serves as both.
    let (singular_past, plural_past) = if let Some(p) = past {
        let split = p.split('|').collect::<Vec<_>>();
        if split.len() == 2 {
            (Some(split[0]), Some(split[1]))
        } else {
            (Some(p), Some(p))
        }
    } else {
        (None, None)
    };
    let (present_participle_re_str, present_participle) = split_generic(verbs[3]);
    let (past_participle_re_str, past_participle) = split_generic(verbs[4]);
    ParsedLine {
        singular_re_str,
        singular: singular.unwrap(),
        plural_re_str,
        plural: plural.unwrap(),
        past_re_str,
        singular_past,
        plural_past,
        present_participle_re_str,
        present_participle,
        past_participle_re_str,
        past_participle,
    }
}
/// Splits one column into an optional pattern marker and the verb text.
/// `_` means "no form given"; a leading `*` or `-` marks a generic entry.
fn split_generic(verb: &str) -> (Option<&str>, Option<&str>) {
    if verb == "_" {
        return (None, None);
    }
    // First byte is the potential marker character; the rest is the verb.
    let (generic, split_verb) = verb.split_at(1);
    if generic.contains(&['*', '-'][..]) {
        // NOTE(review): `maybe_is_generic_re_str` comes from crate::util —
        // presumably maps the marker to a regex fragment; confirm there.
        return (maybe_is_generic_re_str(Some(generic)), Some(split_verb));
    }
    (None, Some(verb))
}
/// Converts a fully literal line into `Verb::Simple` entries, duplicating
/// dashed verbs with a space-separated variant.
fn parsed_to_simple(parsed: ParsedLine) -> Vec<Verb> {
    let singular = parsed.singular;
    let plural = parsed.plural;
    let mut verbs = vec![Verb::Simple(SimpleVerb {
        singular: singular.to_string(),
        plural: plural.to_string(),
        singular_past: parsed.singular_past.map(|p| p.to_string()),
        plural_past: parsed.plural_past.map(|p| p.to_string()),
        present_participle: parsed.present_participle.map(|p| p.to_string()),
        past_participle: parsed.past_participle.map(|p| p.to_string()),
    })];
    // For words with dashes we also add a version where the dashes are
    // replaced with spaces, e.g. "break away". As of this writing there are
    // no verbs with dashes, but there could be in the future.
    if singular.contains('-') {
        verbs.push(Verb::Simple(SimpleVerb {
            singular: singular.replace("-", " "),
            plural: plural.replace("-", " "),
            singular_past: parsed.singular_past.map(|p| p.replace("-", " ")),
            plural_past: parsed.plural_past.map(|p| p.replace("-", " ")),
            present_participle: parsed.present_participle.map(|p| p.replace("-", " ")),
            past_participle: parsed.past_participle.map(|p| p.replace("-", " ")),
        }));
    }
    verbs
}
/// Converts a generic line into a `Verb::Patterns` entry. Replacements are
/// prefixed with `${1}` so the first capture group (the verb's stem) is
/// carried through when the recognizer matches.
fn parsed_to_patterns(parsed: ParsedLine) -> Vec<Verb> {
    let (singular_replacement, singular_recognizer) = replacement_and_regex_str_for_pattern_match(
        parsed.singular_re_str.unwrap(),
        parsed.singular,
    );
    let (plural_replacement, plural_recognizer) =
        replacement_and_regex_str_for_pattern_match(parsed.plural_re_str.unwrap(), parsed.plural);
    // NOTE(review): in the three match arms below the helper's return value
    // is destructured as `(re, replacement)`, but judging by the singular
    // binding above the helper returns (replacement, recognizer). The values
    // end up in the correctly named outer variables, yet the local names are
    // swapped — worth renaming for clarity; confirm against
    // `replacement_and_regex_str_for_pattern_match` in crate::util.
    let (past_replacement, past_recognizer) = match parsed.past_re_str {
        None => (None, None),
        Some(re) => {
            let (re, replacement) =
                replacement_and_regex_str_for_pattern_match(re, parsed.singular_past.unwrap());
            (Some(re), Some(replacement))
        }
    };
    let (past_participle_replacement, past_participle_recognizer) = match parsed
        .past_participle_re_str
    {
        None => (None, None),
        Some(re) => {
            let (re, replacement) =
                replacement_and_regex_str_for_pattern_match(re, parsed.past_participle.unwrap());
            (Some(re), Some(replacement))
        }
    };
    let (present_participle_replacement, present_participle_recognizer) = match parsed
        .present_participle_re_str
    {
        None => (None, None),
        Some(re) => {
            let (re, replacement) =
                replacement_and_regex_str_for_pattern_match(re, parsed.present_participle.unwrap());
            (Some(re), Some(replacement))
        }
    };
    // println!(
    //     "SIN REC = {} - PL REP = {}",
    //     singular_recognizer, plural_replacement
    // );
    vec![Verb::Patterns(VerbPatterns {
        singular_recognizer,
        singular_replacement: format!("${{1}}{}", singular_replacement),
        plural_recognizer,
        plural_replacement: format!("${{1}}{}", plural_replacement),
        past_recognizer,
        past_replacement: past_replacement.map(|p| format!("${{1}}{}", p)),
        present_participle_recognizer,
        present_participle_replacement: present_participle_replacement
            .map(|p| format!("${{1}}{}", p)),
        past_participle_recognizer,
        past_participle_replacement: past_participle_replacement.map(|p| format!("${{1}}{}", p)),
    })]
}
/// Renders the literal (non-pattern) verbs into the template: five lookup
/// hashmaps (singular↔plural and form-of mappings) and five membership
/// hashsets, one per verb form.
fn insert_simple_data(verbs: &[Verb]) -> String {
    let mut plural_of: HashMap<&str, &str> = HashMap::new();
    let mut singular_of: HashMap<&str, &str> = HashMap::new();
    let mut past_of: HashMap<&str, &str> = HashMap::new();
    let mut present_participle_of: HashMap<&str, &str> = HashMap::new();
    let mut past_participle_of: HashMap<&str, &str> = HashMap::new();
    let mut is_singular: Vec<&str> = vec![];
    let mut is_plural: Vec<&str> = vec![];
    let mut is_past: Vec<&str> = vec![];
    let mut is_present_participle: Vec<&str> = vec![];
    let mut is_past_participle: Vec<&str> = vec![];
    for v in verbs.iter().filter_map(|n| match n {
        Verb::Simple(n) => Some(n),
        _ => None,
    }) {
        plural_of.insert(&v.singular, &v.plural);
        singular_of.insert(&v.plural, &v.singular);
        insert_past_mappings(&mut past_of, v);
        insert_present_participle_mappings(&mut present_participle_of, v);
        insert_past_participle_mappings(&mut past_participle_of, v);
        is_singular.push(&v.singular);
        is_plural.push(&v.plural);
        if let Some(p) = &v.singular_past {
            is_past.push(p);
        }
        if let Some(p) = &v.plural_past {
            is_past.push(p);
        }
        if let Some(p) = &v.present_participle {
            is_present_participle.push(p);
        }
        if let Some(p) = &v.past_participle {
            is_past_participle.push(p);
        }
    }
    let mut verbs_rs = VERBS_TEMPLATE.to_string();
    for (var_name, hm) in [
        ("SINGULAR_OF", singular_of),
        ("PLURAL_OF", plural_of),
        ("PAST_OF", past_of),
        ("PRESENT_PARTICIPLE_OF", present_participle_of),
        ("PAST_PARTICIPLE_OF", past_participle_of),
    ] {
        verbs_rs = add_hashmap_from_pairs_list(verbs_rs, var_name, &hashmap_to_sorted_pairs(hm));
    }
    for (var_name, mut v) in [
        ("IS_SINGULAR", is_singular),
        ("IS_PLURAL", is_plural),
        ("IS_PAST", is_past),
        ("IS_PRESENT_PARTICIPLE", is_present_participle),
        ("IS_PAST_PARTICIPLE", is_past_participle),
    ] {
        // Sort before deduplicating: `Vec::dedup` only removes *consecutive*
        // duplicates, so calling it before the sort (as the original did)
        // left nonadjacent repeats — e.g. a past form shared by several
        // verbs — in the list. Identical strings get identical lowercase
        // keys, so they are adjacent after the sort. (Duplicates were
        // harmless in the generated HashSet, but the generated source was
        // noisier than intended.)
        v.sort_by_key(|w| w.to_lowercase());
        v.dedup();
        verbs_rs = add_hashset_from_list(verbs_rs, var_name, &v);
    }
    verbs_rs
}
/// Maps a verb's other forms to its past tense. The plural form prefers the
/// dedicated plural past ("were") and falls back to the singular past.
fn insert_past_mappings<'a, 'b: 'a>(
    past_of: &'a mut HashMap<&'b str, &'b str>,
    verb: &'b SimpleVerb,
) {
    // This is a special case.
    if !maybe_insert_mapping(past_of, Some(&verb.plural), verb.plural_past.as_ref()) {
        maybe_insert_mapping(past_of, Some(&verb.plural), verb.singular_past.as_ref());
    }
    for pair in [
        (Some(&verb.singular), verb.singular_past.as_ref()),
        (verb.past_participle.as_ref(), verb.singular_past.as_ref()),
        (
            verb.present_participle.as_ref(),
            verb.singular_past.as_ref(),
        ),
    ] {
        maybe_insert_mapping(past_of, pair.0, pair.1);
    }
}
/// Maps each of a verb's other forms to its present participle, when known.
fn insert_present_participle_mappings<'a, 'b: 'a>(
    pres_part_of: &'a mut HashMap<&'b str, &'b str>,
    verb: &'b SimpleVerb,
) {
    for pair in [
        (Some(&verb.singular), verb.present_participle.as_ref()),
        (Some(&verb.plural), verb.present_participle.as_ref()),
        (
            verb.singular_past.as_ref(),
            verb.present_participle.as_ref(),
        ),
        (verb.plural_past.as_ref(), verb.present_participle.as_ref()),
        (
            verb.past_participle.as_ref(),
            verb.present_participle.as_ref(),
        ),
    ] {
        maybe_insert_mapping(pres_part_of, pair.0, pair.1);
    }
}
/// Maps each of a verb's other forms to its past participle, when known.
fn insert_past_participle_mappings<'a, 'b: 'a>(
    past_part_of: &'a mut HashMap<&'b str, &'b str>,
    verb: &'b SimpleVerb,
) {
    for pair in [
        (Some(&verb.singular), verb.past_participle.as_ref()),
        (Some(&verb.plural), verb.past_participle.as_ref()),
        (verb.singular_past.as_ref(), verb.past_participle.as_ref()),
        (verb.plural_past.as_ref(), verb.past_participle.as_ref()),
        (
            verb.present_participle.as_ref(),
            verb.past_participle.as_ref(),
        ),
    ] {
        maybe_insert_mapping(past_part_of, pair.0, pair.1);
    }
}
/// Inserts `from -> to` only when both forms exist; returns whether an
/// insertion happened.
fn maybe_insert_mapping<'a, 'b: 'a>(
    mapping: &'a mut HashMap<&'b str, &'b str>,
    from: Option<&'b String>,
    to: Option<&'b String>,
) -> bool {
    match (from, to) {
        (Some(from), Some(to)) => {
            mapping.insert(from, to);
            true
        }
        _ => false,
    }
}
/// One generated "recognize then rewrite" step: `idx` indexes into the
/// generated regex array named by `recognizer_var`; `recognizer` is kept for
/// the emitted comment, and `replacement` is the capture-expansion string.
struct Transformation<'a> {
    idx: usize,
    recognizer_var: &'static str,
    recognizer: &'a str,
    replacement: &'a str,
}
/// Renders the pattern-based verbs into the template: one regex array per
/// form plus the transformation code that rewrites a matched verb into each
/// target form. `idx` values index into the recognizer arrays built here.
fn insert_pattern_data(mut verbs_rs: String, verbs: &[Verb]) -> String {
    let mut plural_to_singular_transformations: Vec<Transformation> = vec![];
    let mut singular_to_plural_transformations: Vec<Transformation> = vec![];
    let mut past_recognizer_regexes: Vec<&str> = vec![];
    let mut to_past_transformations: Vec<Transformation> = vec![];
    let mut present_participle_recognizer_regexes: Vec<&str> = vec![];
    let mut to_present_participle_transformations: Vec<Transformation> = vec![];
    let mut past_participle_recognizer_regexes: Vec<&str> = vec![];
    let mut to_past_participle_transformations: Vec<Transformation> = vec![];
    for (idx, v) in verbs
        .iter()
        .filter_map(|v| match v {
            Verb::Patterns(v) => Some(v),
            _ => None,
        })
        .enumerate()
    {
        plural_to_singular_transformations.push(Transformation {
            idx,
            recognizer_var: "PLURAL_RECOGNIZER_REGEXES",
            recognizer: &v.plural_recognizer,
            replacement: &v.singular_replacement,
        });
        singular_to_plural_transformations.push(Transformation {
            idx,
            recognizer_var: "SINGULAR_RECOGNIZER_REGEXES",
            recognizer: &v.singular_recognizer,
            replacement: &v.plural_replacement,
        });
        // Past/participle rewrites can start from either the singular or the
        // plural form, so each gets two transformations with the same target.
        if let Some(replacement) = &v.past_replacement {
            past_recognizer_regexes.push(v.past_recognizer.as_ref().unwrap());
            to_past_transformations.push(Transformation {
                idx,
                recognizer_var: "SINGULAR_RECOGNIZER_REGEXES",
                recognizer: &v.singular_recognizer,
                replacement,
            });
            to_past_transformations.push(Transformation {
                idx,
                recognizer_var: "PLURAL_RECOGNIZER_REGEXES",
                recognizer: &v.plural_recognizer,
                replacement,
            });
        }
        if let Some(replacement) = &v.present_participle_replacement {
            present_participle_recognizer_regexes
                .push(v.present_participle_recognizer.as_ref().unwrap());
            to_present_participle_transformations.push(Transformation {
                idx,
                recognizer_var: "SINGULAR_RECOGNIZER_REGEXES",
                recognizer: &v.singular_recognizer,
                replacement,
            });
            to_present_participle_transformations.push(Transformation {
                idx,
                recognizer_var: "PLURAL_RECOGNIZER_REGEXES",
                recognizer: &v.plural_recognizer,
                replacement,
            });
        }
        if let Some(replacement) = &v.past_participle_replacement {
            past_participle_recognizer_regexes.push(v.past_participle_recognizer.as_ref().unwrap());
            to_past_participle_transformations.push(Transformation {
                idx,
                recognizer_var: "SINGULAR_RECOGNIZER_REGEXES",
                recognizer: &v.singular_recognizer,
                replacement,
            });
            to_past_participle_transformations.push(Transformation {
                idx,
                recognizer_var: "PLURAL_RECOGNIZER_REGEXES",
                recognizer: &v.plural_recognizer,
                replacement,
            });
        }
    }
    // XXX - deduping this breaks the idx value we set earlier. I'm not sure
    // how best to dedupe and keep the idx var correct for all transformations.
    // plural_to_singular_transformations.dedup_by_key(|t| t.recognizer);
    // singular_to_plural_transformations.dedup_by_key(|t| t.recognizer);
    verbs_rs = add_regexes(
        &mut verbs_rs,
        "SINGULAR_RECOGNIZER_REGEXES",
        &singular_to_plural_transformations
            .iter()
            .map(|t| t.recognizer)
            .collect::<Vec<&str>>(),
    );
    verbs_rs = add_transformations(
        &mut verbs_rs,
        "SINGULAR_TO_PLURAL_TRANSFORMATIONS",
        &singular_to_plural_transformations,
        false,
    );
    verbs_rs = add_regexes(
        &mut verbs_rs,
        "PLURAL_RECOGNIZER_REGEXES",
        &plural_to_singular_transformations
            .iter()
            .map(|t| t.recognizer)
            .collect::<Vec<&str>>(),
    );
    verbs_rs = add_transformations(
        &mut verbs_rs,
        "PLURAL_TO_SINGULAR_TRANSFORMATIONS",
        &plural_to_singular_transformations,
        false,
    );
    verbs_rs = add_regexes(
        &mut verbs_rs,
        "PAST_RECOGNIZER_REGEXES",
        &past_recognizer_regexes,
    );
    verbs_rs = add_transformations(
        &mut verbs_rs,
        "TO_PAST_TRANSFORMATIONS",
        &to_past_transformations,
        true,
    );
    verbs_rs = add_regexes(
        &mut verbs_rs,
        "PRESENT_PARTICIPLE_RECOGNIZER_REGEXES",
        &present_participle_recognizer_regexes,
    );
    verbs_rs = add_transformations(
        &mut verbs_rs,
        "TO_PRESENT_PARTICIPLE_TRANSFORMATIONS",
        &to_present_participle_transformations,
        true,
    );
    verbs_rs = add_regexes(
        &mut verbs_rs,
        "PAST_PARTICIPLE_RECOGNIZER_REGEXES",
        &past_participle_recognizer_regexes,
    );
    verbs_rs = add_transformations(
        &mut verbs_rs,
        "TO_PAST_PARTICIPLE_TRANSFORMATIONS",
        &to_past_participle_transformations,
        true,
    );
    verbs_rs
}
/// Replaces the `which` placeholder in the template with one generated
/// `Regex::new(...)` line per recognizer, each tagged with its index so the
/// transformation code can refer back to it by position.
fn add_regexes(template: &mut String, which: &str, regexes: &[&str]) -> String {
    let mut lines: Vec<String> = Vec::with_capacity(regexes.len());
    for (idx, regex) in regexes.iter().enumerate() {
        lines.push(format!(
            r##"Regex::new(r#"{regex}"#).unwrap(), // {idx}"##,
            regex = regex,
            idx = idx,
        ));
    }
    let re = template_match_re(which);
    re.replace(template, lines.join("\n")).to_string()
}
/// Replaces the `which` placeholder with generated match-and-rewrite code,
/// one `if let Some(caps) = ...` per transformation. `return_is_option`
/// selects whether the generated code returns `Option<Cow>` or `Cow`.
fn add_transformations(
    template: &mut String,
    which: &str,
    transformations: &[Transformation],
    return_is_option: bool,
) -> String {
    let transformations_str = transformations
        .iter()
        .map(|t| {
            let transformation = format!(
                r##"
// {regex}
if let Some(caps) = {recognizer_var}[{i}].captures(word) {{
    let mut new_word = String::new();
    caps.expand(r#"{replacement}"#, &mut new_word);
    #[cfg(feature = "debug")]
    println!(r#"  word '{{}}' matched regex {{:?}} (idx {i}) and became {{}}"#, word, {recognizer_var}[{i}], new_word);
    return {return_val};
}}
"##,
                regex = t.recognizer,
                recognizer_var = t.recognizer_var,
                i = t.idx,
                replacement = t.replacement,
                return_val = if return_is_option {
                    "Some(Cow::Owned(new_word))"
                } else {
                    "Cow::Owned(new_word)"
                },
            );
            transformation
                .trim_start_matches('\n')
                .trim_end_matches('\n')
                .to_string()
        })
        .collect::<Vec<String>>()
        .join("\n");
    // If the string we pass to re.replace as the replace contains dollar
    // signs, those are interpreted as references to matching groups. We could
    // try to escape them, but I tried that and couldn't get it right. So
    // we'll just replace them with a fun Unicode symbol and then replace them
    // back after the call to re.replace.
    let dollar_replacer = "👍";
    let re = template_match_re(which);
    re.replace(template, &transformations_str.replace("$", dollar_replacer))
        // This is the str.replace method
        .replace(dollar_replacer, "$")
}
/// The non-singular forms of one verb, as used by the embedded test data.
struct Inflections {
    plural: String,
    past: String,
    present_participle: String,
    past_participle: String,
}
/// Embeds a column-aligned conversion table (singular → other forms) into
/// the generated file as `CONVERSIONS_TEST_DATA`.
fn insert_test_data(source_dir: &Path, verbs_rs: String) -> String {
    let mut conversions = conversions_from_verbs_general(source_dir);
    conversions = conversions_from_custom_data(conversions);
    // Per-column maximum widths, used to pad every row to alignment.
    let widths = (
        conversions.keys().map(String::len).max().unwrap(),
        conversions.values().map(|p| p.plural.len()).max().unwrap(),
        conversions.values().map(|p| p.past.len()).max().unwrap(),
        conversions
            .values()
            .map(|p| p.present_participle.len())
            .max()
            .unwrap(),
        conversions
            .values()
            .map(|p| p.past_participle.len())
            .max()
            .unwrap(),
    );
    // Header row followed by an underscore separator row.
    let mut test_data = one_conversion_line(
        widths,
        "Singular",
        "Plural",
        "Past",
        "Present Participle",
        "Past Participle",
        true,
    );
    test_data.push_str(&one_conversion_line(
        widths,
        &"_".repeat(widths.0),
        &"_".repeat(widths.1),
        &"_".repeat(widths.2),
        &"_".repeat(widths.3),
        &"_".repeat(widths.4),
        true,
    ));
    let mut keys = conversions
        .keys()
        .map(|k| k.as_str())
        .collect::<Vec<&str>>();
    keys.sort_by_key(|k| k.to_lowercase());
    for singular in keys {
        let inflections = conversions.get(singular).unwrap();
        test_data.push_str(&one_conversion_line(
            widths,
            singular,
            &inflections.plural,
            &inflections.past,
            &inflections.present_participle,
            &inflections.past_participle,
            false,
        ));
    }
    verbs_rs.replace(
        "    // CONVERSIONS_TEST_DATA",
        &format!(
            r##"static CONVERSIONS_TEST_DATA: &str = r#"{}{}"#;"##,
            "\n", test_data
        ),
    )
}
/// Reads `verb_general.t` and parses everything after its `__DATA__` marker
/// into the conversions map.
fn conversions_from_verbs_general(source_dir: &Path) -> HashMap<String, Inflections> {
    let source = read_lines(source_dir, "verb_general.t");
    let data_lines = source
        .iter()
        .skip_while(|l| !l.starts_with("__DATA__"))
        // Drop the marker line itself.
        .skip(1)
        .collect::<Vec<&String>>();
    conversions_from_test_data(HashMap::new(), &data_lines)
}
/// Merges the hand-maintained conversion rows below into `conversions`.
///
/// Each data row is `singular plural preterite present-participle
/// past-participle`. `#` lines inside the literal are data comments —
/// either column headers or the inflection rule the following rows
/// exercise — and are skipped by `conversions_from_test_data`.
fn conversions_from_custom_data(
    conversions: HashMap<String, Inflections>,
) -> HashMap<String, Inflections> {
    // NOTE: the literal is runtime data, not Rust source; do not edit the
    // `#` lines expecting them to behave like Rust comments.
    let custom_data = r#"
# Singular Plural Preterite Pres particple Past participle
# __________ ___________ _______ ______________ __________
# \A(.*)bears$
bears bear bore bearing borne
forbears forbear forbore forbearing forborne
# \A(.*)bids$
bids bid bade bidding bidden
forbids forbid forbade forbidding forbidden
# \A(.*)buys$
buys buy bought buying bought
outbuys outbuy outbought outbuying outbought
# \A(.*)casts$
casts cast cast casting cast
spellcasts spellcast spellcast spellcasting spellcast
# \A(.*)clads$
clads clad clad cladding clad
ironclads ironclad ironclad ironcladding ironclad
# \A(.*)cuts$
cuts cut cut cutting cut
crosscuts crosscut crosscut crosscutting crosscut
# \A(.*)does$
does do did doing done
outdoes outdo outdid outdoing outdone
# \A(.*)draws$
draws draw drew drawing drawn
outdraws outdraw outdrew outdrawing outdrawn
# \A(.*)feeds$
feeds feed fed feeding fed
overfeeds overfeed overfed overfeeding overfed
# \A(.*)freezes$
freezes freeze froze freezing frozen
flashfreezes flashfreeze flashfroze flashfreezing flashfrozen
# \A(.*)grows$
grows grow grew growing grown
overgrows overgrow overgrew overgrowing overgrown
# \A(.*)hangs$
hangs hang hung hanging hung
overhangs overhang overhung overhanging overhung
# \A(.*)shears$
shears shear sheared shearing shorn
reshears reshear resheared reshearing reshorn
# \A(.*)hears$
hears hear heard hearing heard
overhears overhear overheard overhearing overheard
# \A(.*)hides$
hides hide hid hiding hidden
rehides rehide rehid rehiding rehidden
# \A(.*)inputs$
inputs input input inputting input
reinputs reinput reinput reinputting reinput
# \A(.*)knits$
knits knit knitted knitting knitted
reknits reknit reknitted reknitting reknitted
# \A(.*)lends$
lends lend lent lending lent
relends relend relent relending relent
# \A(.*)lets$
lets let let letting let
relets relet relet reletting relet
# \A(.*)lights$
lights light lit lighting lit
relights relight relit relighting relit
# \A(.*)makes$
makes make made making made
remakes remake remade remaking remade
# \A(.*)mows$
mows mow mowed mowing mown
remows remow remowed remowing remown
# \A(.*)pays$
pays pay paid paying paid
repays repay repaid repaying repaid
# \A(.*)reads$
reads read read reading read
rereads reread reread rereading reread
# \A(.*)says$
says say said saying said
resays resay resaid resaying resaid
# \A(.*)sees$
sees see saw seeing seen
resees resee resaw reseeing reseen
# \A(.*)sells$
sells sell sold selling sold
resells resell resold reselling resold
# \A(.*)sends$
sends send sent sending sent
resends resend resent resending resent
# \A(.*)sets$
sets set set setting set
resets reset reset resetting reset
# \A(.*)sews$
sews sew sewed sewing sewn
resews resew resewed resewing resewn
# \A(.*)shines$
shines shine shone shining shone
reshines reshine reshone reshining reshone
# \A(.*)shoots$
shoots shoot shot shooting shot
reshoots reshoot reshot reshooting reshot
# \A(.*)shuts$
shuts shut shut shutting shut
reshuts reshut reshut reshutting reshut
# \A(.*)sleeps$
sleeps sleep slept sleeping slept
oversleeps oversleep overslept oversleeping overslept
# \A(.*)slings$
slings sling slung slinging slung
reslings resling reslung reslinging reslung
# \A(.*)spins$
spins spin spun spinning spun
respins respin respun respinning respun
# \A(.*)splits$
splits split split splitting split
resplits resplit resplit resplitting resplit
# \A(.*)spreads$
spreads spread spread spreading spread
respreads respread respread respreading respread
# \A(.*)sticks$
sticks stick stuck sticking stuck
resticks restick restuck resticking restuck
# \A(.*)strikes$
strikes strike struck striking struck
restrikes restrike restruck restriking restruck
# \A(.*)strings$
strings string strung stringing strung
restrings restring restrung restringing restrung
# \A(.*)takes$
takes take took taking taken
retakes retake retook retaking retaken
# \A(.*)teaches$
teaches teach taught teaching taught
reteaches reteach retaught reteaching retaught
# \A(.*)tells$
tells tell told telling told
retells retell retold retelling retold
# \A(.*)thinks$
thinks think thought thinking thought
rethinks rethink rethought rethinking rethought
# \A(.*)throws$
throws throw threw throwing thrown
rethrows rethrow rethrew rethrowing rethrown
# \A(.*)tries$
tries try tried trying tried
retries retry retried retrying retried
# \A(.*)weaves$
weaves weave wove weaving woven
reweaves reweave rewove reweaving rewoven
# \A(.*)weds$
weds wed wed wedding wed
reweds rewed rewed rewedding rewed
# \A(.*)wets$
wets wet wet wetting wetted
rewets rewet rewet rewetting rewetted
# \A(.*)winds$
winds wind wound winding wound
rewinds rewind rewound rewinding rewound
# \A(.*)writes$
writes write wrote writing written
rewrites rewrite rewrote rewriting rewritten
# \A(.*)adds$
adds add added adding added
readds readd readded readding readded
# \A(.*)alights$
alights alight alit alighting alit
realights realight realit realighting realit
# \A(.*)allows$
allows allow allowed allowing allowed
reallows reallow reallowed reallowing reallowed
# \A(.*)appears$
appears appear appeared appearing appeared
reappears reappear reappeared reappearing reappeared
# \A(.*)believes$
believes believe believed believing believed
disbelieves disbelieve disbelieved disbelieving disbelieved
# \A(.*)calls$
calls call called calling called
recalls recall recalled recalling recalled
# \A(.*)clothes$
clothes clothe clothed clothing clothed
reclothes reclothe reclothed reclothing reclothed
# \A(.*)considers$
considers consider considered considering considered
reconsiders reconsider reconsidered reconsidering reconsidered
# \A(.*)creates$
creates create created creating created
recreates recreate recreated recreating recreated
# \A(.*)fits$
fits fit fitted fitting fitted
refits refit refitted refitting refitted
# \A(.*)includes$
includes include included including included
reincludes reinclude reincluded reincluding reincluded
# \A(.*)melts$
melts melt melted melting melted
remelts remelt remelted remelting remelted
# \A(.*)offers$
offers offer offered offering offered
reoffers reoffer reoffered reoffering reoffered
# \A(.*)opens$
opens open opened opening opened
reopens reopen reopened reopening reopened
# \A(.*)plays$
plays play played playing played
replays replay replayed replaying replayed
# \A(.*)serves$
serves serve served serving served
reserves reserve reserved reserving reserved
# \A(.*)smells$
smells smell smelled smelling smelled
resmells resmell resmelled resmelling resmelled
# \A(.*)spells$
spells spell spelled spelling spelled
respells respell respelled respelling respelled
# \A(.*)spills$
spills spill spilled spilling spilled
respills respill respilled respilling respilled
# \A(.*)starts$
starts start started starting started
restarts restart restarted restarting restarted
# \A(.*)turns$
turns turn turned turning turned
returns return returned returning returned
# \A(.*)uses$
uses use used using used
reuses reuse reused reusing reused
# \A(.*)works$
works work worked working worked
reworks rework reworked reworking reworked
# \A(.+)n't$
# This rule doesn't make much sense in LEI right now - see https://rt.cpan.org/Ticket/Display.html?id=140229
# don't don't didn't - -
# \A(.+[aeiou])ys$
# also covered above
arrays array arrayed arraying arrayed
rearrays rearray rearrayed rearraying rearrayed
buoys buoy buoyed buoying buoyed
# \A(.+[aiy])nxes$
# already covered by sphinxes
# \A(.+)ceps$
# already covered by forceps
# \A(.+[cs])hes$
beaches beach beached beaching beached
# \A(.+)oes$
# already covered by oboes
# \A(.+)ieus$
# already covered by adieus
# \A(.+)eaus$
# already covered by chateaus
# \A(.+)sses$
# already covered by kisses and misses
# \A(.+)trixes$
# is this when you make a great first work in a trilogy and then screw it
# up in the latter two parts, like "he really matrixed that trilogy up"?
matrixes matrix matrixed matrixing matrixed
# \A(.+)zzes$
# already covered by buzzes
# \A(.+)zes$
# already covered by razes
# \A(.+)ues$
# already covered by fondues and glues
# \A(.+)is$
# already covered by graffitis, alibis, etc.
# \A(.+)ees$
# already covered by decrees
# \A(.+)yes$
# already covered by eyes
# \A(.+[au])es$
# the "ues" version is already covered earlier - https://rt.cpan.org/Ticket/Display.html?id=140231
# LEI doesn't inflect this correctly - https://rt.cpan.org/Ticket/Display.html?id=140233
# spaes spae spaed spaeing spaed
# \A(.+[^b])is$
# will never match - https://rt.cpan.org/Ticket/Display.html?id=140235
# \A(.+)ies$
# covered by cries and scries
# \A(.+)ys$
# covered by toys
# \A(.+[^e])es$
# covered by japes
# \A(.+)ers$
# covered by bothers
# \A(.+[^s])s$
# covered by lots of stuff
"#;
    conversions_from_test_data(conversions, &custom_data.lines().collect::<Vec<&str>>())
}
/// Parses whitespace-separated data rows into `conversions`, skipping
/// empty lines and any line containing `#`.
///
/// Each surviving row must have at least five columns: singular, plural,
/// past, present participle, past participle (panics otherwise, like the
/// original indexing did — the data is build-time controlled).
fn conversions_from_test_data<A: AsRef<str>>(
    mut conversions: HashMap<String, Inflections>,
    source: &[A],
) -> HashMap<String, Inflections> {
    let data_rows = source
        .iter()
        .map(AsRef::as_ref)
        .filter(|line| !line.is_empty() && !line.contains('#'));
    for row in data_rows {
        let cols: Vec<&str> = row.split_whitespace().collect();
        let entry = Inflections {
            plural: cols[1].to_string(),
            past: cols[2].to_string(),
            present_participle: cols[3].to_string(),
            past_participle: cols[4].to_string(),
        };
        conversions.insert(cols[0].to_string(), entry);
    }
    conversions
}
/// Renders one row of the aligned conversion table.
///
/// `widths` carries the column widths for the singular, plural, past,
/// present-participle and past-participle columns. `is_comment` prefixes
/// the row with `# ` (headers stay comments in the generated data);
/// otherwise the row is indented by two spaces.
fn one_conversion_line(
    widths: (usize, usize, usize, usize, usize),
    singular: &str,
    plural: &str,
    past: &str,
    present_participle: &str,
    past_participle: &str,
    is_comment: bool,
) -> String {
    let (w_singular, w_plural, w_past, w_pres, w_pastp) = widths;
    let prefix = if is_comment { "# " } else { "  " };
    format!(
        "{}{:w_s$} {:w_pl$} {:w_pa$} {:w_pr$} {:w_pp$}\n",
        prefix,
        singular,
        plural,
        past,
        present_participle,
        past_participle,
        w_s = w_singular,
        w_pl = w_plural,
        w_pa = w_past,
        w_pr = w_pres,
        w_pp = w_pastp,
    )
}
| 37.780769 | 123 | 0.537387 |
908bb136beb9e1e505fb0ca472d16cf3266a9e95 | 32,397 | //! This module contains functions to generate default trait impl function bodies where possible.
use syntax::{
ast::{self, edit::AstNodeEdit, make, AstNode, BinaryOp, CmpOp, HasName, LogicOp},
ted,
};
/// Generate custom trait bodies where possible.
///
/// Returns `Option` so that `?` can be used instead of `if let Some`. A
/// `None` result means no custom body could be generated and the body
/// will remain as `todo!` instead.
pub(crate) fn gen_trait_fn_body(
    func: &ast::Fn,
    trait_path: &ast::Path,
    adt: &ast::Adt,
) -> Option<()> {
    // Dispatch on the unqualified trait name of the last path segment.
    let trait_name = trait_path.segment()?.name_ref()?;
    match trait_name.text().as_str() {
        "Clone" => gen_clone_impl(adt, func),
        "Debug" => gen_debug_impl(adt, func),
        "Default" => gen_default_impl(adt, func),
        "Hash" => gen_hash_impl(adt, func),
        "PartialEq" => gen_partial_eq(adt, func),
        "PartialOrd" => gen_partial_ord(adt, func),
        _ => None,
    }
}
/// Generate a `Clone` impl based on the fields and members of the target type.
fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
    // Builds the expression `<target>.clone()`.
    fn gen_clone_call(target: ast::Expr) -> ast::Expr {
        let method = make::name_ref("clone");
        make::expr_method_call(target, method, make::arg_list(None))
    }
    // The tail expression of the generated `fn clone(&self)` body.
    let expr = match adt {
        // `Clone` cannot be derived for unions, so no default impl can be provided.
        ast::Adt::Union(_) => return None,
        ast::Adt::Enum(enum_) => {
            let list = enum_.variant_list()?;
            let mut arms = vec![];
            for variant in list.variants() {
                let name = variant.name()?;
                let variant_name = make::ext::path_from_idents(["Self", &format!("{}", name)])?;
                match variant.field_list() {
                    // => match self { Self::Name { x } => Self::Name { x: x.clone() } }
                    Some(ast::FieldList::RecordFieldList(list)) => {
                        let mut pats = vec![];
                        let mut fields = vec![];
                        for field in list.fields() {
                            let field_name = field.name()?;
                            let pat = make::ident_pat(false, false, field_name.clone());
                            pats.push(pat.into());
                            let path = make::ext::ident_path(&field_name.to_string());
                            let method_call = gen_clone_call(make::expr_path(path));
                            let name_ref = make::name_ref(&field_name.to_string());
                            let field = make::record_expr_field(name_ref, Some(method_call));
                            fields.push(field);
                        }
                        let pat = make::record_pat(variant_name.clone(), pats.into_iter());
                        let fields = make::record_expr_field_list(fields);
                        let record_expr = make::record_expr(variant_name, fields).into();
                        arms.push(make::match_arm(Some(pat.into()), None, record_expr));
                    }
                    // => match self { Self::Name(arg1) => Self::Name(arg1.clone()) }
                    Some(ast::FieldList::TupleFieldList(list)) => {
                        let mut pats = vec![];
                        let mut fields = vec![];
                        for (i, _) in list.fields().enumerate() {
                            // Positional fields get synthetic binding names arg0, arg1, ...
                            let field_name = format!("arg{}", i);
                            let pat = make::ident_pat(false, false, make::name(&field_name));
                            pats.push(pat.into());
                            let f_path = make::expr_path(make::ext::ident_path(&field_name));
                            fields.push(gen_clone_call(f_path));
                        }
                        let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter());
                        let struct_name = make::expr_path(variant_name);
                        let tuple_expr = make::expr_call(struct_name, make::arg_list(fields));
                        arms.push(make::match_arm(Some(pat.into()), None, tuple_expr));
                    }
                    // => match self { Self::Name => Self::Name }
                    None => {
                        let pattern = make::path_pat(variant_name.clone());
                        let variant_expr = make::expr_path(variant_name);
                        arms.push(make::match_arm(Some(pattern.into()), None, variant_expr));
                    }
                }
            }
            let match_target = make::expr_path(make::ext::ident_path("self"));
            let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
            make::expr_match(match_target, list)
        }
        ast::Adt::Struct(strukt) => {
            match strukt.field_list() {
                // => Self { name: self.name.clone() }
                Some(ast::FieldList::RecordFieldList(field_list)) => {
                    let mut fields = vec![];
                    for field in field_list.fields() {
                        let base = make::expr_path(make::ext::ident_path("self"));
                        let target = make::expr_field(base, &field.name()?.to_string());
                        let method_call = gen_clone_call(target);
                        let name_ref = make::name_ref(&field.name()?.to_string());
                        let field = make::record_expr_field(name_ref, Some(method_call));
                        fields.push(field);
                    }
                    let struct_name = make::ext::ident_path("Self");
                    let fields = make::record_expr_field_list(fields);
                    make::record_expr(struct_name, fields).into()
                }
                // => Self(self.0.clone(), self.1.clone())
                Some(ast::FieldList::TupleFieldList(field_list)) => {
                    let mut fields = vec![];
                    for (i, _) in field_list.fields().enumerate() {
                        let f_path = make::expr_path(make::ext::ident_path("self"));
                        let target = make::expr_field(f_path, &format!("{}", i)).into();
                        fields.push(gen_clone_call(target));
                    }
                    let struct_name = make::expr_path(make::ext::ident_path("Self"));
                    make::expr_call(struct_name, make::arg_list(fields))
                }
                // => Self { }
                None => {
                    let struct_name = make::ext::ident_path("Self");
                    let fields = make::record_expr_field_list(None);
                    make::record_expr(struct_name, fields).into()
                }
            }
        }
    };
    // Swap the placeholder (`todo!()`) body for the generated one.
    let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
    ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
    Some(())
}
/// Generate a `Debug` impl based on the fields and members of the target type.
fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
    let annotated_name = adt.name()?;
    match adt {
        // `Debug` cannot be derived for unions, so no default impl can be provided.
        ast::Adt::Union(_) => None,
        // => match self { Self::Variant => write!(f, "Variant") }
        ast::Adt::Enum(enum_) => {
            let list = enum_.variant_list()?;
            let mut arms = vec![];
            for variant in list.variants() {
                let name = variant.name()?;
                let variant_name = make::ext::path_from_idents(["Self", &format!("{}", name)])?;
                // The `f` formatter argument; only used directly by the
                // unit-variant arm below (the other arms rebuild it).
                let target = make::expr_path(make::ext::ident_path("f").into());
                match variant.field_list() {
                    Some(ast::FieldList::RecordFieldList(list)) => {
                        // => f.debug_struct(name)
                        let target = make::expr_path(make::ext::ident_path("f"));
                        let method = make::name_ref("debug_struct");
                        let struct_name = format!("\"{}\"", name);
                        let args = make::arg_list(Some(make::expr_literal(&struct_name).into()));
                        let mut expr = make::expr_method_call(target, method, args);
                        let mut pats = vec![];
                        for field in list.fields() {
                            let field_name = field.name()?;
                            // create a field pattern for use in `MyStruct { fields.. }`
                            let pat = make::ident_pat(false, false, field_name.clone());
                            pats.push(pat.into());
                            // => <expr>.field("field_name", field)
                            let method_name = make::name_ref("field");
                            let name = make::expr_literal(&(format!("\"{}\"", field_name))).into();
                            let path = &format!("{}", field_name);
                            let path = make::expr_path(make::ext::ident_path(path));
                            let args = make::arg_list(vec![name, path]);
                            expr = make::expr_method_call(expr, method_name, args);
                        }
                        // => <expr>.finish()
                        let method = make::name_ref("finish");
                        let expr = make::expr_method_call(expr, method, make::arg_list(None));
                        // => MyStruct { fields.. } => f.debug_struct("MyStruct")...finish(),
                        let pat = make::record_pat(variant_name.clone(), pats.into_iter());
                        arms.push(make::match_arm(Some(pat.into()), None, expr));
                    }
                    Some(ast::FieldList::TupleFieldList(list)) => {
                        // => f.debug_tuple(name)
                        let target = make::expr_path(make::ext::ident_path("f"));
                        let method = make::name_ref("debug_tuple");
                        let struct_name = format!("\"{}\"", name);
                        let args = make::arg_list(Some(make::expr_literal(&struct_name).into()));
                        let mut expr = make::expr_method_call(target, method, args);
                        let mut pats = vec![];
                        for (i, _) in list.fields().enumerate() {
                            // Positional fields bind as arg0, arg1, ...
                            let name = format!("arg{}", i);
                            // create a field pattern for use in `MyStruct(fields..)`
                            let field_name = make::name(&name);
                            let pat = make::ident_pat(false, false, field_name.clone());
                            pats.push(pat.into());
                            // => <expr>.field(field)
                            let method_name = make::name_ref("field");
                            let field_path = &format!("{}", name);
                            let field_path = make::expr_path(make::ext::ident_path(field_path));
                            let args = make::arg_list(vec![field_path]);
                            expr = make::expr_method_call(expr, method_name, args);
                        }
                        // => <expr>.finish()
                        let method = make::name_ref("finish");
                        let expr = make::expr_method_call(expr, method, make::arg_list(None));
                        // => MyStruct (fields..) => f.debug_tuple("MyStruct")...finish(),
                        let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter());
                        arms.push(make::match_arm(Some(pat.into()), None, expr));
                    }
                    None => {
                        // Unit variant: => Self::Name => write!(f, "Name"),
                        let fmt_string = make::expr_literal(&(format!("\"{}\"", name))).into();
                        let args = make::arg_list([target, fmt_string]);
                        let macro_name = make::expr_path(make::ext::ident_path("write"));
                        let macro_call = make::expr_macro_call(macro_name, args);
                        let variant_name = make::path_pat(variant_name);
                        arms.push(make::match_arm(
                            Some(variant_name.into()),
                            None,
                            macro_call.into(),
                        ));
                    }
                }
            }
            let match_target = make::expr_path(make::ext::ident_path("self"));
            let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
            let match_expr = make::expr_match(match_target, list);
            let body = make::block_expr(None, Some(match_expr));
            let body = body.indent(ast::edit::IndentLevel(1));
            // Swap the placeholder (`todo!()`) body for the generated one.
            ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
            Some(())
        }
        ast::Adt::Struct(strukt) => {
            let name = format!("\"{}\"", annotated_name);
            let args = make::arg_list(Some(make::expr_literal(&name).into()));
            let target = make::expr_path(make::ext::ident_path("f"));
            let expr = match strukt.field_list() {
                // => f.debug_struct("Name").finish()
                None => make::expr_method_call(target, make::name_ref("debug_struct"), args),
                // => f.debug_struct("Name").field("foo", &self.foo).finish()
                Some(ast::FieldList::RecordFieldList(field_list)) => {
                    let method = make::name_ref("debug_struct");
                    let mut expr = make::expr_method_call(target, method, args);
                    for field in field_list.fields() {
                        let name = field.name()?;
                        let f_name = make::expr_literal(&(format!("\"{}\"", name))).into();
                        let f_path = make::expr_path(make::ext::ident_path("self"));
                        let f_path = make::expr_ref(f_path, false);
                        let f_path = make::expr_field(f_path, &format!("{}", name)).into();
                        let args = make::arg_list([f_name, f_path]);
                        expr = make::expr_method_call(expr, make::name_ref("field"), args);
                    }
                    expr
                }
                // => f.debug_tuple("Name").field(self.0).finish()
                Some(ast::FieldList::TupleFieldList(field_list)) => {
                    let method = make::name_ref("debug_tuple");
                    let mut expr = make::expr_method_call(target, method, args);
                    for (i, _) in field_list.fields().enumerate() {
                        let f_path = make::expr_path(make::ext::ident_path("self"));
                        let f_path = make::expr_ref(f_path, false);
                        let f_path = make::expr_field(f_path, &format!("{}", i)).into();
                        let method = make::name_ref("field");
                        expr = make::expr_method_call(expr, method, make::arg_list(Some(f_path)));
                    }
                    expr
                }
            };
            let method = make::name_ref("finish");
            let expr = make::expr_method_call(expr, method, make::arg_list(None));
            let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
            // Swap the placeholder (`todo!()`) body for the generated one.
            ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
            Some(())
        }
    }
}
/// Generate a `Default` impl based on the fields and members of the target type.
fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
    // Builds the expression `Default::default()`.
    fn gen_default_call() -> Option<ast::Expr> {
        let fn_name = make::ext::path_from_idents(["Default", "default"])?;
        Some(make::expr_call(make::expr_path(fn_name), make::arg_list(None)))
    }
    match adt {
        // `Default` cannot be derived for unions, so no default impl can be provided.
        ast::Adt::Union(_) => None,
        // Deriving `Default` for enums is not stable yet.
        ast::Adt::Enum(_) => None,
        ast::Adt::Struct(strukt) => {
            let expr = match strukt.field_list() {
                // => Self { field: Default::default(), .. }
                Some(ast::FieldList::RecordFieldList(field_list)) => {
                    let mut fields = vec![];
                    for field in field_list.fields() {
                        let method_call = gen_default_call()?;
                        let name_ref = make::name_ref(&field.name()?.to_string());
                        let field = make::record_expr_field(name_ref, Some(method_call));
                        fields.push(field);
                    }
                    let struct_name = make::ext::ident_path("Self");
                    let fields = make::record_expr_field_list(fields);
                    make::record_expr(struct_name, fields).into()
                }
                // => Self(Default::default(), ..)
                Some(ast::FieldList::TupleFieldList(field_list)) => {
                    let struct_name = make::expr_path(make::ext::ident_path("Self"));
                    let fields = field_list
                        .fields()
                        .map(|_| gen_default_call())
                        .collect::<Option<Vec<ast::Expr>>>()?;
                    make::expr_call(struct_name, make::arg_list(fields))
                }
                // => Self { }
                None => {
                    let struct_name = make::ext::ident_path("Self");
                    let fields = make::record_expr_field_list(None);
                    make::record_expr(struct_name, fields).into()
                }
            };
            // Swap the placeholder (`todo!()`) body for the generated one.
            let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
            ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
            Some(())
        }
    }
}
/// Generate a `Hash` impl based on the fields and members of the target type.
fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
    // Builds the statement `<target>.hash(state);`.
    fn gen_hash_call(target: ast::Expr) -> ast::Stmt {
        let method = make::name_ref("hash");
        let arg = make::expr_path(make::ext::ident_path("state"));
        let expr = make::expr_method_call(target, method, make::arg_list(Some(arg)));
        make::expr_stmt(expr).into()
    }
    let body = match adt {
        // `Hash` cannot be derived for unions, so no default impl can be provided.
        ast::Adt::Union(_) => return None,
        // => std::mem::discriminant(self).hash(state);
        ast::Adt::Enum(_) => {
            let fn_name = make_discriminant()?;
            let arg = make::expr_path(make::ext::ident_path("self"));
            let fn_call = make::expr_call(fn_name, make::arg_list(Some(arg)));
            let stmt = gen_hash_call(fn_call);
            make::block_expr(Some(stmt), None).indent(ast::edit::IndentLevel(1))
        }
        ast::Adt::Struct(strukt) => match strukt.field_list() {
            // => self.<field>.hash(state);
            Some(ast::FieldList::RecordFieldList(field_list)) => {
                let mut stmts = vec![];
                for field in field_list.fields() {
                    let base = make::expr_path(make::ext::ident_path("self"));
                    let target = make::expr_field(base, &field.name()?.to_string());
                    stmts.push(gen_hash_call(target));
                }
                make::block_expr(stmts, None).indent(ast::edit::IndentLevel(1))
            }
            // => self.<field_index>.hash(state);
            Some(ast::FieldList::TupleFieldList(field_list)) => {
                let mut stmts = vec![];
                for (i, _) in field_list.fields().enumerate() {
                    let base = make::expr_path(make::ext::ident_path("self"));
                    let target = make::expr_field(base, &format!("{}", i));
                    stmts.push(gen_hash_call(target));
                }
                make::block_expr(stmts, None).indent(ast::edit::IndentLevel(1))
            }
            // No fields in the body means there's nothing to hash.
            None => return None,
        },
    };
    // Swap the placeholder (`todo!()`) body for the generated one.
    ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
    Some(())
}
/// Generate a `PartialEq` impl based on the fields and members of the target type.
fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
    // Chains comparisons with `&&`, or starts the chain if `expr` is `None`.
    fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> {
        match expr {
            Some(expr) => Some(make::expr_bin_op(expr, BinaryOp::LogicOp(LogicOp::And), cmp)),
            None => Some(cmp),
        }
    }
    // Builds the record-pattern field `field_name: pat_name`.
    fn gen_record_pat_field(field_name: &str, pat_name: &str) -> ast::RecordPatField {
        let pat = make::ext::simple_ident_pat(make::name(&pat_name));
        let name_ref = make::name_ref(field_name);
        make::record_pat_field(name_ref, pat.into())
    }
    fn gen_record_pat(record_name: ast::Path, fields: Vec<ast::RecordPatField>) -> ast::RecordPat {
        let list = make::record_pat_field_list(fields);
        make::record_pat_with_fields(record_name, list)
    }
    // Path `Self::<variant>` for the given enum variant.
    fn gen_variant_path(variant: &ast::Variant) -> Option<ast::Path> {
        make::ext::path_from_idents(["Self", &variant.name()?.to_string()])
    }
    fn gen_tuple_field(field_name: &String) -> ast::Pat {
        ast::Pat::IdentPat(make::ident_pat(false, false, make::name(field_name)))
    }
    // FIXME: return `None` if the trait carries a generic type; we can only
    // generate this code `Self` for the time being.
    let body = match adt {
        // `PartialEq` cannot be derived for unions, so no default impl can be provided.
        ast::Adt::Union(_) => return None,
        ast::Adt::Enum(enum_) => {
            // => std::mem::discriminant(self) == std::mem::discriminant(other)
            let lhs_name = make::expr_path(make::ext::ident_path("self"));
            let lhs = make::expr_call(make_discriminant()?, make::arg_list(Some(lhs_name.clone())));
            let rhs_name = make::expr_path(make::ext::ident_path("other"));
            let rhs = make::expr_call(make_discriminant()?, make::arg_list(Some(rhs_name.clone())));
            let eq_check =
                make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
            // Track how many variants exist vs. how many got a dedicated
            // arm, to decide whether a wildcard fallback arm is needed.
            let mut n_cases = 0;
            let mut arms = vec![];
            for variant in enum_.variant_list()?.variants() {
                n_cases += 1;
                match variant.field_list() {
                    // => (Self::Bar { bin: l_bin }, Self::Bar { bin: r_bin }) => l_bin == r_bin,
                    Some(ast::FieldList::RecordFieldList(list)) => {
                        let mut expr = None;
                        let mut l_fields = vec![];
                        let mut r_fields = vec![];
                        for field in list.fields() {
                            let field_name = field.name()?.to_string();
                            let l_name = &format!("l_{}", field_name);
                            l_fields.push(gen_record_pat_field(&field_name, &l_name));
                            let r_name = &format!("r_{}", field_name);
                            r_fields.push(gen_record_pat_field(&field_name, &r_name));
                            let lhs = make::expr_path(make::ext::ident_path(l_name));
                            let rhs = make::expr_path(make::ext::ident_path(r_name));
                            let cmp = make::expr_bin_op(
                                lhs,
                                BinaryOp::CmpOp(CmpOp::Eq { negated: false }),
                                rhs,
                            );
                            expr = gen_eq_chain(expr, cmp);
                        }
                        let left = gen_record_pat(gen_variant_path(&variant)?, l_fields);
                        let right = gen_record_pat(gen_variant_path(&variant)?, r_fields);
                        let tuple = make::tuple_pat(vec![left.into(), right.into()]);
                        if let Some(expr) = expr {
                            arms.push(make::match_arm(Some(tuple.into()), None, expr));
                        }
                    }
                    // => (Self::Baz(l0), Self::Baz(r0)) => l0 == r0,
                    Some(ast::FieldList::TupleFieldList(list)) => {
                        let mut expr = None;
                        let mut l_fields = vec![];
                        let mut r_fields = vec![];
                        for (i, _) in list.fields().enumerate() {
                            let field_name = format!("{}", i);
                            let l_name = format!("l{}", field_name);
                            l_fields.push(gen_tuple_field(&l_name));
                            let r_name = format!("r{}", field_name);
                            r_fields.push(gen_tuple_field(&r_name));
                            let lhs = make::expr_path(make::ext::ident_path(&l_name));
                            let rhs = make::expr_path(make::ext::ident_path(&r_name));
                            let cmp = make::expr_bin_op(
                                lhs,
                                BinaryOp::CmpOp(CmpOp::Eq { negated: false }),
                                rhs,
                            );
                            expr = gen_eq_chain(expr, cmp);
                        }
                        let left = make::tuple_struct_pat(gen_variant_path(&variant)?, l_fields);
                        let right = make::tuple_struct_pat(gen_variant_path(&variant)?, r_fields);
                        let tuple = make::tuple_pat(vec![left.into(), right.into()]);
                        if let Some(expr) = expr {
                            arms.push(make::match_arm(Some(tuple.into()), None, expr));
                        }
                    }
                    // Unit variants are fully handled by the discriminant check.
                    None => continue,
                }
            }
            let expr = match arms.len() {
                0 => eq_check,
                _ => {
                    if n_cases > arms.len() {
                        // Some variants had no fields: fall back to
                        // comparing discriminants in a wildcard arm.
                        let lhs = make::wildcard_pat().into();
                        arms.push(make::match_arm(Some(lhs), None, eq_check));
                    }
                    let match_target = make::expr_tuple(vec![lhs_name, rhs_name]);
                    let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
                    make::expr_match(match_target, list)
                }
            };
            make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
        }
        ast::Adt::Struct(strukt) => match strukt.field_list() {
            // => self.<field> == other.<field> && ...
            Some(ast::FieldList::RecordFieldList(field_list)) => {
                let mut expr = None;
                for field in field_list.fields() {
                    let lhs = make::expr_path(make::ext::ident_path("self"));
                    let lhs = make::expr_field(lhs, &field.name()?.to_string());
                    let rhs = make::expr_path(make::ext::ident_path("other"));
                    let rhs = make::expr_field(rhs, &field.name()?.to_string());
                    let cmp =
                        make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
                    expr = gen_eq_chain(expr, cmp);
                }
                make::block_expr(None, expr).indent(ast::edit::IndentLevel(1))
            }
            // => self.<index> == other.<index> && ...
            Some(ast::FieldList::TupleFieldList(field_list)) => {
                let mut expr = None;
                for (i, _) in field_list.fields().enumerate() {
                    let idx = format!("{}", i);
                    let lhs = make::expr_path(make::ext::ident_path("self"));
                    let lhs = make::expr_field(lhs, &idx);
                    let rhs = make::expr_path(make::ext::ident_path("other"));
                    let rhs = make::expr_field(rhs, &idx);
                    let cmp =
                        make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
                    expr = gen_eq_chain(expr, cmp);
                }
                make::block_expr(None, expr).indent(ast::edit::IndentLevel(1))
            }
            // No fields in the body means there's nothing to compare:
            // the values are always equal.
            None => {
                let expr = make::expr_literal("true").into();
                make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
            }
        },
    };
    // Swap the placeholder (`todo!()`) body for the generated one.
    ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
    Some(())
}
/// Generate a `PartialOrd` impl based on the fields and members of the target type.
fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
    // Builds `match <target> { Some(core::cmp::Ordering::Equal) => {}, ord => return ord }`
    // so that comparison falls through to the next field only on equality.
    fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> {
        let mut arms = vec![];
        let variant_name =
            make::path_pat(make::ext::path_from_idents(["core", "cmp", "Ordering", "Equal"])?);
        let lhs = make::tuple_struct_pat(make::ext::path_from_idents(["Some"])?, [variant_name]);
        arms.push(make::match_arm(Some(lhs.into()), None, make::expr_empty_block()));
        arms.push(make::match_arm(
            [make::ident_pat(false, false, make::name("ord")).into()],
            None,
            make::expr_return(Some(make::expr_path(make::ext::ident_path("ord")))),
        ));
        let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1))
        Some(make::expr_stmt(make::expr_match(match_target, list)).into())
    }
    // Builds `<lhs>.partial_cmp(&<rhs>)`.
    fn gen_partial_cmp_call(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {
        let rhs = make::expr_ref(rhs, false);
        let method = make::name_ref("partial_cmp");
        make::expr_method_call(lhs, method, make::arg_list(Some(rhs)))
    }
    // FIXME: return `None` if the trait carries a generic type; we can only
    // generate this code `Self` for the time being.
    let body = match adt {
        // `PartialOrd` cannot be derived for unions, so no default impl can be provided.
        ast::Adt::Union(_) => return None,
        // `core::mem::Discriminant` does not implement `PartialOrd` in stable Rust today.
        ast::Adt::Enum(_) => return None,
        ast::Adt::Struct(strukt) => match strukt.field_list() {
            // Compare each named field in order; the last comparison is the
            // tail expression, the earlier ones early-return on inequality.
            Some(ast::FieldList::RecordFieldList(field_list)) => {
                let mut exprs = vec![];
                for field in field_list.fields() {
                    let lhs = make::expr_path(make::ext::ident_path("self"));
                    let lhs = make::expr_field(lhs, &field.name()?.to_string());
                    let rhs = make::expr_path(make::ext::ident_path("other"));
                    let rhs = make::expr_field(rhs, &field.name()?.to_string());
                    let ord = gen_partial_cmp_call(lhs, rhs);
                    exprs.push(ord);
                }
                let tail = exprs.pop();
                let stmts = exprs
                    .into_iter()
                    .map(gen_partial_eq_match)
                    .collect::<Option<Vec<ast::Stmt>>>()?;
                make::block_expr(stmts.into_iter(), tail).indent(ast::edit::IndentLevel(1))
            }
            // Same scheme for positional fields.
            Some(ast::FieldList::TupleFieldList(field_list)) => {
                let mut exprs = vec![];
                for (i, _) in field_list.fields().enumerate() {
                    let idx = format!("{}", i);
                    let lhs = make::expr_path(make::ext::ident_path("self"));
                    let lhs = make::expr_field(lhs, &idx);
                    let rhs = make::expr_path(make::ext::ident_path("other"));
                    let rhs = make::expr_field(rhs, &idx);
                    let ord = gen_partial_cmp_call(lhs, rhs);
                    exprs.push(ord);
                }
                let tail = exprs.pop();
                let stmts = exprs
                    .into_iter()
                    .map(gen_partial_eq_match)
                    .collect::<Option<Vec<ast::Stmt>>>()?;
                make::block_expr(stmts.into_iter(), tail).indent(ast::edit::IndentLevel(1))
            }
            // No fields in the body means there's nothing to compare.
            // NOTE(review): the generated `true` body does not type-check
            // for `partial_cmp`, whose return type is `Option<Ordering>`;
            // `Some(core::cmp::Ordering::Equal)` looks intended — confirm.
            None => {
                let expr = make::expr_literal("true").into();
                make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
            }
        },
    };
    // Swap the placeholder (`todo!()`) body for the generated one.
    ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
    Some(())
}
/// Builds a path expression naming `core::mem::discriminant`.
fn make_discriminant() -> Option<ast::Expr> {
    let path = make::ext::path_from_idents(["core", "mem", "discriminant"])?;
    Some(make::expr_path(path))
}
| 48.938066 | 100 | 0.491496 |
8fabb30dfe96344df55fea2829315c71dcec70b1 | 4,927 | use super::*;
/// Boot services function table
///
/// Thin wrapper around a raw pointer to the firmware-provided boot services
/// table; all methods dispatch through that table's function pointers.
pub struct BootServices(*mut bits::BootServices);
impl BootServices {
    /// Wraps the raw boot-services table pointer handed over by the firmware.
    pub(crate) fn new(boot_services: *mut bits::BootServices) -> Self {
        BootServices(boot_services)
    }
    /// Allocate `size` bytes of memory
    ///
    /// Do not use this function for ordinary memory allocations. Use the global allocator instead.
    ///
    /// # Safety
    ///
    /// Safe if `exit_boot_services` was not called.
    pub unsafe fn allocate_pool(
        &self,
        pool_type: bits::MemoryType,
        size: usize,
    ) -> Result<*mut u8> {
        // The firmware returns the allocation through this out-parameter.
        let mut buffer: *mut core::ffi::c_void = 0 as _;
        status_to_result(((*self.0).allocate_pool)(pool_type, size, &mut buffer as _))?;
        Ok(buffer as _)
    }
    /// Frees memory allocated by `allocate_pool`
    ///
    /// Do not use this function for ordinary memory allocations. Use the global allocator instead.
    ///
    /// # Safety
    ///
    /// Safe if `exit_boot_services` was not called and `buffer` was allocated by `allocate_pool`.
    pub unsafe fn free_pool(&self, buffer: *mut u8) -> Result<()> {
        status_to_result(((*self.0).free_pool)(buffer as _))
    }
    /// Allocate `num` consecutive pages of physical memory
    ///
    /// Do not use this function for ordinary memory allocations. Use the global allocator instead.
    ///
    /// # Safety
    ///
    /// Safe if `exit_boot_services` was not called.
    pub unsafe fn allocate_pages(
        &self,
        allocate_type: bits::AllocateType,
        memory_type: bits::MemoryType,
        num: usize,
    ) -> Result<*mut u8> {
        // Physical address of the first allocated page, filled in by the firmware.
        let mut result: bits::PhysicalAddress = 0;
        status_to_result(((*self.0).allocate_pages)(
            allocate_type,
            memory_type,
            num,
            &mut result as _,
        ))?;
        Ok(result as _)
    }
    /// Free `num` consecutive pages of physical memory
    ///
    /// Do not use this function for ordinary memory allocations. Use the global allocator instead.
    ///
    /// # Safety
    ///
    /// Safe if `exit_boot_services` was not called and `memory` was allocated by `allocate_pages`.
    pub unsafe fn free_pages(&self, memory: *mut u8, num: usize) -> Result<()> {
        status_to_result(((*self.0).free_pages)(memory as _, num))
    }
    /// Get the current memory map
    ///
    /// Do not use this function. Use `MemoryMap::get_current` instead.
    ///
    /// # Safety
    ///
    /// Safe if `exit_boot_services` was not called and pointers refer to valid memory.
    pub unsafe fn get_memory_map(
        &self,
        memory_map_size: *mut usize,
        memory_map: *mut bits::MemoryDescriptor,
        map_key: *mut usize,
        desc_size: *mut usize,
        desc_version: *mut u32,
    ) -> Result<()> {
        status_to_result(((*self.0).get_memory_map)(
            memory_map_size,
            memory_map,
            map_key,
            desc_size,
            desc_version,
        ))
    }
    /// Get an array of handles that support a specific protocol
    ///
    /// Do not use this function to locate protocol handles. Use `Protocol::locate_handles`
    /// instead.
    ///
    /// # Safety
    ///
    /// Safe if `exit_boot_services` was not called and passed pointer point to valid memory.
    pub unsafe fn locate_handle(
        &self,
        search_type: bits::LocateSearchType,
        protocol: *mut bits::Guid,
        search_key: *mut core::ffi::c_void,
        buffer_size: *mut usize,
        buffer: *mut bits::Handle,
    ) -> Result<()> {
        status_to_result(((*self.0).locate_handle)(
            search_type,
            protocol,
            search_key,
            buffer_size,
            buffer,
        ))
    }
    /// Get a pointer to a protocol supported by the handle
    ///
    /// Do not use this function to handle protocols. Use `Protocol::find_instances` instead.
    ///
    /// # Safety
    ///
    /// Safe if `exit_boot_services` was not called and passed pointer point to valid memory.
    pub unsafe fn handle_protocol(
        &self,
        handle: Handle,
        protocol: *mut bits::Guid,
    ) -> Result<*mut core::ffi::c_void> {
        // Interface pointer for the requested protocol, filled in by the firmware.
        let mut interface: *mut core::ffi::c_void = 0 as _;
        status_to_result(((*self.0).handle_protocol)(
            handle.value(),
            protocol,
            &mut interface as _,
        ))?;
        Ok(interface)
    }
    /// Exit the boot services and take control of the machine
    ///
    /// Most of the safe interfaces of `efw` will not work after calling this function.
    ///
    /// # Safety
    ///
    /// Safe if `exit_boot_services` was not called before.
    pub unsafe fn exit_boot_services(&self, map_key: usize) -> Result<()> {
        status_to_result(((*self.0).exit_boot_services)(
            Handle::get_self_handle().value() as _,
            map_key,
        ))
    }
}
| 31.382166 | 99 | 0.584737 |
dd996b8d0d0844bcea586e200de25725b4c9add2 | 21,023 | use lazy_static::lazy_static;
use std::collections::HashMap;
use std::fmt;
use std::sync::mpsc::{channel, Receiver, Sender};
use std::thread;
/// Byte offset into the input string being scanned.
type Pos = usize;
// Trim markers: `{{- ` requests trimming of the whitespace preceding the
// action, and ` -}}` requests trimming of the whitespace following it.
static LEFT_TRIM_MARKER: &str = "- ";
static RIGHT_TRIM_MARKER: &str = " -";
// Action delimiters and the comment markers used inside them, e.g. `{{/* c */}}`.
static LEFT_DELIM: &str = "{{";
static RIGHT_DELIM: &str = "}}";
static LEFT_COMMENT: &str = "/*";
static RIGHT_COMMENT: &str = "*/";
lazy_static! {
    // Maps keyword spellings to their token types; consulted by
    // `lex_identifier` once a full identifier has been scanned.
    static ref KEY: HashMap<&'static str, ItemType> = {
        let mut m = HashMap::new();
        m.insert(".", ItemType::ItemDot);
        m.insert("block", ItemType::ItemBlock);
        m.insert("define", ItemType::ItemDefine);
        m.insert("end", ItemType::ItemEnd);
        m.insert("else", ItemType::ItemElse);
        m.insert("if", ItemType::ItemIf);
        m.insert("range", ItemType::ItemRange);
        m.insert("nil", ItemType::ItemNil);
        m.insert("template", ItemType::ItemTemplate);
        m.insert("with", ItemType::ItemWith);
        m
    };
}
/// Kind of a token produced by the lexer.
#[derive(Debug, Clone, PartialEq)]
pub enum ItemType {
    ItemError, // error occurred; value is text of error
    ItemBool,  // boolean constant
    ItemChar,  // printable ASCII character; grab bag for comma etc.
    ItemCharConstant, // character constant
    ItemComplex,      // complex constant (1+2i); imaginary is just a number
    ItemColonEquals,  // colon-equals (':=') introducing a declaration
    ItemEOF,          // end of input
    ItemField,      // alphanumeric identifier starting with '.'
    ItemIdentifier, // alphanumeric identifier not starting with '.'
    ItemLeftDelim,  // left action delimiter
    ItemLeftParen,  // '(' inside action
    ItemNumber,     // simple number, including imaginary
    ItemPipe,       // pipe symbol
    ItemRawString,  // raw quoted string (includes quotes)
    ItemRightDelim, // right action delimiter
    ItemRightParen, // ')' inside action
    ItemSpace,      // run of spaces separating arguments
    ItemString,     // quoted string (includes quotes)
    ItemText,       // plain text
    ItemVariable,   // variable starting with '$', such as '$' or '$1' or '$hello'
    // Keywords, appear after all the rest.
    ItemKeyword,  // used only to delimit the keywords
    ItemBlock,    // block keyword
    ItemDot,      // the cursor, spelled '.'
    ItemDefine,   // define keyword
    ItemElse,     // else keyword
    ItemEnd,      // end keyword
    ItemIf,       // if keyword
    ItemNil,      // the untyped nil constant, easiest to treat as a keyword
    ItemRange,    // range keyword
    ItemTemplate, // template keyword
    ItemWith,     // with keyword
}
/// A single token scanned from the input.
#[derive(Debug)]
pub struct Item {
    pub typ: ItemType, // kind of token
    pub pos: Pos,      // byte offset of the token's start in the input
    pub val: String,   // text of the token
    pub line: usize,   // line number at the start of the token
}
impl Item {
    /// Creates a token of the given type at byte offset `pos` on `line`,
    /// converting `val` into an owned `String`.
    pub fn new<T: Into<String>>(typ: ItemType, pos: Pos, val: T, line: usize) -> Item {
        let val = val.into();
        Item { typ, pos, val, line }
    }
}
impl fmt::Display for Item {
    /// Renders the token for display: the EOF token as `EOF`, keywords
    /// wrapped in angle brackets, everything else as its raw text.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.typ == ItemType::ItemEOF {
            return write!(f, "EOF");
        }
        if self.typ == ItemType::ItemKeyword {
            return write!(f, "<{}>", self.val);
        }
        write!(f, "{}", self.val)
    }
}
/// Public token stream: an iterator over `Item`s produced by a background
/// lexing thread and delivered over a channel.
pub struct Lexer {
    last_pos: Pos,                  // position of most recent item returned by nextItem
    items_receiver: Receiver<Item>, // channel of scanned items
    finished: bool,                 // flag if lexer is finished
}
/// The lexing state machine proper; `Lexer::new` runs it on its own thread
/// and it pushes scanned `Item`s through `items_sender`.
struct LexerStateMachine {
    input: String,              // the string being scanned
    state: State,               // the next lexing function to enter
    pos: Pos,                   // current position in the input
    start: Pos,                 // start position of this item
    width: Pos,                 // width of last rune read from input
    items_sender: Sender<Item>, // channel of scanned items
    paren_depth: usize,         // nesting depth of ( ) exprs
    line: usize,                // 1+number of newlines seen
}
/// Current lexing state; `LexerStateMachine::run` dispatches each variant to
/// the matching `lex_*` method, which returns the next state. `End` stops the
/// machine.
#[derive(Debug)]
enum State {
    End,
    LexText,
    LexLeftDelim,
    LexComment,
    LexRightDelim,
    LexInsideAction,
    LexSpace,
    LexIdentifier,
    LexField,
    LexVariable,
    LexChar,
    LexNumber,
    LexQuote,
    LexRawQuote,
}
impl Iterator for Lexer {
    type Item = Item;

    /// Receives the next scanned token from the lexing thread.
    ///
    /// After an error token, the EOF token, or a channel failure the lexer is
    /// marked finished and every later call yields `None`. A channel failure
    /// is surfaced to the caller as a synthesized error token.
    fn next(&mut self) -> Option<Item> {
        if self.finished {
            return None;
        }
        match self.items_receiver.recv() {
            Ok(item) => {
                self.last_pos = item.pos;
                let terminal =
                    item.typ == ItemType::ItemError || item.typ == ItemType::ItemEOF;
                if terminal {
                    self.finished = true;
                }
                Some(item)
            }
            Err(e) => {
                self.finished = true;
                Some(Item::new(ItemType::ItemError, 0, format!("{}", e), 0))
            }
        }
    }
}
impl Lexer {
    /// Spawns a background thread that lexes `input` and streams the
    /// resulting tokens back over a channel.
    pub fn new(input: String) -> Lexer {
        let (sender, receiver) = channel();
        let mut machine = LexerStateMachine {
            input,
            state: State::LexText,
            pos: 0,
            start: 0,
            width: 0,
            items_sender: sender,
            paren_depth: 0,
            line: 1,
        };
        thread::spawn(move || machine.run());
        Lexer {
            last_pos: 0,
            items_receiver: receiver,
            finished: false,
        }
    }
    /// Consumes and discards all remaining tokens until the lexing thread
    /// drops its end of the channel.
    pub fn drain(&mut self) {
        while self.items_receiver.recv().is_ok() {}
    }
}
impl Drop for Lexer {
    fn drop(&mut self) {
        // Consume any remaining items before the receiver is dropped: the
        // lexing thread calls `send(..).unwrap()`, which would panic if the
        // receiving end disappeared while it is still producing tokens.
        self.drain();
    }
}
impl Iterator for LexerStateMachine {
    type Item = char;

    /// Reads the next character of the input, recording its UTF-8 width in
    /// `self.width` (0 at end of input) and bumping the line counter on
    /// newlines.
    fn next(&mut self) -> Option<char> {
        let next_char = self.input[self.pos..].chars().next();
        if let Some(ch) = next_char {
            self.width = ch.len_utf8();
            self.pos += self.width;
            if ch == '\n' {
                self.line += 1;
            }
            Some(ch)
        } else {
            self.width = 0;
            None
        }
    }
}
impl LexerStateMachine {
    /// Drives the state machine until it reaches `State::End`, sending
    /// scanned items over the channel as a side effect.
    fn run(&mut self) {
        loop {
            self.state = match self.state {
                State::LexText => self.lex_text(),
                State::LexComment => self.lex_comment(),
                State::LexLeftDelim => self.lex_left_delim(),
                State::LexRightDelim => self.lex_right_delim(),
                State::LexInsideAction => self.lex_inside_action(),
                State::LexSpace => self.lex_space(),
                State::LexIdentifier => self.lex_identifier(),
                State::LexField => self.lex_field(),
                State::LexVariable => self.lex_variable(),
                State::LexChar => self.lex_char(),
                State::LexNumber => self.lex_number(),
                State::LexQuote => self.lex_quote(),
                State::LexRawQuote => self.lex_raw_quote(),
                State::End => {
                    return;
                }
            }
        }
    }
    /// Steps back over the last character returned by `next`.
    ///
    /// BUGFIX: this used to do `self.pos -= 1`, which lands inside a
    /// multi-byte UTF-8 character (later slicing then panics) and underflows
    /// the `usize` when called after `next` hit end of input. `self.width`
    /// holds the byte width of the last rune read (0 at EOF), so subtracting
    /// it always keeps `pos` on a char boundary and makes backup after EOF a
    /// no-op. Can only be called once per call of `next`.
    fn backup(&mut self) {
        self.pos -= self.width;
        if self.width == 1
            && self.input[self.pos..]
                .chars()
                .next()
                .and_then(|c| if c == '\n' { Some(()) } else { None })
                .is_some()
        {
            self.line -= 1;
        }
    }
    /// Returns the next character without consuming it.
    fn peek(&mut self) -> Option<char> {
        let c = self.next();
        self.backup();
        c
    }
    /// Sends the text between `start` and `pos` as an item of type `t` and
    /// advances `start` past it.
    fn emit(&mut self, t: ItemType) {
        let s = &self.input[self.start..self.pos];
        // NOTE(review): `next` already advances `self.line` on every newline;
        // the extra per-item adjustment below (a constant 1 for text-like
        // items) looks suspicious — verify reported line numbers for
        // multi-line text items.
        let lines = match t {
            ItemType::ItemText
            | ItemType::ItemRawString
            | ItemType::ItemLeftDelim
            | ItemType::ItemRightDelim => 1,
            _ => s.chars().filter(|c| *c == '\n').count(),
        };
        self.items_sender
            .send(Item::new(t, self.start, s, self.line))
            .unwrap();
        self.line += lines;
        self.start = self.pos;
    }
    /// Skips over the pending input before this point.
    fn ignore(&mut self) {
        self.start = self.pos;
    }
    /// Consumes the next character if it is in `valid`; reports whether it did.
    fn accept(&mut self, valid: &str) -> bool {
        if self.next().map(|s| valid.contains(s)).unwrap_or_default() {
            return true;
        }
        self.backup();
        false
    }
    /// Consumes a run of characters from `valid`.
    fn accept_run(&mut self, valid: &str) {
        while self.accept(valid) {}
    }
    /// Emits an error item and terminates the scan.
    fn errorf(&mut self, msg: &str) -> State {
        self.items_sender
            .send(Item::new(ItemType::ItemError, self.start, msg, self.line))
            .unwrap();
        State::End
    }
    /// Scans plain text up to the next left delimiter (or end of input),
    /// honoring the `{{- ` left trim marker.
    fn lex_text(&mut self) -> State {
        self.width = 0;
        let x = self.input[self.pos..].find(&LEFT_DELIM);
        match x {
            Some(x) => {
                self.pos += x;
                let ld = self.pos + LEFT_DELIM.len();
                let trim = if self.input[ld..].starts_with(LEFT_TRIM_MARKER) {
                    rtrim_len(&self.input[self.start..self.pos])
                } else {
                    0
                };
                self.pos -= trim;
                if self.pos > self.start {
                    self.emit(ItemType::ItemText);
                }
                self.pos += trim;
                self.ignore();
                State::LexLeftDelim
            }
            None => {
                self.pos = self.input.len();
                if self.pos > self.start {
                    self.emit(ItemType::ItemText);
                }
                self.emit(ItemType::ItemEOF);
                State::End
            }
        }
    }
    /// Reports whether the input at `pos` starts a right delimiter, and
    /// whether that delimiter carries the ` -}}` trim marker.
    fn at_right_delim(&mut self) -> (bool, bool) {
        if self.input[self.pos..].starts_with(&RIGHT_DELIM) {
            return (true, false);
        }
        if self.input[self.pos..].starts_with(&format!("{}{}", RIGHT_TRIM_MARKER, RIGHT_DELIM)) {
            return (true, true);
        }
        (false, false)
    }
    /// Scans the left delimiter, dispatching to comment scanning when a
    /// comment follows immediately.
    fn lex_left_delim(&mut self) -> State {
        self.pos += LEFT_DELIM.len();
        let trim = self.input[self.pos..].starts_with(LEFT_TRIM_MARKER);
        let after_marker = if trim { LEFT_TRIM_MARKER.len() } else { 0 };
        if self.input[(self.pos + after_marker)..].starts_with(LEFT_COMMENT) {
            self.pos += after_marker;
            self.ignore();
            State::LexComment
        } else {
            self.emit(ItemType::ItemLeftDelim);
            self.pos += after_marker;
            self.ignore();
            self.paren_depth = 0;
            State::LexInsideAction
        }
    }
    /// Scans a `/* ... */` comment; the comment must be terminated by the
    /// closing delimiter of the action.
    fn lex_comment(&mut self) -> State {
        self.pos += LEFT_COMMENT.len();
        let i = match self.input[self.pos..].find(RIGHT_COMMENT) {
            Some(i) => i,
            None => {
                return self.errorf("unclosed comment");
            }
        };
        self.pos += i + RIGHT_COMMENT.len();
        let (delim, trim) = self.at_right_delim();
        if !delim {
            return self.errorf("comment end before closing delimiter");
        }
        if trim {
            self.pos += RIGHT_TRIM_MARKER.len();
        }
        self.pos += RIGHT_DELIM.len();
        if trim {
            self.pos += ltrim_len(&self.input[self.pos..]);
        }
        self.ignore();
        State::LexText
    }
    /// Scans the right delimiter, honoring the ` -}}` right trim marker.
    fn lex_right_delim(&mut self) -> State {
        let trim = self.input[self.pos..].starts_with(RIGHT_TRIM_MARKER);
        if trim {
            self.pos += RIGHT_TRIM_MARKER.len();
            self.ignore();
        }
        self.pos += RIGHT_DELIM.len();
        self.emit(ItemType::ItemRightDelim);
        if trim {
            self.pos += ltrim_len(&self.input[self.pos..]);
            self.ignore();
        }
        State::LexText
    }
    /// Scans the elements inside action delimiters, dispatching on the first
    /// character of each element.
    fn lex_inside_action(&mut self) -> State {
        let (delim, _) = self.at_right_delim();
        if delim {
            if self.paren_depth == 0 {
                return State::LexRightDelim;
            }
            return self.errorf("unclosed left paren");
        }
        match self.next() {
            None | Some('\r') | Some('\n') => self.errorf("unclosed action"),
            Some(c) => {
                match c {
                    '"' => State::LexQuote,
                    '`' => State::LexRawQuote,
                    '$' => State::LexVariable,
                    '\'' => State::LexChar,
                    '(' => {
                        self.emit(ItemType::ItemLeftParen);
                        self.paren_depth += 1;
                        State::LexInsideAction
                    }
                    ')' => {
                        self.emit(ItemType::ItemRightParen);
                        if self.paren_depth == 0 {
                            return self.errorf(&format!("unexpected right paren {}", c));
                        }
                        self.paren_depth -= 1;
                        State::LexInsideAction
                    }
                    ':' => match self.next() {
                        Some('=') => {
                            self.emit(ItemType::ItemColonEquals);
                            State::LexInsideAction
                        }
                        _ => self.errorf("expected :="),
                    },
                    '|' => {
                        self.emit(ItemType::ItemPipe);
                        State::LexInsideAction
                    }
                    '.' => match self.input[self.pos..].chars().next() {
                        Some('0'..='9') => {
                            self.backup();
                            State::LexNumber
                        }
                        _ => State::LexField,
                    },
                    '+' | '-' | '0'..='9' => {
                        self.backup();
                        State::LexNumber
                    }
                    _ if c.is_whitespace() => State::LexSpace,
                    _ if c.is_alphanumeric() || c == '_' => {
                        self.backup();
                        State::LexIdentifier
                    }
                    _ if c.is_ascii() => {
                        // figure out a way to check for unicode.isPrint ?!
                        self.emit(ItemType::ItemChar);
                        State::LexInsideAction
                    }
                    _ => self.errorf(&format!("unrecognized character in action {}", c)),
                }
            }
        }
    }
    /// Scans a run of whitespace characters; one space item is emitted for
    /// the whole run.
    fn lex_space(&mut self) -> State {
        while self.peek().map(|c| c.is_whitespace()).unwrap_or_default() {
            self.next();
        }
        self.emit(ItemType::ItemSpace);
        State::LexInsideAction
    }
    /// Scans an alphanumeric word and classifies it as a keyword, boolean,
    /// field, or plain identifier.
    fn lex_identifier(&mut self) -> State {
        let c = self.find(|c| !(c.is_alphanumeric() || *c == '_'));
        self.backup();
        if !self.at_terminator() {
            return self.errorf(&format!("bad character {}", c.unwrap_or_default()));
        }
        let item_type = match &self.input[self.start..self.pos] {
            "true" | "false" => ItemType::ItemBool,
            word if KEY.contains_key(word) => (*KEY.get(word).unwrap()).clone(),
            word if word.starts_with('.') => ItemType::ItemField,
            _ => ItemType::ItemIdentifier,
        };
        self.emit(item_type);
        State::LexInsideAction
    }
    /// Scans a field access: `.field`. The leading dot has been consumed.
    fn lex_field(&mut self) -> State {
        self.lex_field_or_variable(ItemType::ItemField)
    }
    /// Scans a variable: `$name`. The leading `$` has been consumed.
    fn lex_variable(&mut self) -> State {
        self.lex_field_or_variable(ItemType::ItemVariable)
    }
    /// Shared scanner for fields and variables. A bare `.`/`$` (already at a
    /// terminator) emits the cursor / anonymous variable respectively.
    fn lex_field_or_variable(&mut self, typ: ItemType) -> State {
        if self.at_terminator() {
            self.emit(match typ {
                ItemType::ItemVariable => ItemType::ItemVariable,
                _ => ItemType::ItemDot,
            });
            return State::LexInsideAction;
        }
        let c = self.find(|c| !(c.is_alphanumeric() || *c == '_'));
        self.backup();
        if !self.at_terminator() {
            return self.errorf(&format!("bad character {}", c.unwrap_or_default()));
        }
        self.emit(typ);
        State::LexInsideAction
    }
    /// Reports whether the input is at a valid terminator for an identifier,
    /// field, or variable: punctuation, whitespace, EOF, or a right delimiter.
    fn at_terminator(&mut self) -> bool {
        match self.peek() {
            Some(c) => {
                match c {
                    '.' | ',' | '|' | ':' | ')' | '(' | ' ' | '\t' | '\r' | '\n' => true,
                    // this is what golang does to detect a delimiter
                    _ => RIGHT_DELIM.starts_with(c),
                }
            }
            None => false,
        }
    }
    /// Scans a character constant such as `'a'`, handling backslash escapes.
    fn lex_char(&mut self) -> State {
        let mut escaped = false;
        loop {
            let c = self.next();
            match c {
                Some('\\') => {
                    escaped = true;
                    continue;
                }
                Some('\n') | None => {
                    return self.errorf("unterminated character constant");
                }
                Some('\'') if !escaped => {
                    break;
                }
                _ => {}
            };
            escaped = false;
        }
        self.emit(ItemType::ItemCharConstant);
        State::LexInsideAction
    }
    /// Scans a number; complex numbers are not supported.
    fn lex_number(&mut self) -> State {
        if self.scan_number() {
            // Let's ingnore complex numbers here.
            self.emit(ItemType::ItemNumber);
            State::LexInsideAction
        } else {
            let msg = &format!("bad number syntax: {}", &self.input[self.start..self.pos]);
            self.errorf(msg)
        }
    }
    /// Consumes a number literal (sign, hex or decimal digits, fraction,
    /// exponent) and reports whether it is well-formed.
    fn scan_number(&mut self) -> bool {
        self.accept("+-");
        if self.accept("0") && self.accept("xX") {
            let digits = "0123456789abcdefABCDEF";
            self.accept_run(digits);
        } else {
            let digits = "0123456789";
            self.accept_run(digits);
            if self.accept(".") {
                self.accept_run(digits);
            }
            if self.accept("eE") {
                self.accept("+-");
                self.accept_run(digits);
            }
        }
        // Let's ignore imaginary numbers for now.
        // NOTE(review): `unwrap_or(true)` rejects a number that ends exactly
        // at end of input (peek() == None). Inside an action a number is
        // always followed by more input, so this only changes which error is
        // reported for a truncated template — verify that is intended.
        if self.peek().map(|c| c.is_alphanumeric()).unwrap_or(true) {
            self.next();
            return false;
        }
        true
    }
    /// Scans a double-quoted string, handling backslash escapes.
    fn lex_quote(&mut self) -> State {
        let mut escaped = false;
        loop {
            let c = self.next();
            match c {
                Some('\\') => {
                    escaped = true;
                    continue;
                }
                Some('\n') | None => {
                    return self.errorf("unterminated quoted string");
                }
                Some('"') if !escaped => {
                    break;
                }
                _ => {}
            };
            escaped = false;
        }
        self.emit(ItemType::ItemString);
        State::LexInsideAction
    }
    /// Scans a backquoted raw string (no escapes, may span lines).
    fn lex_raw_quote(&mut self) -> State {
        let start_line = self.line;
        if self.find(|c| *c == '`').is_none() {
            // Restore the line number so the error points at the opening quote.
            self.line = start_line;
            return self.errorf("unterminated raw quoted string");
        }
        self.emit(ItemType::ItemRawString);
        State::LexInsideAction
    }
}
fn rtrim_len(s: &str) -> usize {
match s.rfind(|c: char| !c.is_whitespace()) {
Some(i) => s.len() - 1 - i,
None => s.len(),
}
}
/// Returns the number of leading whitespace bytes in `s` (all of `s.len()`
/// when the string is entirely whitespace).
fn ltrim_len(s: &str) -> usize {
    s.len() - s.trim_start().len()
}
#[cfg(test)]
mod tests {
    use super::*;
    // The first text item of a delimiter-free input is the whole input.
    #[test]
    fn lexer_run() {
        let mut l = Lexer::new("abc".to_owned());
        let i1 = l.next().unwrap();
        assert_eq!(i1.typ, ItemType::ItemText);
        assert_eq!(&i1.val, "abc");
    }
    // Counts every item produced for a simple action, including EOF.
    #[test]
    fn lex_simple() {
        let s = r#"something {{ if eq "foo" "bar" }}"#;
        let l = Lexer::new(s.to_owned());
        assert_eq!(l.count(), 13);
    }
    // Concatenating all item texts must reproduce the input verbatim.
    #[test]
    fn test_whitespace() {
        let s = r#"something {{ .foo }}"#;
        let l = Lexer::new(s.to_owned());
        let s_ = l.map(|i| i.val).collect::<Vec<String>>().join("");
        assert_eq!(s_, s);
    }
    // NOTE(review): identical to `test_whitespace` above — probably meant to
    // exercise a different input; consider varying it or removing the duplicate.
    #[test]
    fn test_input() {
        let s = r#"something {{ .foo }}"#;
        let l = Lexer::new(s.to_owned());
        let s_ = l.map(|i| i.val).collect::<Vec<String>>().join("");
        assert_eq!(s_, s);
    }
    // Underscores are legal inside field names.
    #[test]
    fn test_underscore() {
        let s = r#"something {{ .foo_bar }}"#;
        let l = Lexer::new(s.to_owned());
        let s_ = l.map(|i| i.val).collect::<Vec<String>>().join("");
        assert_eq!(s_, s);
    }
    // `{{- ... -}}` swallows the surrounding whitespace and trim markers.
    #[test]
    fn test_trim() {
        let s = r#"something {{- .foo -}} 2000"#;
        let l = Lexer::new(s.to_owned());
        let s_ = l.map(|i| i.val).collect::<Vec<String>>().join("");
        assert_eq!(s_, r#"something{{.foo}}2000"#);
    }
    // Comments vanish entirely from the output, including their delimiters.
    #[test]
    fn test_comment() {
        let s = r#"something {{- /* foo */ -}} 2000"#;
        let l = Lexer::new(s.to_owned());
        let s_ = l.map(|i| i.val).collect::<Vec<String>>().join("");
        assert_eq!(s_, r#"something2000"#);
    }
}
| 30.601164 | 97 | 0.468011 |
29d814b2567dc3969c95da2ef75dbdb50494e29b | 11,009 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use crate::{
components::{
apply_chunk_output::{ensure_no_discard, ensure_no_retry},
chunk_commit_queue::ChunkCommitQueue,
chunk_output::ChunkOutput,
},
logging::{LogEntry, LogSchema},
metrics::{
DIEM_EXECUTOR_APPLY_CHUNK_SECONDS, DIEM_EXECUTOR_COMMIT_CHUNK_SECONDS,
DIEM_EXECUTOR_EXECUTE_CHUNK_SECONDS, DIEM_EXECUTOR_VM_EXECUTE_CHUNK_SECONDS,
},
};
use anyhow::Result;
use diem_infallible::Mutex;
use diem_logger::prelude::*;
use diem_state_view::StateViewId;
use diem_types::{
contract_event::ContractEvent,
ledger_info::LedgerInfoWithSignatures,
transaction::{
Transaction, TransactionInfo, TransactionListWithProof, TransactionOutputListWithProof,
},
};
use diem_vm::VMExecutor;
use executor_types::{ChunkExecutorTrait, ExecutedChunk, ExecutedTrees, TransactionReplayer};
use fail::fail_point;
use std::{marker::PhantomData, sync::Arc};
use storage_interface::{state_view::VerifiedStateView, DbReaderWriter};
/// Executes, applies, and commits proven chunks of transactions (used during
/// state sync) against storage.
///
/// `V` selects the `VMExecutor` implementation; `commit_queue` holds chunks
/// that have been executed/applied but not yet persisted.
pub struct ChunkExecutor<V> {
    db: DbReaderWriter,
    commit_queue: Mutex<ChunkCommitQueue>,
    _phantom: PhantomData<V>,
}
impl<V> ChunkExecutor<V> {
    /// Creates an executor whose persisted view is read back from storage.
    pub fn new(db: DbReaderWriter) -> Result<Self> {
        let commit_queue = Mutex::new(ChunkCommitQueue::new_from_db(&db.reader)?);
        Ok(Self {
            db,
            commit_queue,
            _phantom: PhantomData,
        })
    }
    /// Creates an executor from an already-known persisted view, skipping the
    /// read from storage.
    pub fn new_with_view(db: DbReaderWriter, persisted_view: ExecutedTrees) -> Self {
        let commit_queue = Mutex::new(ChunkCommitQueue::new(persisted_view));
        Self {
            db,
            commit_queue,
            _phantom: PhantomData,
        }
    }
    /// Drops any queued-but-uncommitted chunks and re-syncs the commit queue
    /// with what is actually persisted in storage.
    pub fn reset(&self) -> Result<()> {
        *self.commit_queue.lock() = ChunkCommitQueue::new_from_db(&self.db.reader)?;
        Ok(())
    }
    /// Builds a verified state view on top of `latest_view`, reading through
    /// to `persisted_view`/storage for data not in memory.
    fn state_view(
        &self,
        latest_view: &ExecutedTrees,
        persisted_view: &ExecutedTrees,
    ) -> VerifiedStateView {
        latest_view.state_view(
            persisted_view,
            StateViewId::ChunkExecution {
                first_version: latest_view.txn_accumulator().num_leaves(),
            },
            Arc::clone(&self.db.reader),
        )
    }
    /// Applies `chunk_output` to the ledger view, checks it against the
    /// proof's `transaction_infos`, and attaches the chunk-ending ledger info
    /// if this chunk reaches the sync target or an epoch change.
    fn apply_chunk_output_for_state_sync(
        verified_target_li: &LedgerInfoWithSignatures,
        epoch_change_li: Option<&LedgerInfoWithSignatures>,
        latest_view: &ExecutedTrees,
        chunk_output: ChunkOutput,
        transaction_infos: &[TransactionInfo],
    ) -> Result<ExecutedChunk> {
        let (mut executed_chunk, to_discard, to_retry) =
            chunk_output.apply_to_ledger(latest_view.txn_accumulator())?;
        // State-sync chunks are proven, so no transaction may be discarded or retried.
        ensure_no_discard(to_discard)?;
        ensure_no_retry(to_retry)?;
        executed_chunk.ledger_info = executed_chunk
            .maybe_select_chunk_ending_ledger_info(verified_target_li, epoch_change_li)?;
        executed_chunk.ensure_transaction_infos_match(transaction_infos)?;
        Ok(executed_chunk)
    }
    /// Persists the next queued chunk (its transactions and, if present, its
    /// ending ledger info), then removes it from the queue.
    fn commit_chunk_impl(&self) -> Result<Arc<ExecutedChunk>> {
        let (base_view, to_commit) = self.commit_queue.lock().next_chunk_to_commit()?;
        let txns_to_commit = to_commit.transactions_to_commit()?;
        let ledger_info = to_commit.ledger_info.as_ref();
        if ledger_info.is_some() || !txns_to_commit.is_empty() {
            fail_point!("executor::commit_chunk", |_| {
                Err(anyhow::anyhow!("Injected error in commit_chunk"))
            });
            self.db.writer.save_transactions(
                &txns_to_commit,
                base_view.txn_accumulator().num_leaves(),
                ledger_info,
            )?;
        }
        self.commit_queue.lock().dequeue()?;
        Ok(to_commit)
    }
}
impl<V: VMExecutor> ChunkExecutorTrait for ChunkExecutor<V> {
    /// Verifies a proven chunk of transactions, executes it through the VM,
    /// and enqueues the result for a later `commit_chunk`.
    fn execute_chunk(
        &self,
        txn_list_with_proof: TransactionListWithProof,
        verified_target_li: &LedgerInfoWithSignatures,
        epoch_change_li: Option<&LedgerInfoWithSignatures>,
    ) -> Result<()> {
        let _timer = DIEM_EXECUTOR_EXECUTE_CHUNK_SECONDS.start_timer();
        let num_txns = txn_list_with_proof.transactions.len();
        let first_version_in_request = txn_list_with_proof.first_transaction_version;
        let (persisted_view, latest_view) = self.commit_queue.lock().persisted_and_latest_view();
        // Verify input transaction list.
        txn_list_with_proof.verify(verified_target_li.ledger_info(), first_version_in_request)?;
        // Skip transactions already in ledger.
        let txns_to_skip = txn_list_with_proof.proof.verify_extends_ledger(
            latest_view.txn_accumulator().num_leaves(),
            latest_view.txn_accumulator().root_hash(),
            first_version_in_request,
        )?;
        let mut transactions = txn_list_with_proof.transactions;
        transactions.drain(..txns_to_skip as usize);
        if txns_to_skip == num_txns {
            info!(
                "Skipping all transactions in the given chunk! Num transactions: {:?}",
                num_txns
            );
        }
        // Execute transactions.
        let state_view = self.state_view(&latest_view, &persisted_view);
        let chunk_output = {
            let _timer = DIEM_EXECUTOR_VM_EXECUTE_CHUNK_SECONDS.start_timer();
            ChunkOutput::by_transaction_execution::<V>(transactions, state_view)?
        };
        // Only the infos for the transactions actually executed are checked.
        let executed_chunk = Self::apply_chunk_output_for_state_sync(
            verified_target_li,
            epoch_change_li,
            &latest_view,
            chunk_output,
            &txn_list_with_proof.proof.transaction_infos[txns_to_skip..],
        )?;
        // Add result to commit queue.
        self.commit_queue.lock().enqueue(executed_chunk);
        info!(
            LogSchema::new(LogEntry::ChunkExecutor)
                .local_synced_version(latest_view.version().unwrap_or(0))
                .first_version_in_request(first_version_in_request)
                .num_txns_in_request(num_txns),
            "sync_request_executed",
        );
        Ok(())
    }
    /// Like `execute_chunk`, but applies pre-computed transaction outputs
    /// instead of re-executing through the VM.
    fn apply_chunk(
        &self,
        txn_output_list_with_proof: TransactionOutputListWithProof,
        verified_target_li: &LedgerInfoWithSignatures,
        epoch_change_li: Option<&LedgerInfoWithSignatures>,
    ) -> Result<()> {
        let _timer = DIEM_EXECUTOR_APPLY_CHUNK_SECONDS.start_timer();
        let num_txns = txn_output_list_with_proof.transactions_and_outputs.len();
        let first_version_in_request = txn_output_list_with_proof.first_transaction_output_version;
        let (persisted_view, latest_view) = self.commit_queue.lock().persisted_and_latest_view();
        // Verify input transaction list.
        txn_output_list_with_proof
            .verify(verified_target_li.ledger_info(), first_version_in_request)?;
        // Skip transactions already in ledger.
        let txns_to_skip = txn_output_list_with_proof.proof.verify_extends_ledger(
            latest_view.txn_accumulator().num_leaves(),
            latest_view.txn_accumulator().root_hash(),
            first_version_in_request,
        )?;
        let mut txns_and_outputs = txn_output_list_with_proof.transactions_and_outputs;
        txns_and_outputs.drain(..txns_to_skip as usize);
        // Apply transaction outputs.
        let state_view = self.state_view(&latest_view, &persisted_view);
        let chunk_output = ChunkOutput::by_transaction_output(txns_and_outputs, state_view)?;
        let executed_chunk = Self::apply_chunk_output_for_state_sync(
            verified_target_li,
            epoch_change_li,
            &latest_view,
            chunk_output,
            &txn_output_list_with_proof.proof.transaction_infos[txns_to_skip..],
        )?;
        // Add result to commit queue.
        self.commit_queue.lock().enqueue(executed_chunk);
        info!(
            LogSchema::new(LogEntry::ChunkExecutor)
                .local_synced_version(latest_view.version().unwrap_or(0))
                .first_version_in_request(first_version_in_request)
                .num_txns_in_request(num_txns),
            "sync_request_applied",
        );
        Ok(())
    }
    /// Persists the next queued chunk and returns the events it committed.
    fn commit_chunk(&self) -> Result<Vec<ContractEvent>> {
        let _timer = DIEM_EXECUTOR_COMMIT_CHUNK_SECONDS.start_timer();
        Ok(self.commit_chunk_impl()?.events_to_commit())
    }
    /// Convenience: reset the queue, then execute and immediately commit one chunk.
    fn execute_and_commit_chunk(
        &self,
        txn_list_with_proof: TransactionListWithProof,
        verified_target_li: &LedgerInfoWithSignatures,
        epoch_change_li: Option<&LedgerInfoWithSignatures>,
    ) -> Result<Vec<ContractEvent>> {
        // Re-sync with DB, make sure the queue is empty.
        self.reset()?;
        self.execute_chunk(txn_list_with_proof, verified_target_li, epoch_change_li)?;
        self.commit_chunk()
    }
    /// Convenience: reset the queue, then apply and immediately commit one chunk.
    fn apply_and_commit_chunk(
        &self,
        txn_output_list_with_proof: TransactionOutputListWithProof,
        verified_target_li: &LedgerInfoWithSignatures,
        epoch_change_li: Option<&LedgerInfoWithSignatures>,
    ) -> Result<Vec<ContractEvent>> {
        // Re-sync with DB, make sure the queue is empty.
        self.reset()?;
        self.apply_chunk(
            txn_output_list_with_proof,
            verified_target_li,
            epoch_change_li,
        )?;
        self.commit_chunk()
    }
}
impl<V: VMExecutor> ChunkExecutor<V> {}
impl<V: VMExecutor> TransactionReplayer for ChunkExecutor<V> {
    /// Re-executes `transactions` through the VM, verifying each executed
    /// batch against the expected `transaction_infos`, retrying transactions
    /// the VM asks to retry, and enqueues the combined result for `commit`.
    fn replay(
        &self,
        transactions: Vec<Transaction>,
        mut transaction_infos: Vec<TransactionInfo>,
    ) -> Result<()> {
        let (persisted_view, mut latest_view) =
            self.commit_queue.lock().persisted_and_latest_view();
        let mut executed_chunk = ExecutedChunk::default();
        let mut to_run = Some(transactions);
        while !to_run.as_ref().unwrap().is_empty() {
            // Execute transactions.
            let state_view = self.state_view(&latest_view, &persisted_view);
            let txns = to_run.take().unwrap();
            let (executed, to_discard, to_retry) =
                ChunkOutput::by_transaction_execution::<V>(txns, state_view)?
                    .apply_to_ledger(latest_view.txn_accumulator())?;
            // Accumulate result and deal with retry
            ensure_no_discard(to_discard)?;
            // Consume the infos matching the transactions committed this round.
            let n = executed.to_commit.len();
            executed.ensure_transaction_infos_match(&transaction_infos[..n])?;
            transaction_infos.drain(..n);
            to_run = Some(to_retry);
            executed_chunk = executed_chunk.combine(executed)?;
            latest_view = executed_chunk.result_view.clone();
        }
        // Add result to commit queue.
        self.commit_queue.lock().enqueue(executed_chunk);
        Ok(())
    }
    /// Persists the queued replay result.
    fn commit(&self) -> Result<Arc<ExecutedChunk>> {
        self.commit_chunk_impl()
    }
}
| 36.095082 | 99 | 0.650468 |
7114137b83d566793ea1e62bf86d4162cd92d124 | 2,081 | #[macro_use]
extern crate bencher;
extern crate gluon;
use bencher::{black_box, Bencher};
use gluon::vm::api::{primitive, FunctionRef, Primitive};
use gluon::vm::thread::{Status, Thread};
use gluon::{new_vm, Compiler};
// Benchmarks function calls
/// Measures the cost of deep recursive calls in the VM by evaluating
/// `factorial 100` in gluon.
fn factorial(b: &mut Bencher) {
    let vm = new_vm();
    let src = r#"
    let factorial n =
        if n < 2
        then 1
        else n * factorial (n - 1)
    factorial
    "#;
    Compiler::new().load_script(&vm, "factorial", src).unwrap();
    let mut factorial: FunctionRef<fn(i32) -> i32> = vm.get_global("factorial").unwrap();
    b.iter(|| black_box(factorial.call(100).unwrap()))
}
/// Measures tail-recursive calls in the VM using an accumulator-style
/// factorial, for comparison against the plain recursive version above.
fn factorial_tail_call(b: &mut Bencher) {
    let vm = new_vm();
    let src = r#"
    let factorial a n =
        if n < 2
        then a
        else factorial (a * n) (n - 1)
    factorial 1
    "#;
    Compiler::new().load_script(&vm, "factorial", src).unwrap();
    let mut factorial: FunctionRef<fn(i32) -> i32> = vm.get_global("factorial").unwrap();
    b.iter(|| black_box(factorial.call(100).unwrap()))
}
/// Measures the overhead of crossing the gluon/Rust boundary by handing a
/// no-op Rust primitive to a gluon loop that calls it ten times per iteration.
fn gluon_rust_boundary_overhead(b: &mut Bencher) {
    let vm = new_vm();
    // No-op extern function: only the call overhead itself is measured.
    extern "C" fn test_fn(_: &Thread) -> Status {
        Status::Ok
    }
    let text = r#"
    let for n f =
        if n #Int== 0 then
            ()
        else
            f n
            f n
            f n
            f n
            f n
            f n
            f n
            f n
            f n
            f n
            for (n #Int- 10) f
    for
    "#;
    Compiler::new().load_script(&vm, "test", text).unwrap();
    let mut test: FunctionRef<fn(i32, Primitive<fn(i32)>) -> ()> = vm.get_global("test").unwrap();
    b.iter(|| {
        let result = test
            .call(1000, primitive::<fn(i32)>("test_fn", test_fn))
            .unwrap();
        black_box(result)
    })
}
// Register the benchmarks above with bencher's harness and generate `main`.
benchmark_group!(
    function_call,
    factorial,
    factorial_tail_call,
    gluon_rust_boundary_overhead
);
benchmark_main!(function_call);
| 23.122222 | 98 | 0.539164 |
76b5d911591945622ff97227f1517f7ead7c6a4c | 5,383 | // Copyright 2016 - 2021 Ulrik Sverdrup "bluss"
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//!
//! General matrix multiplication for f32, f64 matrices. Operates on matrices
//! with general layout (they can use arbitrary row and column stride).
//!
//! This crate uses the same macro/microkernel approach to matrix multiplication as
//! the [BLIS][bl] project.
//!
//! We presently provide a few good microkernels, portable and for x86-64, and
//! only one operation: the general matrix-matrix multiplication (“gemm”).
//!
//! [bl]: https://github.com/flame/blis
//!
//! ## Matrix Representation
//!
//! **matrixmultiply** supports matrices with general stride, so a matrix
//! is passed using a pointer and four integers:
//!
//! - `a: *const f32`, pointer to the first element in the matrix
//! - `m: usize`, number of rows
//! - `k: usize`, number of columns
//! - `rsa: isize`, row stride
//! - `csa: isize`, column stride
//!
//! In this example, A is a m by k matrix. `a` is a pointer to the element at
//! index *0, 0*.
//!
//! The *row stride* is the pointer offset (in number of elements) to the
//! element on the next row. It’s the distance from element *i, j* to *i + 1,
//! j*.
//!
//! The *column stride* is the pointer offset (in number of elements) to the
//! element in the next column. It’s the distance from element *i, j* to *i,
//! j + 1*.
//!
//! For example for a contiguous matrix, row major strides are *rsa=k,
//! csa=1* and column major strides are *rsa=1, csa=m*.
//!
//! Strides can be negative or even zero, but for a mutable matrix elements
//! may not alias each other.
//!
//! ## Portability and Performance
//!
//! - The default kernels are written in portable Rust and available
//! on all targets. These may depend on autovectorization to perform well.
//!
//! - *x86* and *x86-64* features can be detected at runtime by default or
//!   at compile time (if enabled), and the following kernel variants are
//!   implemented in the crate:
//!
//! - `fma`
//! - `avx`
//! - `sse2`
//!
//! ## Features
//!
//! ### `std`
//!
//! `std` is enabled by default.
//!
//! This crate can be used without the standard library (`#![no_std]`) by
//! disabling the default `std` feature. To do so, use this in your
//! `Cargo.toml`:
//!
//! ```toml
//! matrixmultiply = { version = "0.2", default-features = false }
//! ```
//!
//! Runtime CPU feature detection is available only when `std` is enabled.
//! Without the `std` feature, the crate uses special CPU features only if they
//! are enabled at compile time. (To enable CPU features at compile time, pass
//! the relevant
//! [`target-cpu`](https://doc.rust-lang.org/rustc/codegen-options/index.html#target-cpu)
//! or
//! [`target-feature`](https://doc.rust-lang.org/rustc/codegen-options/index.html#target-feature)
//! option to `rustc`.)
//!
//! ### `threading`
//!
//! `threading` is an optional crate feature
//!
//! Threading enables multithreading for the operations. The environment variable
//! `MATMUL_NUM_THREADS` decides how many threads are used at maximum. At the moment 1-4 are
//! supported and the default is the number of physical cpus (as detected by `num_cpus`).
//!
//! ### `cgemm`
//!
//! `cgemm` is an optional crate feature.
//!
//! It enables the `cgemm` and `zgemm` methods for complex matrix multiplication.
//! This is an **experimental feature** and not yet as performant as the float kernels on x86.
//!
//! The complex representation we use is `[f64; 2]`.
//!
//! ### `constconf`
//!
//! `constconf` is an optional feature. When enabled, cache-sensitive parameters of
//! the gemm implementations can be tweaked *at compile time* by defining the following variables:
//!
//! - `MATMUL_SGEMM_MC`
//! (And so on, for S, D, C, ZGEMM and with NC, KC or MC).
//!
//! ## Other Notes
//!
//! The functions in this crate are thread safe, as long as the destination
//! matrix is distinct.
//!
//! ## Rust Version
//!
//! This version requires Rust 1.41.1 or later; the crate follows a carefully
//! considered upgrade policy, where updating the minimum Rust version is not a breaking
//! change.
// Crate attributes: docs root URL and optional `no_std` support (`std` is a
// default feature; see the module docs above).
#![doc(html_root_url = "https://docs.rs/matrixmultiply/0.2/")]
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(not(feature = "std"))]
extern crate alloc;
#[cfg(feature = "std")]
extern crate core;
// NOTE: `#[macro_use]` modules must be declared before the modules that use
// their macros, so the declaration order below is significant.
#[macro_use]
mod debugmacros;
#[macro_use]
mod loopmacros;
// Cache/blocking parameters: compile-time configurable through the
// `constconf` feature, otherwise the defaults are used.
mod archparam_defaults;
#[cfg(feature = "constconf")]
mod archparam;
#[cfg(feature = "constconf")]
mod constparse;
#[cfg(not(feature = "constconf"))]
pub(crate) use archparam_defaults as archparam;
mod gemm;
mod kernel;
mod ptr;
mod threading;
mod aligned_alloc;
mod util;
// x86/x86-64 specific kernels and their feature-detection macros.
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
#[macro_use]
mod x86;
mod dgemm_kernel;
mod sgemm_kernel;
// Public API: f64 and f32 general matrix multiplication.
pub use crate::gemm::dgemm;
pub use crate::gemm::sgemm;
// Complex (cgemm/zgemm) support, behind the optional `cgemm` feature.
#[cfg(feature = "cgemm")]
#[macro_use]
mod cgemm_common;
#[cfg(feature = "cgemm")]
mod cgemm_kernel;
#[cfg(feature = "cgemm")]
mod zgemm_kernel;
#[cfg(feature = "cgemm")]
pub use crate::gemm::cgemm;
#[cfg(feature = "cgemm")]
pub use crate::gemm::zgemm;
#[cfg(feature = "cgemm")]
pub use crate::gemm::CGemmOption;
| 30.936782 | 98 | 0.678618 |
2927ae78809930a548fb8c6a70547efce712ef99 | 522 | mod dmc;
mod noise;
mod pulse;
mod triangle;
pub use self::dmc::Dmc;
pub use self::noise::Noise;
pub use self::pulse::{Pulse, PulseChannel};
pub use self::triangle::Triangle;
use crate::apu::ApuState;
use crate::bus::AddressBus;
use crate::system::SystemState;
/// Common interface implemented by every APU channel (pulse, triangle,
/// noise, DMC).
///
/// All methods take `&self`; implementations presumably rely on interior
/// mutability for their state — NOTE(review): inferred from the signatures,
/// confirm against the concrete channel implementations.
pub trait Channel {
    /// Registers the channel's memory-mapped registers on the CPU address bus.
    fn register(&self, state: &mut SystemState, cpu: &mut AddressBus);
    /// Handles a write of `value` to the channel register at `addr`.
    fn write(&self, addr: u16, value: u8);
    /// Advances the channel one APU step and returns its current output sample.
    fn tick(&self, state: &ApuState) -> u8;
    /// Enables the channel.
    fn enable(&self);
    /// Disables the channel.
    fn disable(&self);
    /// Reports the channel's status flag (presumably whether it is enabled —
    /// confirm against callers).
    fn get_state(&self) -> bool;
}
| 22.695652 | 70 | 0.670498 |
188d0419c3993c9589b872ece99c6ee2e575a7e8 | 31,759 | //! This lint is used to collect metadata about clippy lints. This metadata is exported as a json
//! file and then used to generate the [clippy lint list](https://rust-lang.github.io/rust-clippy/master/index.html)
//!
//! This module, and therefore the entire lint, is guarded by a feature flag called
//! `metadata-collector-lint`
//!
//! The module transforms all lint names to ascii lowercase to ensure that we don't have mismatches
//! during any comparison or mapping. (Please take care of this, it's not fun to spend time on such
//! a simple mistake)
use if_chain::if_chain;
use rustc_ast as ast;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::{
self as hir, def::DefKind, intravisit, intravisit::Visitor, ExprKind, Item, ItemKind, Mutability, QPath,
};
use rustc_lint::{CheckLintNameResult, LateContext, LateLintPass, LintContext, LintId};
use rustc_middle::hir::map::Map;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::{sym, Loc, Span, Symbol};
use serde::{ser::SerializeStruct, Serialize, Serializer};
use std::collections::BinaryHeap;
use std::fmt;
use std::fs::{self, OpenOptions};
use std::io::prelude::*;
use std::path::Path;
use crate::utils::internal_lints::is_lint_ref_type;
use clippy_utils::{
diagnostics::span_lint, last_path_segment, match_def_path, match_function_call, match_path, paths, ty::match_type,
ty::walk_ptrs_ty_depth,
};
/// This is the output file of the lint collector.
const OUTPUT_FILE: &str = "../util/gh-pages/lints.json";
/// These lints are excluded from the export.
const BLACK_LISTED_LINTS: [&str; 3] = ["lint_author", "deep_code_inspection", "internal_metadata_collector"];
/// These groups will be ignored by the lint group matcher. This is useful for collections like
/// `clippy::all`
const IGNORED_LINT_GROUPS: [&str; 1] = ["clippy::all"];
/// Lints within this group will be excluded from the collection. These groups
/// have to be defined without the `clippy::` prefix.
const EXCLUDED_LINT_GROUPS: [&str; 1] = ["internal"];
/// Collected deprecated lints will be assigned to this group in the JSON output.
const DEPRECATED_LINT_GROUP_STR: &str = "deprecated";
/// This is the lint level for deprecated lints that will be displayed in the lint list.
const DEPRECATED_LINT_LEVEL: &str = "none";
/// This array holds Clippy's lint groups with their corresponding default lint level. The
/// lint level for deprecated lints is set in `DEPRECATED_LINT_LEVEL`.
const DEFAULT_LINT_LEVELS: &[(&str, &str)] = &[
    ("correctness", "deny"),
    ("suspicious", "warn"),
    ("restriction", "allow"),
    ("style", "warn"),
    ("pedantic", "allow"),
    ("complexity", "warn"),
    ("perf", "warn"),
    ("cargo", "allow"),
    ("nursery", "allow"),
];
/// This prefix is in front of the lint groups in the lint store. The prefix will be trimmed
/// to only keep the actual lint group in the output.
const CLIPPY_LINT_GROUP_PREFIX: &str = "clippy::";
/// This template will be used to format the configuration section in the lint documentation.
/// The `configurations` parameter will be replaced with one or multiple formatted
/// `ClippyConfiguration` instances. See `CONFIGURATION_VALUE_TEMPLATE` for further customizations.
// Defined as a macro (not a const) so it can be used as a `format!` string literal.
macro_rules! CONFIGURATION_SECTION_TEMPLATE {
    () => {
        r#"
### Configuration
This lint has the following configuration variables:
{configurations}
"#
    };
}
/// This template will be used to format an individual `ClippyConfiguration` instance in the
/// lint documentation.
///
/// The format function will provide strings for the following parameters: `name`, `ty`, `doc` and
/// `default`
macro_rules! CONFIGURATION_VALUE_TEMPLATE {
    () => {
        "* `{name}`: `{ty}`: {doc} (defaults to `{default}`)\n"
    };
}
// Paths of all `clippy_utils` functions that emit a lint; used to recognize
// lint emissions while walking the HIR.
const LINT_EMISSION_FUNCTIONS: [&[&str]; 7] = [
    &["clippy_utils", "diagnostics", "span_lint"],
    &["clippy_utils", "diagnostics", "span_lint_and_help"],
    &["clippy_utils", "diagnostics", "span_lint_and_note"],
    &["clippy_utils", "diagnostics", "span_lint_hir"],
    &["clippy_utils", "diagnostics", "span_lint_and_sugg"],
    &["clippy_utils", "diagnostics", "span_lint_and_then"],
    &["clippy_utils", "diagnostics", "span_lint_hir_and_then"],
];
// `DiagnosticBuilder` methods that add a suggestion; the `bool` flags whether
// the method produces a multi-part suggestion.
const SUGGESTION_DIAGNOSTIC_BUILDER_METHODS: [(&str, bool); 9] = [
    ("span_suggestion", false),
    ("span_suggestion_short", false),
    ("span_suggestion_verbose", false),
    ("span_suggestion_hidden", false),
    ("tool_only_span_suggestion", false),
    ("multipart_suggestion", true),
    ("multipart_suggestions", true),
    ("tool_only_multipart_suggestion", true),
    ("span_suggestions", true),
];
// Free helper functions in `clippy_utils` that attach suggestions to a lint.
const SUGGESTION_FUNCTIONS: [&[&str]; 2] = [
    &["clippy_utils", "diagnostics", "multispan_sugg"],
    &["clippy_utils", "diagnostics", "multispan_sugg_with_applicability"],
];
// Type path used to recognize deprecated lint declarations.
const DEPRECATED_LINT_TYPE: [&str; 3] = ["clippy_lints", "deprecated_lints", "ClippyDeprecatedLint"];
/// The index of the applicability name of `paths::APPLICABILITY_VALUES`
const APPLICABILITY_NAME_INDEX: usize = 2;
/// This applicability will be set for unresolved applicability values.
const APPLICABILITY_UNRESOLVED_STR: &str = "Unresolved";
declare_clippy_lint! {
    /// ### What it does
    /// Collects metadata about clippy lints for the website.
    ///
    /// This lint will be used to report problems of syntax parsing. You should hopefully never
    /// see this but never say never I guess ^^
    ///
    /// ### Why is this bad?
    /// This is not a bad thing but definitely a hacky way to do it. See
    /// issue [#4310](https://github.com/rust-lang/rust-clippy/issues/4310) for a discussion
    /// about the implementation.
    ///
    /// ### Known problems
    /// Hopefully none. It would be pretty uncool to have a problem here :)
    ///
    /// ### Example output
    /// ```json,ignore
    /// {
    ///     "id": "internal_metadata_collector",
    ///     "id_span": {
    ///         "path": "clippy_lints/src/utils/internal_lints/metadata_collector.rs",
    ///         "line": 1
    ///     },
    ///     "group": "clippy::internal",
    ///     "docs": " ### What it does\nCollects metadata about clippy lints for the website. [...] "
    /// }
    /// ```
    pub INTERNAL_METADATA_COLLECTOR,
    internal_warn,
    "A busy bee collection metadata about lints"
}
impl_lint_pass!(MetadataCollector => [INTERNAL_METADATA_COLLECTOR]);
#[allow(clippy::module_name_repetitions)]
#[derive(Debug, Clone)]
pub struct MetadataCollector {
    /// All collected lints
    ///
    /// We use a Heap here to have the lints added in alphabetic order in the export
    lints: BinaryHeap<LintMetadata>,
    /// Applicability data collected from lint emissions, keyed by lint name.
    applicability_info: FxHashMap<String, ApplicabilityInfo>,
    /// All configuration values gathered from Clippy's `conf` module.
    config: Vec<ClippyConfiguration>,
}
impl MetadataCollector {
    /// Creates a collector with empty lint/applicability stores and the
    /// configuration values gathered from Clippy's `conf` module.
    pub fn new() -> Self {
        Self {
            lints: BinaryHeap::default(),
            applicability_info: FxHashMap::default(),
            config: collect_configs(),
        }
    }
    /// Renders the configuration section for `lint_name`, or `None` when no
    /// configuration value mentions that lint.
    fn get_lint_configs(&self, lint_name: &str) -> Option<String> {
        let mut rendered = String::new();
        for conf in &self.config {
            if conf.lints.iter().any(|lint| lint == lint_name) {
                rendered.push_str(&conf.to_string());
            }
        }
        if rendered.is_empty() {
            None
        } else {
            Some(format!(CONFIGURATION_SECTION_TEMPLATE!(), configurations = rendered))
        }
    }
}
impl Drop for MetadataCollector {
    /// You might ask: How hacky is this?
    /// My answer: YES
    ///
    /// Writes the collected lint metadata to `OUTPUT_FILE` as pretty-printed
    /// JSON when the collector is dropped with a non-empty lint list.
    fn drop(&mut self) {
        // The metadata collector gets dropped twice, this makes sure that we only write
        // when the list is full
        if self.lints.is_empty() {
            return;
        }
        let mut applicability_info = std::mem::take(&mut self.applicability_info);
        // Mapping the final data: attach the collected applicability info to
        // each lint (default applicability for lints without emissions).
        let mut lints = std::mem::take(&mut self.lints).into_sorted_vec();
        lints
            .iter_mut()
            .for_each(|x| x.applicability = Some(applicability_info.remove(&x.id).unwrap_or_default()));
        // Outputting. `File::create` creates the file if needed and truncates
        // any existing content in one call, replacing the previous
        // `remove_file` + `OpenOptions::write(true).create(true)` sequence
        // (which, lacking `truncate(true)`, depended on the manual removal to
        // avoid leaving stale bytes behind).
        let mut file = fs::File::create(OUTPUT_FILE).unwrap();
        writeln!(file, "{}", serde_json::to_string_pretty(&lints).unwrap()).unwrap();
    }
}
/// Metadata of a single lint as it appears in the exported JSON.
#[derive(Debug, Clone, Serialize, PartialEq, Eq, PartialOrd, Ord)]
struct LintMetadata {
    id: String,
    id_span: SerializableSpan,
    group: String,
    level: String,
    docs: String,
    /// This field is only used in the output and will only be
    /// mapped shortly before the actual output.
    applicability: Option<ApplicabilityInfo>,
}
impl LintMetadata {
    /// Creates the metadata for one lint; `applicability` is left `None` and
    /// filled in shortly before the JSON is written.
    fn new(id: String, id_span: SerializableSpan, group: String, level: &'static str, docs: String) -> Self {
        Self {
            id,
            id_span,
            group,
            level: level.to_string(),
            docs,
            applicability: None,
        }
    }
}
/// A serializable source location (`path` + 1-based `line`) of a lint declaration.
#[derive(Debug, Clone, Serialize, PartialEq, Eq, PartialOrd, Ord)]
struct SerializableSpan {
    path: String,
    line: usize,
}
impl std::fmt::Display for SerializableSpan {
    // Renders as `<file name>:<line>`, dropping any directory components.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}:{}", self.path.rsplit('/').next().unwrap_or_default(), self.line)
    }
}
impl SerializableSpan {
    /// Builds a span from the identifier of the given item.
    fn from_item(cx: &LateContext<'_>, item: &Item<'_>) -> Self {
        Self::from_span(cx, item.ident.span)
    }
    /// Resolves the start of `span` to a source file path and line number.
    fn from_span(cx: &LateContext<'_>, span: Span) -> Self {
        let loc: Loc = cx.sess().source_map().lookup_char_pos(span.lo());
        Self {
            path: format!("{}", loc.file.name.prefer_remapped()),
            line: loc.line,
        }
    }
}
/// Aggregated suggestion/applicability information for one lint.
#[derive(Debug, Clone, Default, PartialEq, Eq, PartialOrd, Ord)]
struct ApplicabilityInfo {
    /// Indicates if any of the lint emissions uses multiple spans. This is related to
    /// [rustfix#141](https://github.com/rust-lang/rustfix/issues/141) as such suggestions can
    /// currently not be applied automatically.
    is_multi_part_suggestion: bool,
    /// Index into `paths::APPLICABILITY_VALUES`; `None` when unresolved.
    applicability: Option<usize>,
}
impl Serialize for ApplicabilityInfo {
    // Custom impl: serializes the applicability index as its human-readable
    // name, falling back to `APPLICABILITY_UNRESOLVED_STR` when unresolved.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_struct("ApplicabilityInfo", 2)?;
        s.serialize_field("is_multi_part_suggestion", &self.is_multi_part_suggestion)?;
        if let Some(index) = self.applicability {
            s.serialize_field(
                "applicability",
                &paths::APPLICABILITY_VALUES[index][APPLICABILITY_NAME_INDEX],
            )?;
        } else {
            s.serialize_field("applicability", APPLICABILITY_UNRESOLVED_STR)?;
        }
        s.end()
    }
}
// ==================================================================
// Configuration
// ==================================================================
/// One configuration value from Clippy's `conf` module, together with the
/// lints it applies to and its parsed documentation.
#[derive(Debug, Clone, Default)]
pub struct ClippyConfiguration {
    name: String,
    config_type: &'static str,
    default: String,
    lints: Vec<String>,
    doc: String,
    deprecation_reason: Option<&'static str>,
}
impl ClippyConfiguration {
    /// Creates a configuration entry; the lint list and documentation are
    /// parsed out of the field's doc comment (see `parse_config_field_doc`).
    pub fn new(
        name: &'static str,
        config_type: &'static str,
        default: String,
        doc_comment: &'static str,
        deprecation_reason: Option<&'static str>,
    ) -> Self {
        let (lints, doc) = parse_config_field_doc(doc_comment)
            .unwrap_or_else(|| (vec![], "[ERROR] MALFORMED DOC COMMENT".to_string()));
        Self {
            name: to_kebab(name),
            lints,
            doc,
            config_type,
            default,
            deprecation_reason,
        }
    }
}
/// Collects the configuration metadata generated in Clippy's `conf` module.
fn collect_configs() -> Vec<ClippyConfiguration> {
    crate::utils::conf::metadata::get_configuration_metadata()
}
/// This parses the field documentation of the config struct.
///
/// ```rust, ignore
/// parse_config_field_doc(cx, "Lint: LINT_NAME_1, LINT_NAME_2. Papa penguin, papa penguin")
/// ```
///
/// Would yield:
/// ```rust, ignore
/// Some(["lint_name_1", "lint_name_2"], "Papa penguin, papa penguin")
/// ```
fn parse_config_field_doc(doc_comment: &str) -> Option<(Vec<String>, String)> {
    const DOC_START: &str = " Lint: ";
    if_chain! {
        if doc_comment.starts_with(DOC_START);
        if let Some(split_pos) = doc_comment.find('.');
        then {
            let mut doc_comment = doc_comment.to_string();
            // Everything from the first `.` on is the documentation text.
            let mut documentation = doc_comment.split_off(split_pos);
            // Extract lints (lowercased so names match the collected lint ids)
            doc_comment.make_ascii_lowercase();
            let lints: Vec<String> = doc_comment.split_off(DOC_START.len()).split(", ").map(str::to_string).collect();
            // Format documentation correctly
            // split off leading `.` from lint name list and indent for correct formatting
            // NOTE(review): as written, this `replace` call is a no-op — the pattern
            // and the replacement string are identical. It presumably was meant to
            // re-indent continuation lines; confirm against the upstream source.
            documentation = documentation.trim_start_matches('.').trim().replace("\n ", "\n ");
            Some((lints, documentation))
        } else {
            None
        }
    }
}
/// Transforms a given `snake_case_string` to a tasty `kebab-case-string`
fn to_kebab(config_name: &str) -> String {
    config_name
        .chars()
        .map(|c| if c == '_' { '-' } else { c })
        .collect()
}
impl fmt::Display for ClippyConfiguration {
    // Renders one configuration value with `CONFIGURATION_VALUE_TEMPLATE`,
    // i.e. "* `<name>`: `<type>`: <doc> (defaults to `<default>`)".
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            CONFIGURATION_VALUE_TEMPLATE!(),
            name = self.name,
            ty = self.config_type,
            doc = self.doc,
            default = self.default
        )
    }
}
// ==================================================================
// Lint pass
// ==================================================================
impl<'hir> LateLintPass<'hir> for MetadataCollector {
    /// Collecting lint declarations like:
    /// ```rust, ignore
    /// declare_clippy_lint! {
    ///     /// ### What it does
    ///     /// Something IDK.
    ///     pub SOME_LINT,
    ///     internal,
    ///     "Who am I?"
    /// }
    /// ```
    fn check_item(&mut self, cx: &LateContext<'hir>, item: &'hir Item<'_>) {
        // Lint declarations expand to immutable statics.
        if let ItemKind::Static(ty, Mutability::Not, _) = item.kind {
            // Normal lint
            if_chain! {
                // item validation
                if is_lint_ref_type(cx, ty);
                // blacklist check
                let lint_name = sym_to_string(item.ident.name).to_ascii_lowercase();
                if !BLACK_LISTED_LINTS.contains(&lint_name.as_str());
                // metadata extraction
                if let Some((group, level)) = get_lint_group_and_level_or_lint(cx, &lint_name, item);
                if let Some(mut docs) = extract_attr_docs_or_lint(cx, item);
                then {
                    // Append the configuration section for lints that have one.
                    if let Some(configuration_section) = self.get_lint_configs(&lint_name) {
                        docs.push_str(&configuration_section);
                    }
                    self.lints.push(LintMetadata::new(
                        lint_name,
                        SerializableSpan::from_item(cx, item),
                        group,
                        level,
                        docs,
                    ));
                }
            }
            // Deprecated lints are statics of the `ClippyDeprecatedLint` type.
            if_chain! {
                if is_deprecated_lint(cx, ty);
                // blacklist check
                let lint_name = sym_to_string(item.ident.name).to_ascii_lowercase();
                if !BLACK_LISTED_LINTS.contains(&lint_name.as_str());
                // Metadata the little we can get from a deprecated lint
                if let Some(docs) = extract_attr_docs_or_lint(cx, item);
                then {
                    self.lints.push(LintMetadata::new(
                        lint_name,
                        SerializableSpan::from_item(cx, item),
                        DEPRECATED_LINT_GROUP_STR.to_string(),
                        DEPRECATED_LINT_LEVEL,
                        docs,
                    ));
                }
            }
        }
    }
    /// Collecting constant applicability from the actual lint emissions
    ///
    /// Example:
    /// ```rust, ignore
    /// span_lint_and_sugg(
    ///     cx,
    ///     SOME_LINT,
    ///     item.span,
    ///     "Le lint message",
    ///     "Here comes help:",
    ///     "#![allow(clippy::all)]",
    ///     Applicability::MachineApplicable, // <-- Extracts this constant value
    /// );
    /// ```
    fn check_expr(&mut self, cx: &LateContext<'hir>, expr: &'hir hir::Expr<'_>) {
        if let Some(args) = match_lint_emission(cx, expr) {
            let mut emission_info = extract_emission_info(cx, args);
            if emission_info.is_empty() {
                // See:
                // - src/misc.rs:734:9
                // - src/methods/mod.rs:3545:13
                // - src/methods/mod.rs:3496:13
                // We are basically unable to resolve the lint name itself.
                return;
            }
            // One emission can mention several lints; record the info for each.
            for (lint_name, applicability, is_multi_part) in emission_info.drain(..) {
                let app_info = self.applicability_info.entry(lint_name).or_default();
                app_info.applicability = applicability;
                app_info.is_multi_part_suggestion = is_multi_part;
            }
        }
    }
}
// ==================================================================
// Lint definition extraction
// ==================================================================
/// Converts a `Symbol` into an owned `String`.
fn sym_to_string(sym: Symbol) -> String {
    sym.as_str().to_string()
}
/// Like `extract_attr_docs`, but emits a collection-error lint on failure.
fn extract_attr_docs_or_lint(cx: &LateContext<'_>, item: &Item<'_>) -> Option<String> {
    extract_attr_docs(cx, item).or_else(|| {
        lint_collection_error_item(cx, item, "could not collect the lint documentation");
        None
    })
}
/// This function collects all documentation that has been added to an item using
/// `#[doc = r""]` attributes. Several attributes are aggregated using line breaks
///
/// ```ignore
/// #[doc = r"Hello world!"]
/// #[doc = r"=^.^="]
/// struct SomeItem {}
/// ```
///
/// Would result in `Hello world!\n=^.^=\n`
///
/// ---
///
/// This function may modify the doc comment to ensure that the string can be displayed using a
/// markdown viewer in Clippy's lint list. The following modifications could be applied:
/// * Removal of leading space after a new line. (Important to display tables)
/// * Ensures that code blocks only contain language information
fn extract_attr_docs(cx: &LateContext<'_>, item: &Item<'_>) -> Option<String> {
    let attrs = cx.tcx.hir().attrs(item.hir_id());
    let mut lines = attrs.iter().filter_map(ast::Attribute::doc_str);
    // The first doc line is required; an undocumented item yields `None` here.
    let mut docs = String::from(&*lines.next()?.as_str());
    let mut in_code_block = false;
    for line in lines {
        docs.push('\n');
        let line = line.as_str();
        let line = &*line;
        // Track code-block fences so the language tag can be normalized.
        if let Some(info) = line.trim_start().strip_prefix("```") {
            in_code_block = !in_code_block;
            if in_code_block {
                let lang = info
                    .trim()
                    .split(',')
                    // remove rustdoc directives
                    .find(|&s| !matches!(s, "" | "ignore" | "no_run" | "should_panic"))
                    // if no language is present, fill in "rust"
                    .unwrap_or("rust");
                docs.push_str("```");
                docs.push_str(lang);
                continue;
            }
        }
        // This removes the leading space that the macro translation introduces
        if let Some(stripped_doc) = line.strip_prefix(' ') {
            docs.push_str(stripped_doc);
        } else if !line.is_empty() {
            docs.push_str(line);
        }
    }
    Some(docs)
}
/// Resolves the lint group and default level for `lint_name` through the lint
/// store, emitting a collection-error lint when that is not possible.
/// Lints whose group is in `EXCLUDED_LINT_GROUPS` yield `None` silently.
// NOTE(review): the undeclared `'hir` lifetime below relies on an in-band
// lifetime feature being enabled crate-wide — confirm for this crate.
fn get_lint_group_and_level_or_lint(
    cx: &LateContext<'_>,
    lint_name: &str,
    item: &'hir Item<'_>,
) -> Option<(String, &'static str)> {
    let result = cx
        .lint_store
        .check_lint_name(cx.sess(), lint_name, Some(sym::clippy), &[]);
    if let CheckLintNameResult::Tool(Ok(lint_lst)) = result {
        if let Some(group) = get_lint_group(cx, lint_lst[0]) {
            if EXCLUDED_LINT_GROUPS.contains(&group.as_str()) {
                return None;
            }
            if let Some(level) = get_lint_level_from_group(&group) {
                Some((group, level))
            } else {
                lint_collection_error_item(
                    cx,
                    item,
                    &format!("Unable to determine lint level for found group `{}`", group),
                );
                None
            }
        } else {
            lint_collection_error_item(cx, item, "Unable to determine lint group");
            None
        }
    } else {
        lint_collection_error_item(cx, item, "Unable to find lint in lint_store");
        None
    }
}
/// Searches the registered lint groups for the one containing `lint_id` and
/// returns its name with the `clippy::` prefix stripped.
fn get_lint_group(cx: &LateContext<'_>, lint_id: LintId) -> Option<String> {
    for (group_name, lints, _) in &cx.lint_store.get_lint_groups() {
        // Skip umbrella groups like `clippy::all` that would shadow the
        // more specific group of the lint.
        if IGNORED_LINT_GROUPS.contains(group_name) {
            continue;
        }
        if lints.iter().any(|group_lint| *group_lint == lint_id) {
            let group = group_name.strip_prefix(CLIPPY_LINT_GROUP_PREFIX).unwrap_or(group_name);
            return Some((*group).to_string());
        }
    }
    None
}
/// Maps a lint group name to its default lint level via `DEFAULT_LINT_LEVELS`.
fn get_lint_level_from_group(lint_group: &str) -> Option<&'static str> {
    DEFAULT_LINT_LEVELS
        .iter()
        .find(|(group_name, _)| *group_name == lint_group)
        .map(|(_, group_level)| *group_level)
}
/// Checks whether `ty` resolves to the `ClippyDeprecatedLint` struct used for
/// deprecated lint declarations.
fn is_deprecated_lint(cx: &LateContext<'_>, ty: &hir::Ty<'_>) -> bool {
    if let hir::TyKind::Path(ref path) = ty.kind {
        if let hir::def::Res::Def(DefKind::Struct, def_id) = cx.qpath_res(path, ty.hir_id) {
            return match_def_path(cx, def_id, &DEPRECATED_LINT_TYPE);
        }
    }
    false
}
// ==================================================================
// Lint emission
// ==================================================================
/// Emits an `INTERNAL_METADATA_COLLECTOR` lint pointing at `item` to surface
/// a metadata collection problem.
fn lint_collection_error_item(cx: &LateContext<'_>, item: &Item<'_>, message: &str) {
    span_lint(
        cx,
        INTERNAL_METADATA_COLLECTOR,
        item.ident.span,
        &format!("metadata collection error for `{}`: {}", item.ident.name, message),
    );
}
// ==================================================================
// Applicability
// ==================================================================
/// This function checks if a given expression is equal to a simple lint emission function call.
/// It will return the function arguments if the emission matched any function.
fn match_lint_emission<'hir>(cx: &LateContext<'hir>, expr: &'hir hir::Expr<'_>) -> Option<&'hir [hir::Expr<'hir>]> {
    LINT_EMISSION_FUNCTIONS
        .iter()
        .find_map(|emission_fn| match_function_call(cx, expr, emission_fn))
}
/// Combines two optional applicability indices, keeping the higher one.
/// Returns `None` only when both inputs are `None`... except that a lone
/// second value only survives when the first is `None` (matching the
/// original `map_or` semantics exactly).
fn take_higher_applicability(a: Option<usize>, b: Option<usize>) -> Option<usize> {
    match (a, b) {
        (Some(lhs), Some(rhs)) => Some(lhs.max(rhs)),
        (Some(lhs), None) => Some(lhs),
        (None, rhs) => rhs,
    }
}
/// Extracts `(lint name, applicability index, is_multi_part)` triples from the
/// arguments of a recognized lint emission call.
fn extract_emission_info<'hir>(
    cx: &LateContext<'hir>,
    args: &'hir [hir::Expr<'hir>],
) -> Vec<(String, Option<usize>, bool)> {
    let mut lints = Vec::new();
    let mut applicability = None;
    let mut multi_part = false;
    // Classify each argument by its (pointer-stripped) type.
    for arg in args {
        let (arg_ty, _) = walk_ptrs_ty_depth(cx.typeck_results().expr_ty(arg));
        if match_type(cx, arg_ty, &paths::LINT) {
            // If we found the lint arg, extract the lint name
            let mut resolved_lints = resolve_lints(cx, arg);
            lints.append(&mut resolved_lints);
        } else if match_type(cx, arg_ty, &paths::APPLICABILITY) {
            applicability = resolve_applicability(cx, arg);
        } else if arg_ty.is_closure() {
            // Closure arguments are the `diag` callbacks of the `*_and_then`
            // emitters; they can contain suggestions and applicabilities.
            multi_part |= check_is_multi_part(cx, arg);
            // TODO xFrednet 2021-03-01: don't use or_else but rather a comparison
            applicability = applicability.or_else(|| resolve_applicability(cx, arg));
        }
    }
    // Every resolved lint shares the applicability/multi-part info found above.
    lints
        .drain(..)
        .map(|lint_name| (lint_name, applicability, multi_part))
        .collect()
}
/// Resolves the possible lints that this expression could reference
fn resolve_lints(cx: &LateContext<'hir>, expr: &'hir hir::Expr<'hir>) -> Vec<String> {
    let mut resolver = LintResolver::new(cx);
    resolver.visit_expr(expr);
    resolver.lints
}
/// This function tries to resolve the linked applicability to the given expression.
fn resolve_applicability(cx: &LateContext<'hir>, expr: &'hir hir::Expr<'hir>) -> Option<usize> {
    let mut resolver = ApplicabilityResolver::new(cx);
    resolver.visit_expr(expr);
    resolver.complete()
}
/// Determines whether the closure passed to a lint emission produces a
/// multi-part suggestion, following a local binding to its initializer when
/// the closure was bound to a variable first.
fn check_is_multi_part(cx: &LateContext<'hir>, closure_expr: &'hir hir::Expr<'hir>) -> bool {
    if let ExprKind::Closure(_, _, body_id, _, _) = closure_expr.kind {
        let mut scanner = IsMultiSpanScanner::new(cx);
        intravisit::walk_body(&mut scanner, cx.tcx.hir().body(body_id));
        return scanner.is_multi_part();
    } else if let Some(local) = get_parent_local(cx, closure_expr) {
        if let Some(local_init) = local.init {
            return check_is_multi_part(cx, local_init);
        }
    }
    false
}
/// HIR visitor that collects the names of all `Lint` statics an expression
/// can refer to, following local bindings to their initializers.
struct LintResolver<'a, 'hir> {
    cx: &'a LateContext<'hir>,
    /// Lowercased names of the resolved lints.
    lints: Vec<String>,
}
impl<'a, 'hir> LintResolver<'a, 'hir> {
    fn new(cx: &'a LateContext<'hir>) -> Self {
        Self {
            cx,
            lints: Vec::<String>::default(),
        }
    }
}
impl<'a, 'hir> intravisit::Visitor<'hir> for LintResolver<'a, 'hir> {
    type Map = Map<'hir>;
    fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
        intravisit::NestedVisitorMap::All(self.cx.tcx.hir())
    }
    fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) {
        if_chain! {
            if let ExprKind::Path(qpath) = &expr.kind;
            if let QPath::Resolved(_, path) = qpath;
            let (expr_ty, _) = walk_ptrs_ty_depth(self.cx.typeck_results().expr_ty(expr));
            if match_type(self.cx, expr_ty, &paths::LINT);
            then {
                if let hir::def::Res::Def(DefKind::Static, _) = path.res {
                    // Direct reference to a lint static: record its name.
                    let lint_name = last_path_segment(qpath).ident.name;
                    self.lints.push(sym_to_string(lint_name).to_ascii_lowercase());
                } else if let Some(local) = get_parent_local(self.cx, expr) {
                    // A local of `Lint` type: resolve its initializer instead.
                    if let Some(local_init) = local.init {
                        intravisit::walk_expr(self, local_init);
                    }
                }
            }
        }
        intravisit::walk_expr(self, expr);
    }
}
/// This visitor finds the highest applicability value in the visited expressions
struct ApplicabilityResolver<'a, 'hir> {
    cx: &'a LateContext<'hir>,
    /// This is the index of highest `Applicability` for `paths::APPLICABILITY_VALUES`
    applicability_index: Option<usize>,
}
impl<'a, 'hir> ApplicabilityResolver<'a, 'hir> {
    fn new(cx: &'a LateContext<'hir>) -> Self {
        Self {
            cx,
            applicability_index: None,
        }
    }
    /// Records `new_index`, keeping whichever of old and new is higher.
    fn add_new_index(&mut self, new_index: usize) {
        self.applicability_index = take_higher_applicability(self.applicability_index, Some(new_index));
    }
    /// Consumes the resolver and returns the found applicability, if any.
    fn complete(self) -> Option<usize> {
        self.applicability_index
    }
}
impl<'a, 'hir> intravisit::Visitor<'hir> for ApplicabilityResolver<'a, 'hir> {
    type Map = Map<'hir>;
    fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
        intravisit::NestedVisitorMap::All(self.cx.tcx.hir())
    }
    fn visit_path(&mut self, path: &'hir hir::Path<'hir>, _id: hir::HirId) {
        // Any direct `Applicability::<Variant>` path is recorded by its index.
        for (index, enum_value) in paths::APPLICABILITY_VALUES.iter().enumerate() {
            if match_path(path, enum_value) {
                self.add_new_index(index);
                return;
            }
        }
    }
    fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) {
        let (expr_ty, _) = walk_ptrs_ty_depth(self.cx.typeck_results().expr_ty(expr));
        if_chain! {
            if match_type(self.cx, expr_ty, &paths::APPLICABILITY);
            if let Some(local) = get_parent_local(self.cx, expr);
            if let Some(local_init) = local.init;
            then {
                // An `Applicability` local: also scan its initializer.
                intravisit::walk_expr(self, local_init);
            }
        };
        intravisit::walk_expr(self, expr);
    }
}
/// This returns the parent local node if the expression is a reference one
fn get_parent_local(cx: &LateContext<'hir>, expr: &'hir hir::Expr<'hir>) -> Option<&'hir hir::Local<'hir>> {
    if let ExprKind::Path(QPath::Resolved(_, path)) = expr.kind {
        if let hir::def::Res::Local(local_hir) = path.res {
            return get_parent_local_hir_id(cx, local_hir);
        }
    }
    None
}
/// Walks up from a local's `HirId` — possibly through its binding pattern —
/// to the enclosing `let` statement.
fn get_parent_local_hir_id(cx: &LateContext<'hir>, hir_id: hir::HirId) -> Option<&'hir hir::Local<'hir>> {
    let map = cx.tcx.hir();
    match map.find(map.get_parent_node(hir_id)) {
        Some(hir::Node::Local(local)) => Some(local),
        // The parent of the id can be a pattern; recurse one level up.
        Some(hir::Node::Pat(pattern)) => get_parent_local_hir_id(cx, pattern.hir_id),
        _ => None,
    }
}
/// HIR visitor that counts suggestion emissions inside a lint-emission
/// closure to decide whether the lint produces a multi-part suggestion.
struct IsMultiSpanScanner<'a, 'hir> {
    cx: &'a LateContext<'hir>,
    // 0 = no suggestion, 1 = one single-span suggestion, >1 = multi-part.
    suggestion_count: usize,
}
impl<'a, 'hir> IsMultiSpanScanner<'a, 'hir> {
    fn new(cx: &'a LateContext<'hir>) -> Self {
        Self {
            cx,
            suggestion_count: 0,
        }
    }
    /// Add a new single expression suggestion to the counter
    fn add_single_span_suggestion(&mut self) {
        self.suggestion_count += 1;
    }
    /// Signals that a suggestion with possible multiple spans was found
    fn add_multi_part_suggestion(&mut self) {
        self.suggestion_count += 2;
    }
    /// Checks if the suggestions include multiple spans
    fn is_multi_part(&self) -> bool {
        self.suggestion_count > 1
    }
}
impl<'a, 'hir> intravisit::Visitor<'hir> for IsMultiSpanScanner<'a, 'hir> {
    type Map = Map<'hir>;
    fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
        intravisit::NestedVisitorMap::All(self.cx.tcx.hir())
    }
    fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) {
        // Early return if the lint is already multi span
        if self.is_multi_part() {
            return;
        }
        match &expr.kind {
            ExprKind::Call(fn_expr, _args) => {
                let found_function = SUGGESTION_FUNCTIONS
                    .iter()
                    .any(|func_path| match_function_call(self.cx, fn_expr, func_path).is_some());
                if found_function {
                    // NOTE(review): the original comment here claimed "These functions
                    // are all multi part suggestions", yet only a single-span suggestion
                    // is counted — confirm which behavior is intended.
                    self.add_single_span_suggestion();
                }
            },
            ExprKind::MethodCall(path, _path_span, arg, _arg_span) => {
                // Only count suggestion methods called on a `DiagnosticBuilder` receiver.
                let (self_ty, _) = walk_ptrs_ty_depth(self.cx.typeck_results().expr_ty(&arg[0]));
                if match_type(self.cx, self_ty, &paths::DIAGNOSTIC_BUILDER) {
                    let called_method = path.ident.name.as_str().to_string();
                    for (method_name, is_multi_part) in &SUGGESTION_DIAGNOSTIC_BUILDER_METHODS {
                        if *method_name == called_method {
                            if *is_multi_part {
                                self.add_multi_part_suggestion();
                            } else {
                                self.add_single_span_suggestion();
                            }
                            break;
                        }
                    }
                }
            },
            _ => {},
        }
        intravisit::walk_expr(self, expr);
    }
}
| 35.724409 | 118 | 0.586102 |
4a5aa09121ffbfdf6c7ba83975f41570d389d5ec | 13,392 | mod common;
mod sqlite;
mod test_api;
use barrel::{types, Migration};
use common::*;
use pretty_assertions::assert_eq;
use quaint::prelude::Queryable;
use sql_schema_describer::*;
use sqlite::*;
use test_api::{TestApi, TestResult};
use test_macros::test_each_connector;
#[tokio::test]
async fn views_can_be_described() {
    // Schema with two tables plus a view that unions their id columns.
    let full_sql = r#"
        CREATE TABLE a (a_id int);
        CREATE TABLE b (b_id int);
        CREATE VIEW ab AS SELECT a_id FROM a UNION ALL SELECT b_id FROM b;
        "#;
    let inspector = get_sqlite_describer(&full_sql).await;
    let schema = inspector.describe(SCHEMA).await.expect("describing");
    let view = schema.get_view("ab").expect("couldn't get ab view").to_owned();
    // The describer should hand back the original CREATE VIEW statement.
    assert_eq!(view.name, "ab");
    assert_eq!(
        view.definition,
        "CREATE VIEW ab AS SELECT a_id FROM a UNION ALL SELECT b_id FROM b"
    );
}
#[tokio::test]
async fn sqlite_column_types_must_work() {
let mut migration = Migration::new();
migration.create_table("User", move |t| {
t.inject_custom("int_col int not null");
t.add_column("int4_col", types::integer());
t.add_column("text_col", types::text());
t.add_column("real_col", types::float());
t.add_column("primary_col", types::primary());
t.inject_custom("decimal_col decimal (5, 3) not null");
});
let full_sql = migration.make::<barrel::backend::Sqlite>();
let inspector = get_sqlite_describer(&full_sql).await;
let result = inspector.describe(SCHEMA).await.expect("describing");
let table = result.get_table("User").expect("couldn't get User table");
let expected_columns = vec![
Column {
name: "int_col".to_string(),
tpe: ColumnType {
full_data_type: "int".to_string(),
family: ColumnTypeFamily::Int,
arity: ColumnArity::Required,
native_type: None,
},
default: None,
auto_increment: false,
},
Column {
name: "int4_col".to_string(),
tpe: ColumnType {
full_data_type: "INTEGER".to_string(),
family: ColumnTypeFamily::Int,
arity: ColumnArity::Required,
native_type: None,
},
default: None,
auto_increment: false,
},
Column {
name: "text_col".to_string(),
tpe: ColumnType {
full_data_type: "TEXT".to_string(),
family: ColumnTypeFamily::String,
arity: ColumnArity::Required,
native_type: None,
},
default: None,
auto_increment: false,
},
Column {
name: "real_col".to_string(),
tpe: ColumnType {
full_data_type: "REAL".to_string(),
family: ColumnTypeFamily::Float,
arity: ColumnArity::Required,
native_type: None,
},
default: None,
auto_increment: false,
},
Column {
name: "primary_col".to_string(),
tpe: ColumnType {
full_data_type: "INTEGER".to_string(),
family: ColumnTypeFamily::Int,
arity: ColumnArity::Required,
native_type: None,
},
default: None,
auto_increment: true,
},
Column {
name: "decimal_col".to_string(),
tpe: ColumnType {
full_data_type: "decimal (5, 3)".to_string(),
family: ColumnTypeFamily::Decimal,
arity: ColumnArity::Required,
native_type: None,
},
default: None,
auto_increment: false,
},
];
assert_eq!(
table,
&Table {
name: "User".to_string(),
columns: expected_columns,
indices: vec![],
primary_key: Some(PrimaryKey {
columns: vec!["primary_col".to_string()],
sequence: None,
constraint_name: None,
}),
foreign_keys: vec![],
}
);
}
#[tokio::test]
async fn sqlite_foreign_key_on_delete_must_be_handled() {
let sql = "
CREATE TABLE City (id INTEGER NOT NULL PRIMARY KEY);
CREATE TABLE User (
id INTEGER NOT NULL PRIMARY KEY,
city INTEGER REFERENCES City(id) ON DELETE NO ACTION,
city_cascade INTEGER REFERENCES City(id) ON DELETE CASCADE,
city_restrict INTEGER REFERENCES City (id) ON DELETE RESTRICT,
city_set_default INTEGER REFERENCES City(id) ON DELETE SET DEFAULT,
city_set_null INTEGER REFERENCES City(id) ON DELETE SET NULL
)";
let inspector = get_sqlite_describer(&sql).await;
let schema = inspector.describe(SCHEMA).await.expect("describing");
let mut table = schema.get_table("User").expect("get User table").to_owned();
table.foreign_keys.sort_unstable_by_key(|fk| fk.columns.clone());
assert_eq!(
table,
Table {
name: "User".to_string(),
columns: vec![
Column {
name: "id".to_string(),
tpe: ColumnType {
full_data_type: "INTEGER".to_string(),
family: ColumnTypeFamily::Int,
arity: ColumnArity::Required,
native_type: None,
},
default: None,
auto_increment: true,
},
Column {
name: "city".to_string(),
tpe: ColumnType {
full_data_type: "INTEGER".to_string(),
family: ColumnTypeFamily::Int,
arity: ColumnArity::Nullable,
native_type: None,
},
default: None,
auto_increment: false,
},
Column {
name: "city_cascade".to_string(),
tpe: ColumnType {
full_data_type: "INTEGER".to_string(),
family: ColumnTypeFamily::Int,
arity: ColumnArity::Nullable,
native_type: None,
},
default: None,
auto_increment: false,
},
Column {
name: "city_restrict".to_string(),
tpe: ColumnType {
full_data_type: "INTEGER".to_string(),
family: ColumnTypeFamily::Int,
arity: ColumnArity::Nullable,
native_type: None,
},
default: None,
auto_increment: false,
},
Column {
name: "city_set_default".to_string(),
tpe: ColumnType {
full_data_type: "INTEGER".to_string(),
family: ColumnTypeFamily::Int,
arity: ColumnArity::Nullable,
native_type: None,
},
default: None,
auto_increment: false,
},
Column {
name: "city_set_null".to_string(),
tpe: ColumnType {
full_data_type: "INTEGER".to_string(),
family: ColumnTypeFamily::Int,
arity: ColumnArity::Nullable,
native_type: None,
},
default: None,
auto_increment: false,
},
],
indices: vec![],
primary_key: Some(PrimaryKey {
columns: vec!["id".to_string()],
sequence: None,
constraint_name: None,
}),
foreign_keys: vec![
ForeignKey {
constraint_name: None,
columns: vec!["city".to_string()],
referenced_columns: vec!["id".to_string()],
referenced_table: "City".to_string(),
on_update_action: ForeignKeyAction::NoAction,
on_delete_action: ForeignKeyAction::NoAction,
},
ForeignKey {
constraint_name: None,
columns: vec!["city_cascade".to_string()],
referenced_columns: vec!["id".to_string()],
referenced_table: "City".to_string(),
on_update_action: ForeignKeyAction::NoAction,
on_delete_action: ForeignKeyAction::Cascade,
},
ForeignKey {
constraint_name: None,
columns: vec!["city_restrict".to_string()],
referenced_columns: vec!["id".to_string()],
referenced_table: "City".to_string(),
on_update_action: ForeignKeyAction::NoAction,
on_delete_action: ForeignKeyAction::Restrict,
},
ForeignKey {
constraint_name: None,
columns: vec!["city_set_default".to_string()],
referenced_columns: vec!["id".to_string()],
referenced_table: "City".to_string(),
on_update_action: ForeignKeyAction::NoAction,
on_delete_action: ForeignKeyAction::SetDefault,
},
ForeignKey {
constraint_name: None,
columns: vec!["city_set_null".to_string()],
referenced_columns: vec!["id".to_string()],
referenced_table: "City".to_string(),
on_update_action: ForeignKeyAction::NoAction,
on_delete_action: ForeignKeyAction::SetNull,
},
],
}
);
}
/// A textual primary key declared through a table-level `PRIMARY KEY (...)`
/// clause must surface as the table's primary key and not as an index.
#[tokio::test]
async fn sqlite_text_primary_keys_must_be_inferred_on_table_and_not_as_separate_indexes() {
    let mut migration = Migration::new();
    migration.create_table("User", move |t| {
        t.add_column("int4_col", types::integer());
        t.add_column("text_col", types::text());
        t.add_column("real_col", types::float());
        t.add_column("primary_col", types::text());
        // Simulate how we create primary keys in the migrations engine.
        t.inject_custom("PRIMARY KEY (\"primary_col\")");
    });
    let sql = migration.make::<barrel::backend::Sqlite>();
    let describer = get_sqlite_describer(&sql).await;
    let schema = describer.describe(SCHEMA).await.expect("describing");
    let user_table = schema.get_table("User").expect("couldn't get User table");
    assert!(user_table.indices.is_empty());
    let expected_pk = PrimaryKey {
        columns: vec!["primary_col".to_owned()],
        sequence: None,
        constraint_name: None,
    };
    assert_eq!(user_table.primary_key.as_ref().unwrap(), &expected_pk);
}
/// Doubled single quotes (`''`) inside SQLite string defaults must be
/// unescaped to a single quote when the default value is described.
#[test_each_connector(tags("sqlite"))]
async fn escaped_quotes_in_string_defaults_must_be_unescaped(api: &TestApi) -> TestResult {
    let create_table = format!(
        r#"
            CREATE TABLE "{0}"."string_defaults_test" (
                id INTEGER PRIMARY KEY,
                regular VARCHAR NOT NULL DEFAULT 'meow, says the cat',
                escaped VARCHAR NOT NULL DEFAULT '"That''s a lot of fish!"
- Godzilla, 1998'
            );
        "#,
        api.schema_name()
    );
    api.database().query_raw(&create_table, &[]).await?;
    let schema = api.describe().await?;
    let table = schema.table_bang("string_defaults_test");
    // Extract a column's plain string default. Panics if the column has no
    // string default — which would itself be a test failure here.
    let string_default = |column: &str| -> String {
        table
            .column_bang(column)
            .default
            .as_ref()
            .unwrap()
            .as_value()
            .unwrap()
            .clone()
            .into_string()
            .unwrap()
    };
    assert_eq!(string_default("regular"), "meow, says the cat");
    assert_eq!(
        string_default("escaped"),
        "\"That's a lot of fish!\"\n- Godzilla, 1998"
    );
    Ok(())
}
/// Backslashes in SQLite string literals are not escape characters, so they
/// must come back verbatim from the described default value.
#[test_each_connector(tags("sqlite"))]
async fn escaped_backslashes_in_string_literals_must_be_unescaped(api: &TestApi) -> TestResult {
    let create_table = format!(
        r#"
        CREATE TABLE "{0}"."test" (
            model_name_space VARCHAR(255) NOT NULL DEFAULT 'xyz\Datasource\Model'
        );
    "#,
        api.schema_name()
    );
    api.database().query_raw(&create_table, &[]).await?;
    let schema = api.describe().await?;
    let table = schema.table_bang("test");
    let column_default = table.column_bang("model_name_space").default.as_ref().unwrap();
    let default = column_default.as_value().unwrap().clone().into_string().unwrap();
    assert_eq!(default, "xyz\\Datasource\\Model");
    Ok(())
}
| 34.604651 | 96 | 0.513366 |
ab11c4a37f79a37e43ae858d53768c2e13f83f1b | 13,100 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
mod generate_flow;
use fixture_tests::test_fixture;
use generate_flow::transform_fixture;
/// Checks the `conditional` fixture against its expected flow output.
#[test]
fn conditional() {
    test_fixture(
        transform_fixture,
        "conditional.graphql",
        "generate_flow/fixtures/conditional.expected",
        include_str!("generate_flow/fixtures/conditional.graphql"),
        include_str!("generate_flow/fixtures/conditional.expected"),
    );
}
#[test]
fn fragment_spread() {
let input = include_str!("generate_flow/fixtures/fragment-spread.graphql");
let expected = include_str!("generate_flow/fixtures/fragment-spread.expected");
test_fixture(
transform_fixture,
"fragment-spread.graphql",
"generate_flow/fixtures/fragment-spread.expected",
input,
expected,
);
}
#[test]
fn inline_fragment() {
let input = include_str!("generate_flow/fixtures/inline-fragment.graphql");
let expected = include_str!("generate_flow/fixtures/inline-fragment.expected");
test_fixture(
transform_fixture,
"inline-fragment.graphql",
"generate_flow/fixtures/inline-fragment.expected",
input,
expected,
);
}
#[test]
fn linked_field() {
let input = include_str!("generate_flow/fixtures/linked-field.graphql");
let expected = include_str!("generate_flow/fixtures/linked-field.expected");
test_fixture(
transform_fixture,
"linked-field.graphql",
"generate_flow/fixtures/linked-field.expected",
input,
expected,
);
}
#[test]
fn match_field() {
let input = include_str!("generate_flow/fixtures/match-field.graphql");
let expected = include_str!("generate_flow/fixtures/match-field.expected");
test_fixture(
transform_fixture,
"match-field.graphql",
"generate_flow/fixtures/match-field.expected",
input,
expected,
);
}
#[test]
fn match_field_in_query() {
let input = include_str!("generate_flow/fixtures/match-field-in-query.graphql");
let expected = include_str!("generate_flow/fixtures/match-field-in-query.expected");
test_fixture(
transform_fixture,
"match-field-in-query.graphql",
"generate_flow/fixtures/match-field-in-query.expected",
input,
expected,
);
}
#[test]
fn refetchable() {
let input = include_str!("generate_flow/fixtures/refetchable.graphql");
let expected = include_str!("generate_flow/fixtures/refetchable.expected");
test_fixture(
transform_fixture,
"refetchable.graphql",
"generate_flow/fixtures/refetchable.expected",
input,
expected,
);
}
// #[test]
// fn mutaion_with_client_extension() {
// let input = include_str!("generate_flow/fixtures/mutaion-with-client-extension.graphql");
// let expected = include_str!("generate_flow/fixtures/mutaion-with-client-extension.expected");
// test_fixture(
// transform_fixture,
// "mutaion-with-client-extension.graphql",
// "generate_flow/fixtures/mutaion-with-client-extension.expected",
// input,
// expected,
// );
// }
#[test]
fn mutaion_with_response_on_inline_fragments() {
let input =
include_str!("generate_flow/fixtures/mutaion-with-response-on-inline-fragments.graphql");
let expected =
include_str!("generate_flow/fixtures/mutaion-with-response-on-inline-fragments.expected");
test_fixture(
transform_fixture,
"mutaion-with-response-on-inline-fragments.graphql",
"generate_flow/fixtures/mutaion-with-response-on-inline-fragments.expected",
input,
expected,
);
}
/// Checks the `mutation` fixture against its expected flow output.
#[test]
fn mutation() {
    test_fixture(
        transform_fixture,
        "mutation.graphql",
        "generate_flow/fixtures/mutation.expected",
        include_str!("generate_flow/fixtures/mutation.graphql"),
        include_str!("generate_flow/fixtures/mutation.expected"),
    );
}
#[test]
fn mutation_input_has_array() {
let input = include_str!("generate_flow/fixtures/mutation-input-has-array.graphql");
let expected = include_str!("generate_flow/fixtures/mutation-input-has-array.expected");
test_fixture(
transform_fixture,
"mutation-input-has-array.graphql",
"generate_flow/fixtures/mutation-input-has-array.expected",
input,
expected,
);
}
#[test]
fn mutation_with_enums_on_fragment() {
let input = include_str!("generate_flow/fixtures/mutation-with-enums-on-fragment.graphql");
let expected = include_str!("generate_flow/fixtures/mutation-with-enums-on-fragment.expected");
test_fixture(
transform_fixture,
"mutation-with-enums-on-fragment.graphql",
"generate_flow/fixtures/mutation-with-enums-on-fragment.expected",
input,
expected,
);
}
#[test]
fn mutation_with_nested_fragments() {
let input = include_str!("generate_flow/fixtures/mutation-with-nested-fragments.graphql");
let expected = include_str!("generate_flow/fixtures/mutation-with-nested-fragments.expected");
test_fixture(
transform_fixture,
"mutation-with-nested-fragments.graphql",
"generate_flow/fixtures/mutation-with-nested-fragments.expected",
input,
expected,
);
}
#[test]
fn plural_fragment() {
let input = include_str!("generate_flow/fixtures/plural-fragment.graphql");
let expected = include_str!("generate_flow/fixtures/plural-fragment.expected");
test_fixture(
transform_fixture,
"plural-fragment.graphql",
"generate_flow/fixtures/plural-fragment.expected",
input,
expected,
);
}
#[test]
fn query_with_handles() {
let input = include_str!("generate_flow/fixtures/query-with-handles.graphql");
let expected = include_str!("generate_flow/fixtures/query-with-handles.expected");
test_fixture(
transform_fixture,
"query-with-handles.graphql",
"generate_flow/fixtures/query-with-handles.expected",
input,
expected,
);
}
#[test]
fn query_with_match_fields() {
let input = include_str!("generate_flow/fixtures/query-with-match-fields.graphql");
let expected = include_str!("generate_flow/fixtures/query-with-match-fields.expected");
test_fixture(
transform_fixture,
"query-with-match-fields.graphql",
"generate_flow/fixtures/query-with-match-fields.expected",
input,
expected,
);
}
#[test]
fn query_with_module_field() {
let input = include_str!("generate_flow/fixtures/query-with-module-field.graphql");
let expected = include_str!("generate_flow/fixtures/query-with-module-field.expected");
test_fixture(
transform_fixture,
"query-with-module-field.graphql",
"generate_flow/fixtures/query-with-module-field.expected",
input,
expected,
);
}
#[test]
fn query_with_multiple_match_fields() {
let input = include_str!("generate_flow/fixtures/query-with-multiple-match-fields.graphql");
let expected = include_str!("generate_flow/fixtures/query-with-multiple-match-fields.expected");
test_fixture(
transform_fixture,
"query-with-multiple-match-fields.graphql",
"generate_flow/fixtures/query-with-multiple-match-fields.expected",
input,
expected,
);
}
#[test]
fn query_with_raw_response_on_conditional() {
let input =
include_str!("generate_flow/fixtures/query-with-raw-response-on-conditional.graphql");
let expected =
include_str!("generate_flow/fixtures/query-with-raw-response-on-conditional.expected");
test_fixture(
transform_fixture,
"query-with-raw-response-on-conditional.graphql",
"generate_flow/fixtures/query-with-raw-response-on-conditional.expected",
input,
expected,
);
}
#[test]
fn query_with_raw_response_on_literal_conditional() {
let input = include_str!(
"generate_flow/fixtures/query-with-raw-response-on-literal-conditional.graphql"
);
let expected = include_str!(
"generate_flow/fixtures/query-with-raw-response-on-literal-conditional.expected"
);
test_fixture(
transform_fixture,
"query-with-raw-response-on-literal-conditional.graphql",
"generate_flow/fixtures/query-with-raw-response-on-literal-conditional.expected",
input,
expected,
);
}
#[test]
fn query_with_stream() {
let input = include_str!("generate_flow/fixtures/query-with-stream.graphql");
let expected = include_str!("generate_flow/fixtures/query-with-stream.expected");
test_fixture(
transform_fixture,
"query-with-stream.graphql",
"generate_flow/fixtures/query-with-stream.expected",
input,
expected,
);
}
#[test]
fn query_with_stream_connection() {
let input = include_str!("generate_flow/fixtures/query-with-stream-connection.graphql");
let expected = include_str!("generate_flow/fixtures/query-with-stream-connection.expected");
test_fixture(
transform_fixture,
"query-with-stream-connection.graphql",
"generate_flow/fixtures/query-with-stream-connection.expected",
input,
expected,
);
}
#[test]
fn recursive_fragments() {
let input = include_str!("generate_flow/fixtures/recursive-fragments.graphql");
let expected = include_str!("generate_flow/fixtures/recursive-fragments.expected");
test_fixture(
transform_fixture,
"recursive-fragments.graphql",
"generate_flow/fixtures/recursive-fragments.expected",
input,
expected,
);
}
// #[test]
// fn refetchable_fragment() {
// let input = include_str!("generate_flow/fixtures/refetchable-fragment.graphql");
// let expected = include_str!("generate_flow/fixtures/refetchable-fragment.expected");
// test_fixture(
// transform_fixture,
// "refetchable-fragment.graphql",
// "generate_flow/fixtures/refetchable-fragment.expected",
// input,
// expected,
// );
// }
#[test]
fn relay_client_id_field() {
let input = include_str!("generate_flow/fixtures/relay-client-id-field.graphql");
let expected = include_str!("generate_flow/fixtures/relay-client-id-field.expected");
test_fixture(
transform_fixture,
"relay-client-id-field.graphql",
"generate_flow/fixtures/relay-client-id-field.expected",
input,
expected,
);
}
#[test]
fn roots() {
let input = include_str!("generate_flow/fixtures/roots.graphql");
let expected = include_str!("generate_flow/fixtures/roots.expected");
test_fixture(
transform_fixture,
"roots.graphql",
"generate_flow/fixtures/roots.expected",
input,
expected,
);
}
#[test]
fn scalar_field() {
let input = include_str!("generate_flow/fixtures/scalar-field.graphql");
let expected = include_str!("generate_flow/fixtures/scalar-field.expected");
test_fixture(
transform_fixture,
"scalar-field.graphql",
"generate_flow/fixtures/scalar-field.expected",
input,
expected,
);
}
/// Checks the `simple` fixture against its expected flow output.
#[test]
fn simple() {
    test_fixture(
        transform_fixture,
        "simple.graphql",
        "generate_flow/fixtures/simple.expected",
        include_str!("generate_flow/fixtures/simple.graphql"),
        include_str!("generate_flow/fixtures/simple.expected"),
    );
}
#[test]
fn typename_inside_with_overlapping_fields() {
let input =
include_str!("generate_flow/fixtures/typename-inside-with-overlapping-fields.graphql");
let expected =
include_str!("generate_flow/fixtures/typename-inside-with-overlapping-fields.expected");
test_fixture(
transform_fixture,
"typename-inside-with-overlapping-fields.graphql",
"generate_flow/fixtures/typename-inside-with-overlapping-fields.expected",
input,
expected,
);
}
#[test]
fn typename_on_union() {
let input = include_str!("generate_flow/fixtures/typename-on-union.graphql");
let expected = include_str!("generate_flow/fixtures/typename-on-union.expected");
test_fixture(
transform_fixture,
"typename-on-union.graphql",
"generate_flow/fixtures/typename-on-union.expected",
input,
expected,
);
}
#[test]
fn unmasked_fragment_spreads() {
let input = include_str!("generate_flow/fixtures/unmasked-fragment-spreads.graphql");
let expected = include_str!("generate_flow/fixtures/unmasked-fragment-spreads.expected");
test_fixture(
transform_fixture,
"unmasked-fragment-spreads.graphql",
"generate_flow/fixtures/unmasked-fragment-spreads.expected",
input,
expected,
);
}
| 30.823529 | 100 | 0.678702 |
23dddcdd158da2595d90e8301e19d15718eabc13 | 1,514 | // Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unused)]
fn main() { // drives every `mut_range_bound` scenario defined below
    mut_range_bound_upper();
    mut_range_bound_lower();
    mut_range_bound_both();
    mut_range_bound_no_mutation();
    immut_range_bound();
    mut_borrow_range_bound();
    immut_borrow_range_bound();
}
fn mut_range_bound_upper() { // upper bound reassigned inside the loop body
    let mut m = 4;
    for i in 0..m {
        m = 5;
    } // warning
}
fn mut_range_bound_lower() { // lower bound reassigned inside the loop body
    let mut m = 4;
    for i in m..10 {
        m *= 2;
    } // warning
}
fn mut_range_bound_both() { // both bounds reassigned inside the loop body
    let mut m = 4;
    let mut n = 6;
    for i in m..n {
        m = 5;
        n = 7;
    } // warning (1 for each mutated bound)
}
fn mut_range_bound_no_mutation() { // `mut` bound, but never written in the loop
    let mut m = 4;
    for i in 0..m {
        continue;
    } // no warning
}
fn mut_borrow_range_bound() { // bound mutated through a `&mut` borrow
    let mut m = 4;
    for i in 0..m {
        let n = &mut m; // warning
        *n += 1;
    }
}
fn immut_borrow_range_bound() { // bound only borrowed immutably inside the loop
    let mut m = 4;
    for i in 0..m {
        let n = &m; // should be no warning?
    }
}
fn immut_range_bound() { // bound is not even declared `mut`
    let m = 4;
    for i in 0..m {
        continue;
    } // no warning
}
| 20.739726 | 69 | 0.593131 |
90a0608d56e17fd26a41697a70117019528b4a4d | 11,146 | #![cfg_attr(not(feature = "std"), no_std)]
use ink_lang as ink;
#[ink::contract(version = "0.1.0")]
mod matchingengine {
use ink_core::storage;
#[ink(storage)]
struct MatchingEngine {
/// Contract owner
owner: storage::Value<AccountId>,
/// Contract admin (server that will input/output quote currency)
admin: storage::Value<AccountId>,
/////////////////////////////////////////////////////////////////////////////////
// Deposits / Balances / Withdrawals
/// Balances by quote currency ID and address
/// Quote currency ID:
/// 1 = Unique (not live yet, so unused for now)
/// 2 = KSM
/// 3 = DOT
/// 4 = EDG
/// (...) more to come
quote_balance: storage::HashMap<(u64, AccountId), Balance>,
/// Total trades counter (resettable)
total_traded: storage::HashMap<u64, Balance>,
/// Vault withdraw ledger
withdraw_queue: storage::HashMap<u128, (AccountId, Balance)>,
/// Last withdraw id
last_withdraw_id: storage::Value<u128>,
/// Recorded NFT deposits
nft_deposits: storage::HashMap<(u64, u64), AccountId>,
/// NFT Vault withdraw ledger
nft_withdraw_queue: storage::HashMap<u128, (AccountId, u64, u64)>,
/// Last NFT withdraw index
last_nft_withdraw_id: storage::Value<u128>,
/////////////////////////////////////////////////////////////////////////////////
// Asks
/// Current asks: ask_id -> (collectionId, tokenId, quote_id, price, seller)
asks: storage::HashMap<u128, (u64, u64, u64, Balance, AccountId)>,
/// Ask index: Helps find the ask by the colectionId + tokenId
asks_by_token: storage::HashMap<(u64, u64), u128>,
/// Last Ask ID
last_ask_id: storage::Value<u128>,
}
impl MatchingEngine {
#[ink(constructor)]
fn new(&mut self) {
// Set contract owner
self.owner.set(self.env().caller());
// Initialize initial value of last withdraw ids
self.last_withdraw_id.set(0);
self.last_nft_withdraw_id.set(0);
// Initialize the last ask ID
self.last_ask_id.set(0);
// Init KSM totals
self.total_traded.insert(2, 0);
}
/// Returns the contract owner
#[ink(message)]
fn get_owner(&self) -> AccountId {
*self.owner.get()
}
/// Set contract admin
#[ink(message)]
fn set_admin(&mut self, admin: AccountId) {
self.ensure_only_owner();
self.admin.set(admin);
}
/// Get total
#[ink(message)]
fn get_total(&self, quote_id: u64) -> Balance{
*self.total_traded.get("e_id).unwrap()
}
/// Reset total
#[ink(message)]
fn reset_total(&mut self, quote_id: u64) {
self.ensure_only_owner();
self.total_traded.insert(quote_id, 0);
}
        /// Admin: Make a deposit for a user.
        ///
        /// Only callable by the admin account (the server that moves quote
        /// currency in and out of the vault). Panics for non-admin callers.
        #[ink(message)]
        fn register_deposit(&mut self, quote_id: u64, deposit_balance: Balance, user: AccountId) {
            self.ensure_only_admin();
            // Check overflow. NOTE(review): the strict `>` also rejects a
            // zero-amount deposit — confirm that is intended.
            let initial_balance = self.balance_of_or_zero(quote_id, &user);
            assert!(initial_balance + deposit_balance > initial_balance);
            // Set or update quote balance
            self.quote_balance.insert((quote_id, user.clone()), initial_balance + deposit_balance);
        }
/// Get address balance in quote currency
#[ink(message)]
fn get_balance(&self, quote_id: u64) -> Balance {
self.balance_of_or_zero(quote_id, &self.env().caller())
}
/// User: Withdraw funds
#[ink(message)]
fn withdraw(&mut self, quote_id: u64, withdraw_balance: Balance) {
self.vault_withdraw(&self.env().caller(), quote_id, withdraw_balance);
}
/// Get last withdraw id
#[ink(message)]
fn get_last_withdraw_id(&self) -> u128 {
*self.last_withdraw_id.get()
}
/// Get withdraw by id
#[ink(message)]
fn get_withdraw_by_id(&self, id: u128) -> (AccountId, Balance) {
*self.withdraw_queue.get(&id).unwrap()
}
/// Get last NFT withdraw id
#[ink(message)]
fn get_last_nft_withdraw_id(&self) -> u128 {
*self.last_nft_withdraw_id.get()
}
/// Get NFT withdraw by id
#[ink(message)]
fn get_nft_withdraw_by_id(&self, id: u128) -> (AccountId, u64, u64) {
*self.nft_withdraw_queue.get(&id).unwrap()
}
/// Admin: Tell the market about a successful NFT deposit
#[ink(message)]
fn register_nft_deposit(&mut self, collection_id: u64, token_id: u64, user: AccountId) {
self.ensure_only_admin();
// Record the token deposit for the user
self.nft_deposits.insert((collection_id, token_id), user.clone());
}
/// Get deposit
#[ink(message)]
fn get_nft_deposit(&self, collection_id: u64, token_id: u64) -> AccountId {
*self.nft_deposits.get(&(collection_id, token_id)).unwrap()
}
        /// User: Place a deposited NFT for sale.
        ///
        /// Consumes the deposit record, stores the ask under a fresh ask id,
        /// and indexes it by (collection, token) for reverse lookup.
        /// Panics if the token has no recorded deposit.
        #[ink(message)]
        fn ask(&mut self, collection_id: u64, token_id: u64, quote_id: u64, price: Balance) {
            // Make sure sender owns this deposit; the contract *owner* may
            // place asks on anyone's behalf. NOTE(review): the old comment
            // said "admin", but the code compares against `owner` — confirm
            // which account is intended.
            let deposit_owner = *self.nft_deposits.get(&(collection_id, token_id)).unwrap();
            if self.env().caller() != *self.owner.get() {
                assert_eq!(deposit_owner, self.env().caller());
            }
            // Remove a deposit
            let _ = self.nft_deposits.remove(&(collection_id, token_id));
            // Place an ask (into asks with a new Ask ID)
            let ask_id = self.last_ask_id.get() + 1;
            let ask = (collection_id, token_id, quote_id, price, deposit_owner.clone());
            self.last_ask_id.set(ask_id);
            self.asks.insert(ask_id, ask.clone());
            // Record that token is being sold by this user (in asks_by_token) in reverse lookup index
            self.asks_by_token.insert((collection_id, token_id), ask_id);
        }
/// Get last ask ID
#[ink(message)]
fn get_last_ask_id(&self) -> u128 {
*self.last_ask_id.get()
}
/// Get ask by ID
#[ink(message)]
fn get_ask_by_id(&self, ask_id: u128) -> (u64, u64, u64, Balance, AccountId) {
*self.asks.get(&ask_id).unwrap()
}
/// Get ask by token
#[ink(message)]
fn get_ask_id_by_token(&self, collection_id: u64, token_id: u64) -> u128 {
*self.asks_by_token.get(&(collection_id, token_id)).unwrap()
}
        /// Cancel an ask.
        ///
        /// Only the seller (or the contract owner, as an override) may
        /// cancel. The ask is removed and the NFT is queued in the withdraw
        /// ledger to be returned to the seller. Panics if no ask exists for
        /// the token.
        #[ink(message)]
        fn cancel(&mut self, collection_id: u64, token_id: u64) {
            // Ensure that sender owns this ask (owner may cancel for anyone)
            let ask_id = *self.asks_by_token.get(&(collection_id, token_id)).unwrap();
            let (_, _, _, _, user) = *self.asks.get(&ask_id).unwrap();
            if self.env().caller() != *self.owner.get() {
                assert_eq!(self.env().caller(), user);
            }
            // Remove ask from everywhere
            self.remove_ask(collection_id, token_id, ask_id);
            // Transfer token back to user through NFT Vault
            self.last_nft_withdraw_id.set(self.last_nft_withdraw_id.get() + 1);
            self.nft_withdraw_queue.insert(*self.last_nft_withdraw_id.get(), (user, collection_id, token_id));
        }
/// Match an ask
#[ink(message)]
fn buy(&mut self, collection_id: u64, token_id: u64) {
// Get the ask
let ask_id = *self.asks_by_token.get(&(collection_id, token_id)).unwrap();
let (_, _, quote_id, price, seller) = *self.asks.get(&ask_id).unwrap();
// Check that buyer has enough balance
let initial_buyer_balance = self.balance_of_or_zero(quote_id, &self.env().caller());
assert!(initial_buyer_balance >= price);
// Subtract balance from buyer and increase balance of the seller and owner (due to commission)
let initial_seller_balance = self.balance_of_or_zero(quote_id, &seller);
assert!(initial_seller_balance + price > initial_seller_balance); // overflow protection
self.quote_balance.insert((quote_id, self.env().caller().clone()), initial_buyer_balance - price);
self.quote_balance.insert((quote_id, seller.clone()), initial_seller_balance + price);
// Remove ask from everywhere
self.remove_ask(collection_id, token_id, ask_id);
// Start an NFT withdraw from the vault
self.last_nft_withdraw_id.set(*self.last_nft_withdraw_id.get() + 1);
self.nft_withdraw_queue.insert(*self.last_nft_withdraw_id.get(), (self.env().caller().clone(), collection_id, token_id));
// Start Quote withdraw from the vault for the seller
self.vault_withdraw(&seller, quote_id, price);
// Update totals
let total = *self.total_traded.get("e_id).unwrap();
self.total_traded.insert(quote_id, total + price);
}
/// Panic if the sender is not the contract owner
fn ensure_only_owner(&self) {
assert_eq!(self.env().caller(), *self.owner.get());
}
/// Panic if the sender is not the contract admin
fn ensure_only_admin(&self) {
assert_eq!(self.env().caller(), *self.admin.get());
}
/// Return address balance in quote currency or 0
fn balance_of_or_zero(&self, quote_id: u64, user: &AccountId) -> Balance {
*self.quote_balance.get(&(quote_id, *user)).unwrap_or(&0)
}
fn remove_ask(&mut self, collection_id: u64, token_id: u64, ask_id: u128) {
// Remove the record that token is being sold by this user (from asks_by_token)
let _ = self.asks_by_token.remove(&(collection_id, token_id));
// Remove an ask (from asks)
let _ = self.asks.remove(&ask_id);
}
        /// Debits `withdraw_balance` from `user`'s balance in `quote_id`
        /// currency and queues a withdrawal entry in the vault ledger for the
        /// admin server to pay out.
        ///
        /// Panics if the user's balance is insufficient.
        fn vault_withdraw(&mut self, user: &AccountId, quote_id: u64, withdraw_balance: Balance) {
            // Make sure the user has enough
            let initial_balance = self.balance_of_or_zero(quote_id, user);
            assert!(initial_balance >= withdraw_balance);
            // Update user's quote balance
            self.quote_balance.insert((quote_id, (*user).clone()), initial_balance - withdraw_balance);
            // Increase last withdraw index
            self.last_withdraw_id.set(self.last_withdraw_id.get() + 1);
            // Start a withdraw from the vault
            self.withdraw_queue.insert(*self.last_withdraw_id.get(), ((*user).clone(), withdraw_balance));
        }
}
}
| 36.785479 | 133 | 0.571505 |
79cd7deef77cef6d11af4a2fc6182069152357e8 | 268 | mod annotate;
pub use annotate::AnnotateApp;
mod distill;
pub use distill::DistillApp;
mod filter_len;
pub use filter_len::FilterLenApp;
mod finetune;
pub use finetune::FinetuneApp;
mod prepare;
pub use prepare::PrepareApp;
mod server;
pub use server::ServerApp;
| 14.888889 | 33 | 0.779851 |
7a2590ea6bc09358442f809f3b90fb1895c01375 | 6,879 | //!
//! - <https://notcurses.com/notcurses_visual.3.html>
#![allow(dead_code)]
// TODO
// - NcRgba, NcVGeom...
// - allow changing the inner options after, with a safe interface
// - add alpha_color NcVisualOptions::ADDALPHA
// - add halign & valign
// - add blend NcVisualOptions::BLEND
// - add nodegrade NcVisualOptions::NODEGRADE
//
// MAYBE
// - offer the alternative of using a VisualOptions structure. (old: visual3)
use crate::{
ncresult,
sys::{NcVisual, NcVisualOptions},
NResult, Notcurses, Plane,
};
mod blitter;
mod builder;
mod rgba;
mod scale;
pub use blitter::Blitter;
pub use builder::VisualBuilder;
pub use rgba::Rgba;
pub use scale::Scale;
/// A virtual [`Rgba`] pixel framebuffer.
#[derive(Debug)]
pub struct Visual<'ncvisual> {
pub(crate) ncvisual: &'ncvisual mut NcVisual,
pub(crate) options: NcVisualOptions,
}
impl<'ncvisual> Drop for Visual<'ncvisual> {
/// Destroys the Visual.
///
/// Rendered elements will not be disrupted, but the visual can be neither
/// decoded nor rendered any further.
fn drop(&mut self) {
let _ = self.ncvisual.destroy();
}
}
/// # Methods
impl<'ncvisual, 'ncplane, 'plane> Visual<'ncvisual>
// where
//     'ncvisual: 'ncplane,
{
    /// Returns a default [`VisualBuilder`] used to customize a new `Visual`.
    pub fn build() -> VisualBuilder<'ncvisual, 'ncplane, 'plane> {
        VisualBuilder::default()
    }
    // /// Creates a `Visual` from an existing [`NcVisual`] and [`NcVisualOptions`].
    // pub fn from_ncvisual(visual: &'ncvisual mut NcVisual) -> Visual<'ncvisual> {
    //     Self {
    //         ncvisual: visual,
    //         // options: None,
    //     }
    // }
    /// Returns a shared reference to the inner [`NcVisual`].
    pub fn as_ncvisual(&self) -> &NcVisual {
        self.ncvisual
    }
    /// Returns a mutable reference to the inner `NcVisual`.
    pub fn as_ncvisual_mut(&mut self) -> &mut NcVisual {
        self.ncvisual
    }
    /// Resizes the visual to `x`,`y` pixels, using interpolation.
    pub fn resize(&mut self, x: u32, y: u32) -> NResult<()> {
        // NOTE(review): arguments are swapped here because the inner call
        // apparently takes rows (y) before columns (x) — confirm against the
        // NcVisual::resize signature.
        ncresult![NcVisual::resize(self.ncvisual, y, x)]
    }
    /// Resizes the visual to `x`,`y` pixels, without using interpolation.
    pub fn resize_ni(&mut self, x: u32, y: u32) -> NResult<()> {
        // Same y-before-x swap as `resize` above.
        ncresult![NcVisual::resize_noninterpolative(self.ncvisual, y, x)]
    }
    /// Renders the decoded frame to the configured [`Plane`][crate::Plane].
    pub fn render_plane(&mut self, nc: &mut Notcurses) -> NResult<()> {
        // A target plane must have been configured first (see `set_plane`).
        assert![!self.options.n.is_null()];
        // Clear CHILDPLANE so rendering targets the configured plane itself.
        // NOTE(review): if CHILDPLANE is a 32-bit constant, `!… as u64`
        // zero-extends and would also clear the upper 32 flag bits — confirm
        // the constant's width.
        self.options.flags &= !NcVisualOptions::CHILDPLANE as u64;
        // The returned plane pointer is intentionally ignored: we render into
        // the already-configured plane.
        let _ = NcVisual::render(self.ncvisual, nc.nc, &self.options)?;
        Ok(())
    }
    // /// Renders the decoded frame as a new plane, that is a child of the configured
    // /// [`Plane`][crate::Plane], and returns it.
    // // WIP:0 FIXME: visual doesn't live long enough
    // pub fn render_child_plane(
    //     &'ncvisual mut self,
    //     nc: &mut Notcurses,
    // ) -> NResult<Plane<'ncvisual>> {
    //     assert![!self.options.n.is_null()];
    //     self.options.flags |= NcVisualOptions::CHILDPLANE as u64;
    //     let child_plane = NcVisual::render(self.ncvisual, nc.nc, &self.options)?;
    //     Ok(Plane::<'ncplane> {
    //         ncplane: child_plane,
    //     })
    // }
    // /// Renders the decoded frame as a new [`Plane`][crate::Plane], and returns it.
    // ///
    // /// Doesn't need to have a plane configured.
    // // WIP:1 FIXME: visual doesn't live long enough
    // pub fn render_new_plane(&'ncvisual mut self, nc: &mut Notcurses) -> NResult<Plane<'ncplane>> {
    //     self.options.flags &= !NcVisualOptions::CHILDPLANE as u64;
    //     let ncplane = NcVisual::render(self.ncvisual, nc.nc, &self.options)?;
    //     Ok(ncplane.into())
    // }
}
/// # Post-Builder Configuration Methods
///
/// These methods allow re-configuring a `Visual` after it has been built
/// via [`VisualBuilder`].
impl<'ncvisual, 'ncplane> Visual<'ncvisual> {
    /// (re)Sets the `Visual` based off RGBA content in memory at `rgba`.
    pub fn set_from_rgba(&mut self, rgba: &[u8], cols: u32, rows: u32) -> NResult<()> {
        // NOTE(review): the previous `NcVisual` is replaced without being
        // destroyed first; Drop only frees the final one, so the old visual
        // may leak — confirm ownership semantics of NcVisual.
        self.ncvisual = NcVisual::from_rgba(rgba, rows, cols * 4, cols)?;
        Ok(())
    }
    /// (re)Sets the `Visual` based off RGB content in memory at `rgb`.
    pub fn set_from_rgb(&mut self, rgb: &[u8], cols: u32, rows: u32, alpha: u8) -> NResult<()> {
        // NOTE(review): rowstride `cols * 4` for packed RGB (3 bytes/pixel)
        // looks suspect — confirm the expected stride for from_rgb_packed.
        self.ncvisual = NcVisual::from_rgb_packed(rgb, rows, cols * 4, cols, alpha)?;
        Ok(())
    }
    /// (re)Sets the `Visual` based off RGBX content in memory at `rgbx`.
    pub fn set_from_rgbx(&mut self, rgbx: &[u8], cols: u32, rows: u32, alpha: u8) -> NResult<()> {
        self.ncvisual = NcVisual::from_rgb_loose(rgbx, rows, cols * 4, cols, alpha)?;
        Ok(())
    }
    /// (re)Sets the `Visual` based off BGRA content in memory at `bgra`.
    pub fn set_from_bgra(&mut self, bgra: &[u8], cols: u32, rows: u32) -> NResult<()> {
        self.ncvisual = NcVisual::from_bgra(bgra, rows, cols * 4, cols)?;
        Ok(())
    }
    /// (re)Sets the `Visual` from a `file`, extracts the codec and paramenters
    /// and decodes the first image to memory.
    pub fn set_from_file(&mut self, file: &str) -> NResult<()> {
        self.ncvisual = NcVisual::from_file(file)?;
        Ok(())
    }
    /// (re)Sets the [`Blitter`]. Default: `Blitter::Default`.
    pub fn set_blitter(&mut self, blitter: Blitter) {
        self.options.blitter = blitter.into();
    }
    /// (re)Sets the [`Scale`]. Default: `Blitter::Default`.
    pub fn set_scale(&mut self, scale: Scale) {
        self.options.scaling = scale.into();
    }
    /// (re)Sets the [`Plane`] used by the rendering functions. Default: unset.
    pub fn set_plane(&mut self, plane: &mut Plane<'ncplane>) {
        self.options.n = plane.as_ncplane_mut();
    }
    /// Unsets the [`Plane`]. The plane is unset by default.
    pub fn unset_plane(&mut self) {
        // A null pointer marks "no plane configured" (see the assert in
        // `render_plane`).
        self.options.n = core::ptr::null_mut();
    }
    /// Sets whether the scaling should be done with interpolation or not.
    ///
    /// The default is to interpolate.
    pub fn set_interpolate(&mut self, interpolate: bool) {
        if interpolate {
            // NOTE(review): same `!CONST as u64` width concern as in
            // `render_plane` — confirm NOINTERPOLATE's width.
            self.options.flags &= !NcVisualOptions::NOINTERPOLATE as u64;
        } else {
            self.options.flags |= NcVisualOptions::NOINTERPOLATE as u64;
        }
    }
    /// Sets the RGB color to be treated as transparent. Default: `None`.
    ///
    /// NOTE(review): unlike the other setters this consumes and returns
    /// `Self` (builder style) — consider aligning signatures in a future
    /// breaking release.
    pub fn set_transparent_color(mut self, color: Option<u32>) -> Self {
        if let Some(color) = color {
            self.options.flags |= NcVisualOptions::ADDALPHA as u64;
            self.options.transcolor = color;
        } else {
            self.options.flags &= !NcVisualOptions::ADDALPHA as u64;
            self.options.transcolor = 0;
        }
        self
    }
}
| 34.395 | 101 | 0.60692 |
67dfbe37e23b8c9939c696b8c9b2c4297e2c38bd | 7,365 | #[doc = "Reader of register STAT_SET"]
pub type R = crate::R<u32, super::STAT_SET>;
#[doc = "Writer for register STAT_SET"]
pub type W = crate::W<u32, super::STAT_SET>;
#[doc = "Register STAT_SET `reset()`'s with value 0"]
impl crate::ResetValue for super::STAT_SET {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `IRQ`"]
pub type IRQ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `IRQ`"]
pub struct IRQ_W<'a> {
    w: &'a mut W,
}
impl<'a> IRQ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 0 only; all other bits are preserved.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `AXI_WRITE_ERROR`"]
pub type AXI_WRITE_ERROR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `AXI_WRITE_ERROR`"]
pub struct AXI_WRITE_ERROR_W<'a> {
    w: &'a mut W,
}
impl<'a> AXI_WRITE_ERROR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 1 only; all other bits are preserved.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `AXI_READ_ERROR`"]
pub type AXI_READ_ERROR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `AXI_READ_ERROR`"]
pub struct AXI_READ_ERROR_W<'a> {
    w: &'a mut W,
}
impl<'a> AXI_READ_ERROR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 2 only; all other bits are preserved.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `NEXT_IRQ`"]
pub type NEXT_IRQ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `NEXT_IRQ`"]
pub struct NEXT_IRQ_W<'a> {
    w: &'a mut W,
}
impl<'a> NEXT_IRQ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 3 only; all other bits are preserved.
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `AXI_ERROR_ID`"]
pub type AXI_ERROR_ID_R = crate::R<u8, u8>;
#[doc = "Reader of field `LUT_DMA_LOAD_DONE_IRQ`"]
pub type LUT_DMA_LOAD_DONE_IRQ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LUT_DMA_LOAD_DONE_IRQ`"]
pub struct LUT_DMA_LOAD_DONE_IRQ_W<'a> {
    w: &'a mut W,
}
impl<'a> LUT_DMA_LOAD_DONE_IRQ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 8 only; all other bits are preserved.
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `RSVD2`"]
pub type RSVD2_R = crate::R<u8, u8>;
#[doc = "Reader of field `BLOCKY`"]
pub type BLOCKY_R = crate::R<u8, u8>;
#[doc = "Reader of field `BLOCKX`"]
pub type BLOCKX_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bit 0 - Indicates current PXP interrupt status"]
    #[inline(always)]
    pub fn irq(&self) -> IRQ_R {
        IRQ_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Indicates PXP encountered an AXI write error and processing has been terminated."]
    #[inline(always)]
    pub fn axi_write_error(&self) -> AXI_WRITE_ERROR_R {
        AXI_WRITE_ERROR_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Indicates PXP encountered an AXI read error and processing has been terminated."]
    #[inline(always)]
    pub fn axi_read_error(&self) -> AXI_READ_ERROR_R {
        AXI_READ_ERROR_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Indicates that a command issued with the \"Next Command\" functionality has been issued and that a new command may be initiated with a write to the PXP_NEXT register"]
    #[inline(always)]
    pub fn next_irq(&self) -> NEXT_IRQ_R {
        NEXT_IRQ_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bits 4:7 - Indicates the AXI ID of the failing bus operation."]
    #[inline(always)]
    pub fn axi_error_id(&self) -> AXI_ERROR_ID_R {
        AXI_ERROR_ID_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    #[doc = "Bit 8 - Indicates that the LUT DMA transfer has completed."]
    #[inline(always)]
    pub fn lut_dma_load_done_irq(&self) -> LUT_DMA_LOAD_DONE_IRQ_R {
        LUT_DMA_LOAD_DONE_IRQ_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bits 9:15 - Reserved, always set to zero."]
    #[inline(always)]
    pub fn rsvd2(&self) -> RSVD2_R {
        RSVD2_R::new(((self.bits >> 9) & 0x7f) as u8)
    }
    // NOTE(review): the doc text below says "X coordinate" but this field is
    // BLOCKY (bits 16:23) — most likely the Y coordinate; fix in the source
    // SVD and regenerate.
    #[doc = "Bits 16:23 - Indicates the X coordinate of the block currently being rendered."]
    #[inline(always)]
    pub fn blocky(&self) -> BLOCKY_R {
        BLOCKY_R::new(((self.bits >> 16) & 0xff) as u8)
    }
    #[doc = "Bits 24:31 - Indicates the X coordinate of the block currently being rendered."]
    #[inline(always)]
    pub fn blockx(&self) -> BLOCKX_R {
        BLOCKX_R::new(((self.bits >> 24) & 0xff) as u8)
    }
}
// Only the event/interrupt bits get write proxies; the read-only fields
// (AXI_ERROR_ID, RSVD2, BLOCKX/BLOCKY) are intentionally absent here.
impl W {
    #[doc = "Bit 0 - Indicates current PXP interrupt status"]
    #[inline(always)]
    pub fn irq(&mut self) -> IRQ_W {
        IRQ_W { w: self }
    }
    #[doc = "Bit 1 - Indicates PXP encountered an AXI write error and processing has been terminated."]
    #[inline(always)]
    pub fn axi_write_error(&mut self) -> AXI_WRITE_ERROR_W {
        AXI_WRITE_ERROR_W { w: self }
    }
    #[doc = "Bit 2 - Indicates PXP encountered an AXI read error and processing has been terminated."]
    #[inline(always)]
    pub fn axi_read_error(&mut self) -> AXI_READ_ERROR_W {
        AXI_READ_ERROR_W { w: self }
    }
    #[doc = "Bit 3 - Indicates that a command issued with the \"Next Command\" functionality has been issued and that a new command may be initiated with a write to the PXP_NEXT register"]
    #[inline(always)]
    pub fn next_irq(&mut self) -> NEXT_IRQ_W {
        NEXT_IRQ_W { w: self }
    }
    #[doc = "Bit 8 - Indicates that the LUT DMA transfer has completed."]
    #[inline(always)]
    pub fn lut_dma_load_done_irq(&mut self) -> LUT_DMA_LOAD_DONE_IRQ_W {
        LUT_DMA_LOAD_DONE_IRQ_W { w: self }
    }
}
| 34.255814 | 188 | 0.590631 |
e4bd3f4ba725daee7fa0d2434159d467ecba2f8c | 3,470 | /// Params defines the parameters for the bank module.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Params {
#[prost(message, repeated, tag="1")]
pub send_enabled: ::prost::alloc::vec::Vec<SendEnabled>,
#[prost(bool, tag="2")]
pub default_send_enabled: bool,
}
/// SendEnabled maps coin denom to a send_enabled status (whether a denom is
/// sendable).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SendEnabled {
    /// Coin denomination this entry applies to.
    #[prost(string, tag="1")]
    pub denom: ::prost::alloc::string::String,
    /// Whether sending this denom is enabled.
    #[prost(bool, tag="2")]
    pub enabled: bool,
}
/// Input models transaction input.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Input {
    /// Account address for this input.
    #[prost(string, tag="1")]
    pub address: ::prost::alloc::string::String,
    /// Coin amounts associated with this input.
    #[prost(message, repeated, tag="2")]
    pub coins: ::prost::alloc::vec::Vec<super::super::base::v1beta1::Coin>,
}
/// Output models transaction outputs.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Output {
    /// Account address for this output.
    #[prost(string, tag="1")]
    pub address: ::prost::alloc::string::String,
    /// Coin amounts associated with this output.
    #[prost(message, repeated, tag="2")]
    pub coins: ::prost::alloc::vec::Vec<super::super::base::v1beta1::Coin>,
}
/// Supply represents a struct that passively keeps track of the total supply
/// amounts in the network.
/// This message is deprecated now that supply is indexed by denom.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Supply {
    /// Total supply, one Coin entry per denomination.
    #[prost(message, repeated, tag="1")]
    pub total: ::prost::alloc::vec::Vec<super::super::base::v1beta1::Coin>,
}
/// DenomUnit represents a struct that describes a given
/// denomination unit of the basic token.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DenomUnit {
    /// denom represents the string name of the given denom unit (e.g uatom).
    #[prost(string, tag="1")]
    pub denom: ::prost::alloc::string::String,
    /// exponent represents power of 10 exponent that one must
    /// raise the base_denom to in order to equal the given DenomUnit's denom
    /// 1 denom = 10^exponent base_denom
    /// (e.g. with a base_denom of uatom, one can create a DenomUnit of 'atom' with
    /// exponent = 6, thus: 1 atom = 10^6 uatom).
    #[prost(uint32, tag="2")]
    pub exponent: u32,
    /// aliases is a list of string aliases for the given denom
    #[prost(string, repeated, tag="3")]
    pub aliases: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
/// Metadata represents a struct that describes
/// a basic token.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Metadata {
    /// Free-form description of the token.
    #[prost(string, tag="1")]
    pub description: ::prost::alloc::string::String,
    /// denom_units represents the list of DenomUnit's for a given coin
    #[prost(message, repeated, tag="2")]
    pub denom_units: ::prost::alloc::vec::Vec<DenomUnit>,
    /// base represents the base denom (should be the DenomUnit with exponent = 0).
    #[prost(string, tag="3")]
    pub base: ::prost::alloc::string::String,
    /// display indicates the suggested denom that should be
    /// displayed in clients.
    #[prost(string, tag="4")]
    pub display: ::prost::alloc::string::String,
    /// name defines the name of the token (eg: Cosmos Atom)
    #[prost(string, tag="5")]
    pub name: ::prost::alloc::string::String,
    /// symbol is the token symbol usually shown on exchanges (eg: ATOM). This can
    /// be the same as the display.
    #[prost(string, tag="6")]
    pub symbol: ::prost::alloc::string::String,
}
f5ceb1a8ec10af96a41ffea2200251c0f1d65a3b | 67,851 | use solana_cli::{
cli::{process_command, request_and_confirm_airdrop, CliCommand, CliConfig},
spend_utils::SpendAmount,
stake::StakeAuthorizationIndexed,
test_utils::{check_ready, check_recent_balance},
};
use solana_cli_output::{parse_sign_only_reply_string, OutputFormat};
use solana_client::{
blockhash_query::{self, BlockhashQuery},
nonce_utils,
rpc_client::RpcClient,
};
use solana_core::test_validator::TestValidator;
use solana_faucet::faucet::run_local_faucet;
use solana_sdk::{
account_utils::StateMut,
commitment_config::CommitmentConfig,
nonce::State as NonceState,
pubkey::Pubkey,
signature::{keypair_from_seed, Keypair, Signer},
stake::{
self,
instruction::LockupArgs,
state::{Lockup, StakeAuthorize, StakeState},
},
};
use solana_streamer::socket::SocketAddrSpace;
#[test]
fn test_stake_delegation_force() {
    // Bring up a single-node cluster plus a local faucet to fund accounts.
    let mint = Keypair::new();
    let mint_pk = mint.pubkey();
    let withdrawer_pk = Keypair::new().pubkey();
    let faucet = run_local_faucet(mint, None);
    let validator =
        TestValidator::with_no_fees(mint_pk, Some(faucet), SocketAddrSpace::Unspecified);

    let client =
        RpcClient::new_with_commitment(validator.rpc_url(), CommitmentConfig::processed());

    // The payer signs and funds every CLI command issued below.
    let payer = Keypair::new();
    let mut cli_cfg = CliConfig::recent_for_tests();
    cli_cfg.json_rpc_url = validator.rpc_url();
    cli_cfg.signers = vec![&payer];
    request_and_confirm_airdrop(&client, &cli_cfg, &cli_cfg.signers[0].pubkey(), 100_000)
        .unwrap();

    // Create the vote account that stake will be delegated to.
    let vote_kp = Keypair::new();
    cli_cfg.signers = vec![&payer, &vote_kp];
    cli_cfg.command = CliCommand::CreateVoteAccount {
        vote_account: 1,
        seed: None,
        identity_account: 0,
        authorized_voter: None,
        authorized_withdrawer: withdrawer_pk,
        commission: 0,
        memo: None,
    };
    process_command(&cli_cfg).unwrap();

    // Create and fund the stake account.
    let stake_kp = Keypair::new();
    cli_cfg.signers = vec![&payer, &stake_kp];
    cli_cfg.command = CliCommand::CreateStakeAccount {
        stake_account: 1,
        seed: None,
        staker: None,
        withdrawer: None,
        withdrawer_signer: None,
        lockup: Lockup::default(),
        amount: SpendAmount::Some(50_000),
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        from: 0,
    };
    process_command(&cli_cfg).unwrap();

    // Without `force`, delegating to a vote account that has never voted
    // must be rejected.
    cli_cfg.signers = vec![&payer];
    cli_cfg.command = CliCommand::DelegateStake {
        stake_account_pubkey: stake_kp.pubkey(),
        vote_account_pubkey: vote_kp.pubkey(),
        stake_authority: 0,
        force: false,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::default(),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
    };
    process_command(&cli_cfg).unwrap_err();

    // With `force: true` the sanity check is bypassed and delegation succeeds.
    cli_cfg.command = CliCommand::DelegateStake {
        stake_account_pubkey: stake_kp.pubkey(),
        vote_account_pubkey: vote_kp.pubkey(),
        stake_authority: 0,
        force: true,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::default(),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
    };
    process_command(&cli_cfg).unwrap();
}
#[test]
fn test_seed_stake_delegation_and_deactivation() {
solana_logger::setup();
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator =
TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let validator_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
let mut config_validator = CliConfig::recent_for_tests();
config_validator.json_rpc_url = test_validator.rpc_url();
config_validator.signers = vec![&validator_keypair];
request_and_confirm_airdrop(
&rpc_client,
&config_validator,
&config_validator.signers[0].pubkey(),
100_000,
)
.unwrap();
check_recent_balance(100_000, &rpc_client, &config_validator.signers[0].pubkey());
let stake_address = Pubkey::create_with_seed(
&config_validator.signers[0].pubkey(),
"hi there",
&stake::program::id(),
)
.expect("bad seed");
// Create stake account with a seed, uses the validator config as the base,
// which is nice ;)
config_validator.command = CliCommand::CreateStakeAccount {
stake_account: 0,
seed: Some("hi there".to_string()),
staker: None,
withdrawer: None,
withdrawer_signer: None,
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config_validator).unwrap();
// Delegate stake
config_validator.command = CliCommand::DelegateStake {
stake_account_pubkey: stake_address,
vote_account_pubkey: test_validator.vote_account_address(),
stake_authority: 0,
force: true,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config_validator).unwrap();
// Deactivate stake
config_validator.command = CliCommand::DeactivateStake {
stake_account_pubkey: stake_address,
stake_authority: 0,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
seed: None,
fee_payer: 0,
};
process_command(&config_validator).unwrap();
}
#[test]
fn test_stake_delegation_and_deactivation() {
    solana_logger::setup();
    // Spin up a single-node test cluster and a local faucet for funding.
    let mint_keypair = Keypair::new();
    let mint_pubkey = mint_keypair.pubkey();
    let faucet_addr = run_local_faucet(mint_keypair, None);
    let test_validator =
        TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
    let rpc_client =
        RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
    let validator_keypair = Keypair::new();
    let mut config_validator = CliConfig::recent_for_tests();
    config_validator.json_rpc_url = test_validator.rpc_url();
    config_validator.signers = vec![&validator_keypair];
    // Deterministic stake keypair from a fixed seed for reproducibility.
    let stake_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
    request_and_confirm_airdrop(
        &rpc_client,
        &config_validator,
        &config_validator.signers[0].pubkey(),
        100_000,
    )
    .unwrap();
    check_recent_balance(100_000, &rpc_client, &config_validator.signers[0].pubkey());
    // Create stake account
    config_validator.signers.push(&stake_keypair);
    config_validator.command = CliCommand::CreateStakeAccount {
        stake_account: 1,
        seed: None,
        staker: None,
        withdrawer: None,
        withdrawer_signer: None,
        lockup: Lockup::default(),
        amount: SpendAmount::Some(50_000),
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        from: 0,
    };
    process_command(&config_validator).unwrap();
    // Delegate stake (force: the test validator's vote account has no votes yet)
    config_validator.signers.pop();
    config_validator.command = CliCommand::DelegateStake {
        stake_account_pubkey: stake_keypair.pubkey(),
        vote_account_pubkey: test_validator.vote_account_address(),
        stake_authority: 0,
        force: true,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::default(),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
    };
    process_command(&config_validator).unwrap();
    // Deactivate stake
    config_validator.command = CliCommand::DeactivateStake {
        stake_account_pubkey: stake_keypair.pubkey(),
        stake_authority: 0,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::default(),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        seed: None,
        fee_payer: 0,
    };
    process_command(&config_validator).unwrap();
}
#[test]
fn test_offline_stake_delegation_and_deactivation() {
    solana_logger::setup();
    let mint_keypair = Keypair::new();
    let mint_pubkey = mint_keypair.pubkey();
    let faucet_addr = run_local_faucet(mint_keypair, None);
    let test_validator =
        TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
    let rpc_client =
        RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
    let mut config_validator = CliConfig::recent_for_tests();
    config_validator.json_rpc_url = test_validator.rpc_url();
    let validator_keypair = Keypair::new();
    config_validator.signers = vec![&validator_keypair];
    let mut config_payer = CliConfig::recent_for_tests();
    config_payer.json_rpc_url = test_validator.rpc_url();
    let stake_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
    // The "offline" config has no RPC URL: it can sign but never broadcast.
    let mut config_offline = CliConfig::recent_for_tests();
    config_offline.json_rpc_url = String::default();
    config_offline.command = CliCommand::ClusterVersion;
    let offline_keypair = Keypair::new();
    config_offline.signers = vec![&offline_keypair];
    // Verify that we cannot reach the cluster
    process_command(&config_offline).unwrap_err();
    request_and_confirm_airdrop(
        &rpc_client,
        &config_validator,
        &config_validator.signers[0].pubkey(),
        100_000,
    )
    .unwrap();
    check_recent_balance(100_000, &rpc_client, &config_validator.signers[0].pubkey());
    request_and_confirm_airdrop(
        &rpc_client,
        &config_offline,
        &config_offline.signers[0].pubkey(),
        100_000,
    )
    .unwrap();
    check_recent_balance(100_000, &rpc_client, &config_offline.signers[0].pubkey());
    // Create stake account with the offline key as the stake (staker) authority
    config_validator.signers.push(&stake_keypair);
    config_validator.command = CliCommand::CreateStakeAccount {
        stake_account: 1,
        seed: None,
        staker: Some(config_offline.signers[0].pubkey()),
        withdrawer: None,
        withdrawer_signer: None,
        lockup: Lockup::default(),
        amount: SpendAmount::Some(50_000),
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        from: 0,
    };
    process_command(&config_validator).unwrap();
    // Delegate stake offline: sign against a pinned blockhash without
    // broadcasting (`sign_only: true`, `BlockhashQuery::None`).
    let blockhash = rpc_client.get_latest_blockhash().unwrap();
    config_offline.command = CliCommand::DelegateStake {
        stake_account_pubkey: stake_keypair.pubkey(),
        vote_account_pubkey: test_validator.vote_account_address(),
        stake_authority: 0,
        force: true,
        sign_only: true,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::None(blockhash),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
    };
    config_offline.output_format = OutputFormat::JsonCompact;
    let sig_response = process_command(&config_offline).unwrap();
    // The sign-only reply carries the offline signature; wrap it in a
    // presigner so the online payer can broadcast the same transaction.
    let sign_only = parse_sign_only_reply_string(&sig_response);
    assert!(sign_only.has_all_signers());
    let offline_presigner = sign_only
        .presigner_of(&config_offline.signers[0].pubkey())
        .unwrap();
    config_payer.signers = vec![&offline_presigner];
    config_payer.command = CliCommand::DelegateStake {
        stake_account_pubkey: stake_keypair.pubkey(),
        vote_account_pubkey: test_validator.vote_account_address(),
        stake_authority: 0,
        force: true,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::FeeCalculator(blockhash_query::Source::Cluster, blockhash),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
    };
    process_command(&config_payer).unwrap();
    // Deactivate stake offline: same sign-offline/broadcast-online dance.
    let blockhash = rpc_client.get_latest_blockhash().unwrap();
    config_offline.command = CliCommand::DeactivateStake {
        stake_account_pubkey: stake_keypair.pubkey(),
        stake_authority: 0,
        sign_only: true,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::None(blockhash),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        seed: None,
        fee_payer: 0,
    };
    let sig_response = process_command(&config_offline).unwrap();
    let sign_only = parse_sign_only_reply_string(&sig_response);
    assert!(sign_only.has_all_signers());
    let offline_presigner = sign_only
        .presigner_of(&config_offline.signers[0].pubkey())
        .unwrap();
    config_payer.signers = vec![&offline_presigner];
    config_payer.command = CliCommand::DeactivateStake {
        stake_account_pubkey: stake_keypair.pubkey(),
        stake_authority: 0,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::FeeCalculator(blockhash_query::Source::Cluster, blockhash),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        seed: None,
        fee_payer: 0,
    };
    process_command(&config_payer).unwrap();
}
#[test]
fn test_nonced_stake_delegation_and_deactivation() {
    solana_logger::setup();
    let mint_keypair = Keypair::new();
    let mint_pubkey = mint_keypair.pubkey();
    let faucet_addr = run_local_faucet(mint_keypair, None);
    let test_validator =
        TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
    let rpc_client =
        RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
    let config_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
    let mut config = CliConfig::recent_for_tests();
    config.signers = vec![&config_keypair];
    config.json_rpc_url = test_validator.rpc_url();
    // Nonce accounts must be funded to rent exemption.
    let minimum_nonce_balance = rpc_client
        .get_minimum_balance_for_rent_exemption(NonceState::size())
        .unwrap();
    request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 100_000)
        .unwrap();
    // Create stake account
    let stake_keypair = Keypair::new();
    config.signers.push(&stake_keypair);
    config.command = CliCommand::CreateStakeAccount {
        stake_account: 1,
        seed: None,
        staker: None,
        withdrawer: None,
        withdrawer_signer: None,
        lockup: Lockup::default(),
        amount: SpendAmount::Some(50_000),
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        from: 0,
    };
    process_command(&config).unwrap();
    // Create nonce account
    let nonce_account = Keypair::new();
    config.signers[1] = &nonce_account;
    config.command = CliCommand::CreateNonceAccount {
        nonce_account: 1,
        seed: None,
        nonce_authority: Some(config.signers[0].pubkey()),
        memo: None,
        amount: SpendAmount::Some(minimum_nonce_balance),
    };
    process_command(&config).unwrap();
    // Fetch nonce hash: the durable nonce stands in for a recent blockhash.
    let nonce_hash = nonce_utils::get_account_with_commitment(
        &rpc_client,
        &nonce_account.pubkey(),
        CommitmentConfig::processed(),
    )
    .and_then(|ref a| nonce_utils::data_from_account(a))
    .unwrap()
    .blockhash;
    // Delegate stake, consuming (and advancing) the nonce
    config.signers = vec![&config_keypair];
    config.command = CliCommand::DelegateStake {
        stake_account_pubkey: stake_keypair.pubkey(),
        vote_account_pubkey: test_validator.vote_account_address(),
        stake_authority: 0,
        force: true,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::FeeCalculator(
            blockhash_query::Source::NonceAccount(nonce_account.pubkey()),
            nonce_hash,
        ),
        nonce_account: Some(nonce_account.pubkey()),
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
    };
    process_command(&config).unwrap();
    // Fetch nonce hash again: using it above advanced the stored value.
    let nonce_hash = nonce_utils::get_account_with_commitment(
        &rpc_client,
        &nonce_account.pubkey(),
        CommitmentConfig::processed(),
    )
    .and_then(|ref a| nonce_utils::data_from_account(a))
    .unwrap()
    .blockhash;
    // Deactivate stake, again via the durable nonce
    config.command = CliCommand::DeactivateStake {
        stake_account_pubkey: stake_keypair.pubkey(),
        stake_authority: 0,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::FeeCalculator(
            blockhash_query::Source::NonceAccount(nonce_account.pubkey()),
            nonce_hash,
        ),
        nonce_account: Some(nonce_account.pubkey()),
        nonce_authority: 0,
        memo: None,
        seed: None,
        fee_payer: 0,
    };
    process_command(&config).unwrap();
}
/// End-to-end test of `CliCommand::StakeAuthorize` against a local validator:
/// 1. create a stake account whose authority is the default signer;
/// 2. reassign the stake authority online (single authorization);
/// 3. reassign both stake and withdraw authorities in one batched command;
/// 4. hand the stake authority to an offline keypair;
/// 5. perform an offline signed authorization (blockhash pinned, presigner replay);
/// 6. perform a nonced offline authorization and verify the nonce advanced.
#[test]
fn test_stake_authorize() {
    solana_logger::setup();
    // Local validator with no fees, funded via a local faucet.
    let mint_keypair = Keypair::new();
    let mint_pubkey = mint_keypair.pubkey();
    let faucet_addr = run_local_faucet(mint_keypair, None);
    let test_validator =
        TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
    let rpc_client =
        RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
    let default_signer = Keypair::new();
    let mut config = CliConfig::recent_for_tests();
    config.json_rpc_url = test_validator.rpc_url();
    config.signers = vec![&default_signer];
    request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 100_000)
        .unwrap();
    // Offline config: deterministic keypair, no RPC URL so commands cannot
    // reach the cluster (only sign-only operations can succeed).
    let offline_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
    let mut config_offline = CliConfig::recent_for_tests();
    config_offline.signers = vec![&offline_keypair];
    config_offline.json_rpc_url = String::default();
    let offline_authority_pubkey = config_offline.signers[0].pubkey();
    config_offline.command = CliCommand::ClusterVersion;
    // Verify that we cannot reach the cluster
    process_command(&config_offline).unwrap_err();
    request_and_confirm_airdrop(
        &rpc_client,
        &config_offline,
        &config_offline.signers[0].pubkey(),
        100_000,
    )
    .unwrap();
    // Create stake account, identity is authority
    let stake_keypair = Keypair::new();
    let stake_account_pubkey = stake_keypair.pubkey();
    config.signers.push(&stake_keypair);
    config.command = CliCommand::CreateStakeAccount {
        stake_account: 1, // index into config.signers: the stake keypair
        seed: None,
        staker: None,
        withdrawer: None,
        withdrawer_signer: None,
        lockup: Lockup::default(),
        amount: SpendAmount::Some(50_000),
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        from: 0,
    };
    process_command(&config).unwrap();
    // Assign new online stake authority
    let online_authority = Keypair::new();
    let online_authority_pubkey = online_authority.pubkey();
    config.signers.pop(); // drop the stake keypair; default signer is authority
    config.command = CliCommand::StakeAuthorize {
        stake_account_pubkey,
        new_authorizations: vec![StakeAuthorizationIndexed {
            authorization_type: StakeAuthorize::Staker,
            new_authority_pubkey: online_authority_pubkey,
            authority: 0,
            new_authority_signer: None,
        }],
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::default(),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        custodian: None,
        no_wait: false,
    };
    process_command(&config).unwrap();
    // Confirm the on-chain staker authority was updated.
    let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
    let stake_state: StakeState = stake_account.state().unwrap();
    let current_authority = match stake_state {
        StakeState::Initialized(meta) => meta.authorized.staker,
        _ => panic!("Unexpected stake state!"),
    };
    assert_eq!(current_authority, online_authority_pubkey);
    // Assign new online stake and withdraw authorities
    let online_authority2 = Keypair::new();
    let online_authority2_pubkey = online_authority2.pubkey();
    let withdraw_authority = Keypair::new();
    let withdraw_authority_pubkey = withdraw_authority.pubkey();
    config.signers.push(&online_authority); // current staker must co-sign
    config.command = CliCommand::StakeAuthorize {
        stake_account_pubkey,
        new_authorizations: vec![
            StakeAuthorizationIndexed {
                authorization_type: StakeAuthorize::Staker,
                new_authority_pubkey: online_authority2_pubkey,
                authority: 1, // online_authority signs the staker change
                new_authority_signer: None,
            },
            StakeAuthorizationIndexed {
                authorization_type: StakeAuthorize::Withdrawer,
                new_authority_pubkey: withdraw_authority_pubkey,
                authority: 0, // default signer is still the withdrawer
                new_authority_signer: None,
            },
        ],
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::default(),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        custodian: None,
        no_wait: false,
    };
    process_command(&config).unwrap();
    // Both authorities should have changed in a single transaction.
    let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
    let stake_state: StakeState = stake_account.state().unwrap();
    let (current_staker, current_withdrawer) = match stake_state {
        StakeState::Initialized(meta) => (meta.authorized.staker, meta.authorized.withdrawer),
        _ => panic!("Unexpected stake state!"),
    };
    assert_eq!(current_staker, online_authority2_pubkey);
    assert_eq!(current_withdrawer, withdraw_authority_pubkey);
    // Assign new offline stake authority
    config.signers.pop();
    config.signers.push(&online_authority2);
    config.command = CliCommand::StakeAuthorize {
        stake_account_pubkey,
        new_authorizations: vec![StakeAuthorizationIndexed {
            authorization_type: StakeAuthorize::Staker,
            new_authority_pubkey: offline_authority_pubkey,
            authority: 1,
            new_authority_signer: None,
        }],
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::default(),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        custodian: None,
        no_wait: false,
    };
    process_command(&config).unwrap();
    let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
    let stake_state: StakeState = stake_account.state().unwrap();
    let current_authority = match stake_state {
        StakeState::Initialized(meta) => meta.authorized.staker,
        _ => panic!("Unexpected stake state!"),
    };
    assert_eq!(current_authority, offline_authority_pubkey);
    // Offline assignment of new nonced stake authority
    let nonced_authority = Keypair::new();
    let nonced_authority_pubkey = nonced_authority.pubkey();
    let blockhash = rpc_client.get_latest_blockhash().unwrap();
    // Sign-only pass: produce signatures offline against a pinned blockhash.
    config_offline.command = CliCommand::StakeAuthorize {
        stake_account_pubkey,
        new_authorizations: vec![StakeAuthorizationIndexed {
            authorization_type: StakeAuthorize::Staker,
            new_authority_pubkey: nonced_authority_pubkey,
            authority: 0,
            new_authority_signer: None,
        }],
        sign_only: true,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::None(blockhash),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        custodian: None,
        no_wait: false,
    };
    config_offline.output_format = OutputFormat::JsonCompact;
    let sign_reply = process_command(&config_offline).unwrap();
    let sign_only = parse_sign_only_reply_string(&sign_reply);
    assert!(sign_only.has_all_signers());
    // Replay online using the offline signature as a presigner.
    let offline_presigner = sign_only.presigner_of(&offline_authority_pubkey).unwrap();
    config.signers = vec![&offline_presigner];
    config.command = CliCommand::StakeAuthorize {
        stake_account_pubkey,
        new_authorizations: vec![StakeAuthorizationIndexed {
            authorization_type: StakeAuthorize::Staker,
            new_authority_pubkey: nonced_authority_pubkey,
            authority: 0,
            new_authority_signer: None,
        }],
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::FeeCalculator(blockhash_query::Source::Cluster, blockhash),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        custodian: None,
        no_wait: false,
    };
    process_command(&config).unwrap();
    let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
    let stake_state: StakeState = stake_account.state().unwrap();
    let current_authority = match stake_state {
        StakeState::Initialized(meta) => meta.authorized.staker,
        _ => panic!("Unexpected stake state!"),
    };
    assert_eq!(current_authority, nonced_authority_pubkey);
    // Create nonce account
    let minimum_nonce_balance = rpc_client
        .get_minimum_balance_for_rent_exemption(NonceState::size())
        .unwrap();
    let nonce_account = Keypair::new();
    config.signers = vec![&default_signer, &nonce_account];
    config.command = CliCommand::CreateNonceAccount {
        nonce_account: 1,
        seed: None,
        nonce_authority: Some(offline_authority_pubkey),
        memo: None,
        amount: SpendAmount::Some(minimum_nonce_balance),
    };
    process_command(&config).unwrap();
    // Fetch nonce hash
    let nonce_hash = nonce_utils::get_account_with_commitment(
        &rpc_client,
        &nonce_account.pubkey(),
        CommitmentConfig::processed(),
    )
    .and_then(|ref a| nonce_utils::data_from_account(a))
    .unwrap()
    .blockhash;
    // Nonced assignment of new online stake authority
    let online_authority = Keypair::new();
    let online_authority_pubkey = online_authority.pubkey();
    config_offline.signers.push(&nonced_authority);
    config_offline.command = CliCommand::StakeAuthorize {
        stake_account_pubkey,
        new_authorizations: vec![StakeAuthorizationIndexed {
            authorization_type: StakeAuthorize::Staker,
            new_authority_pubkey: online_authority_pubkey,
            authority: 1, // nonced_authority is the current staker
            new_authority_signer: None,
        }],
        sign_only: true,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::None(nonce_hash),
        nonce_account: Some(nonce_account.pubkey()),
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        custodian: None,
        no_wait: false,
    };
    let sign_reply = process_command(&config_offline).unwrap();
    let sign_only = parse_sign_only_reply_string(&sign_reply);
    assert!(sign_only.has_all_signers());
    // The sign-only reply must carry the durable nonce as its blockhash.
    assert_eq!(sign_only.blockhash, nonce_hash);
    let offline_presigner = sign_only.presigner_of(&offline_authority_pubkey).unwrap();
    let nonced_authority_presigner = sign_only.presigner_of(&nonced_authority_pubkey).unwrap();
    config.signers = vec![&offline_presigner, &nonced_authority_presigner];
    config.command = CliCommand::StakeAuthorize {
        stake_account_pubkey,
        new_authorizations: vec![StakeAuthorizationIndexed {
            authorization_type: StakeAuthorize::Staker,
            new_authority_pubkey: online_authority_pubkey,
            authority: 1,
            new_authority_signer: None,
        }],
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::FeeCalculator(
            blockhash_query::Source::NonceAccount(nonce_account.pubkey()),
            sign_only.blockhash,
        ),
        nonce_account: Some(nonce_account.pubkey()),
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        custodian: None,
        no_wait: false,
    };
    process_command(&config).unwrap();
    let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
    let stake_state: StakeState = stake_account.state().unwrap();
    let current_authority = match stake_state {
        StakeState::Initialized(meta) => meta.authorized.staker,
        _ => panic!("Unexpected stake state!"),
    };
    assert_eq!(current_authority, online_authority_pubkey);
    // Submitting the nonced transaction must have advanced the stored nonce.
    let new_nonce_hash = nonce_utils::get_account_with_commitment(
        &rpc_client,
        &nonce_account.pubkey(),
        CommitmentConfig::processed(),
    )
    .and_then(|ref a| nonce_utils::data_from_account(a))
    .unwrap()
    .blockhash;
    assert_ne!(nonce_hash, new_nonce_hash);
}
/// Verifies fee-payer selection for `CliCommand::StakeAuthorize` on a
/// validator charging a fixed per-signature fee (`SIG_FEE`):
/// 1. authorize with a separate online fee payer and check that only the
///    payer's balance is debited;
/// 2. authorize via an offline signer that acts as both authority and fee
///    payer, and check it pays exactly one signature fee.
#[test]
fn test_stake_authorize_with_fee_payer() {
    solana_logger::setup();
    const SIG_FEE: u64 = 42;
    // Validator configured to charge SIG_FEE lamports per signature.
    let mint_keypair = Keypair::new();
    let mint_pubkey = mint_keypair.pubkey();
    let faucet_addr = run_local_faucet(mint_keypair, None);
    let test_validator = TestValidator::with_custom_fees(
        mint_pubkey,
        SIG_FEE,
        Some(faucet_addr),
        SocketAddrSpace::Unspecified,
    );
    let rpc_client =
        RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
    let default_signer = Keypair::new();
    let default_pubkey = default_signer.pubkey();
    let mut config = CliConfig::recent_for_tests();
    config.json_rpc_url = test_validator.rpc_url();
    config.signers = vec![&default_signer];
    // Separate online config that will act as an independent fee payer.
    let payer_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
    let mut config_payer = CliConfig::recent_for_tests();
    config_payer.signers = vec![&payer_keypair];
    config_payer.json_rpc_url = test_validator.rpc_url();
    let payer_pubkey = config_payer.signers[0].pubkey();
    // Offline config: empty RPC URL makes cluster commands fail.
    let mut config_offline = CliConfig::recent_for_tests();
    let offline_signer = Keypair::new();
    config_offline.signers = vec![&offline_signer];
    config_offline.json_rpc_url = String::new();
    let offline_pubkey = config_offline.signers[0].pubkey();
    // Verify we're offline
    config_offline.command = CliCommand::ClusterVersion;
    process_command(&config_offline).unwrap_err();
    // Fund all three parties and confirm their starting balances.
    request_and_confirm_airdrop(&rpc_client, &config, &default_pubkey, 100_000).unwrap();
    check_recent_balance(100_000, &rpc_client, &config.signers[0].pubkey());
    request_and_confirm_airdrop(&rpc_client, &config_payer, &payer_pubkey, 100_000).unwrap();
    check_recent_balance(100_000, &rpc_client, &payer_pubkey);
    request_and_confirm_airdrop(&rpc_client, &config_offline, &offline_pubkey, 100_000).unwrap();
    check_recent_balance(100_000, &rpc_client, &offline_pubkey);
    check_ready(&rpc_client);
    // Create stake account, identity is authority
    let stake_keypair = Keypair::new();
    let stake_account_pubkey = stake_keypair.pubkey();
    config.signers.push(&stake_keypair);
    config.command = CliCommand::CreateStakeAccount {
        stake_account: 1,
        seed: None,
        staker: None,
        withdrawer: None,
        withdrawer_signer: None,
        lockup: Lockup::default(),
        amount: SpendAmount::Some(50_000),
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        from: 0,
    };
    process_command(&config).unwrap();
    // `config` balance should be 50,000 - 1 stake account sig - 1 fee sig
    check_recent_balance(50_000 - SIG_FEE - SIG_FEE, &rpc_client, &default_pubkey);
    // Assign authority with separate fee payer
    config.signers = vec![&default_signer, &payer_keypair];
    config.command = CliCommand::StakeAuthorize {
        stake_account_pubkey,
        new_authorizations: vec![StakeAuthorizationIndexed {
            authorization_type: StakeAuthorize::Staker,
            new_authority_pubkey: offline_pubkey,
            authority: 0,
            new_authority_signer: None,
        }],
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 1, // payer_keypair covers the fees
        custodian: None,
        no_wait: false,
    };
    process_command(&config).unwrap();
    // `config` balance has not changed, despite submitting the TX
    check_recent_balance(50_000 - SIG_FEE - SIG_FEE, &rpc_client, &default_pubkey);
    // `config_payer` however has paid `config`'s authority sig
    // and `config_payer`'s fee sig
    check_recent_balance(100_000 - SIG_FEE - SIG_FEE, &rpc_client, &payer_pubkey);
    // Assign authority with offline fee payer
    let blockhash = rpc_client.get_latest_blockhash().unwrap();
    config_offline.command = CliCommand::StakeAuthorize {
        stake_account_pubkey,
        new_authorizations: vec![StakeAuthorizationIndexed {
            authorization_type: StakeAuthorize::Staker,
            new_authority_pubkey: payer_pubkey,
            authority: 0,
            new_authority_signer: None,
        }],
        sign_only: true,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::None(blockhash),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        custodian: None,
        no_wait: false,
    };
    config_offline.output_format = OutputFormat::JsonCompact;
    let sign_reply = process_command(&config_offline).unwrap();
    let sign_only = parse_sign_only_reply_string(&sign_reply);
    assert!(sign_only.has_all_signers());
    // Replay online with the offline signature acting as sole signer.
    let offline_presigner = sign_only.presigner_of(&offline_pubkey).unwrap();
    config.signers = vec![&offline_presigner];
    config.command = CliCommand::StakeAuthorize {
        stake_account_pubkey,
        new_authorizations: vec![StakeAuthorizationIndexed {
            authorization_type: StakeAuthorize::Staker,
            new_authority_pubkey: payer_pubkey,
            authority: 0,
            new_authority_signer: None,
        }],
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::FeeCalculator(blockhash_query::Source::Cluster, blockhash),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        custodian: None,
        no_wait: false,
    };
    process_command(&config).unwrap();
    // `config`'s balance again has not changed
    check_recent_balance(50_000 - SIG_FEE - SIG_FEE, &rpc_client, &default_pubkey);
    // `config_offline` however has paid 1 sig due to being both authority
    // and fee payer
    check_recent_balance(100_000 - SIG_FEE, &rpc_client, &offline_pubkey);
}
/// Exercises `CliCommand::SplitStake` via a nonced, offline-signed flow:
/// create a stake account worth 10x the rent-exempt minimum with an offline
/// staker/withdrawer, sign the split offline against a durable nonce, replay
/// it online, then verify the lamports moved 8x/2x between the two accounts.
#[test]
fn test_stake_split() {
    solana_logger::setup();
    // Validator charging 1 lamport per signature.
    let mint_keypair = Keypair::new();
    let mint_pubkey = mint_keypair.pubkey();
    let faucet_addr = run_local_faucet(mint_keypair, None);
    let test_validator = TestValidator::with_custom_fees(
        mint_pubkey,
        1,
        Some(faucet_addr),
        SocketAddrSpace::Unspecified,
    );
    let rpc_client =
        RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
    let default_signer = Keypair::new();
    let offline_signer = Keypair::new();
    let mut config = CliConfig::recent_for_tests();
    config.json_rpc_url = test_validator.rpc_url();
    config.signers = vec![&default_signer];
    // Offline config: no RPC URL, so only sign-only operations succeed.
    let mut config_offline = CliConfig::recent_for_tests();
    config_offline.json_rpc_url = String::default();
    config_offline.signers = vec![&offline_signer];
    let offline_pubkey = config_offline.signers[0].pubkey();
    // Verify we're offline
    config_offline.command = CliCommand::ClusterVersion;
    process_command(&config_offline).unwrap_err();
    request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 500_000)
        .unwrap();
    check_recent_balance(500_000, &rpc_client, &config.signers[0].pubkey());
    request_and_confirm_airdrop(&rpc_client, &config_offline, &offline_pubkey, 100_000).unwrap();
    check_recent_balance(100_000, &rpc_client, &offline_pubkey);
    // Create stake account, identity is authority
    let minimum_stake_balance = rpc_client
        .get_minimum_balance_for_rent_exemption(std::mem::size_of::<StakeState>())
        .unwrap();
    let stake_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
    let stake_account_pubkey = stake_keypair.pubkey();
    config.signers.push(&stake_keypair);
    config.command = CliCommand::CreateStakeAccount {
        stake_account: 1,
        seed: None,
        // Both authorities are the offline signer, so the split below
        // must be signed offline.
        staker: Some(offline_pubkey),
        withdrawer: Some(offline_pubkey),
        withdrawer_signer: None,
        lockup: Lockup::default(),
        amount: SpendAmount::Some(10 * minimum_stake_balance),
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        from: 0,
    };
    process_command(&config).unwrap();
    check_recent_balance(
        10 * minimum_stake_balance,
        &rpc_client,
        &stake_account_pubkey,
    );
    // Create nonce account
    let minimum_nonce_balance = rpc_client
        .get_minimum_balance_for_rent_exemption(NonceState::size())
        .unwrap();
    let nonce_account = keypair_from_seed(&[1u8; 32]).unwrap();
    config.signers = vec![&default_signer, &nonce_account];
    config.command = CliCommand::CreateNonceAccount {
        nonce_account: 1,
        seed: None,
        nonce_authority: Some(offline_pubkey),
        memo: None,
        amount: SpendAmount::Some(minimum_nonce_balance),
    };
    process_command(&config).unwrap();
    check_recent_balance(minimum_nonce_balance, &rpc_client, &nonce_account.pubkey());
    // Fetch nonce hash
    let nonce_hash = nonce_utils::get_account_with_commitment(
        &rpc_client,
        &nonce_account.pubkey(),
        CommitmentConfig::processed(),
    )
    .and_then(|ref a| nonce_utils::data_from_account(a))
    .unwrap()
    .blockhash;
    // Nonced offline split
    let split_account = keypair_from_seed(&[2u8; 32]).unwrap();
    check_recent_balance(0, &rpc_client, &split_account.pubkey());
    config_offline.signers.push(&split_account);
    config_offline.command = CliCommand::SplitStake {
        stake_account_pubkey,
        stake_authority: 0,
        sign_only: true,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::None(nonce_hash),
        nonce_account: Some(nonce_account.pubkey()),
        nonce_authority: 0,
        memo: None,
        split_stake_account: 1,
        seed: None,
        lamports: 2 * minimum_stake_balance,
        fee_payer: 0,
    };
    config_offline.output_format = OutputFormat::JsonCompact;
    let sig_response = process_command(&config_offline).unwrap();
    let sign_only = parse_sign_only_reply_string(&sig_response);
    assert!(sign_only.has_all_signers());
    // Replay online: offline authority as presigner, split account signs live.
    let offline_presigner = sign_only.presigner_of(&offline_pubkey).unwrap();
    config.signers = vec![&offline_presigner, &split_account];
    config.command = CliCommand::SplitStake {
        stake_account_pubkey,
        stake_authority: 0,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::FeeCalculator(
            blockhash_query::Source::NonceAccount(nonce_account.pubkey()),
            sign_only.blockhash,
        ),
        nonce_account: Some(nonce_account.pubkey()),
        nonce_authority: 0,
        memo: None,
        split_stake_account: 1,
        seed: None,
        lamports: 2 * minimum_stake_balance,
        fee_payer: 0,
    };
    process_command(&config).unwrap();
    // 2x of the original 10x moved into the split account.
    check_recent_balance(
        8 * minimum_stake_balance,
        &rpc_client,
        &stake_account_pubkey,
    );
    check_recent_balance(
        2 * minimum_stake_balance,
        &rpc_client,
        &split_account.pubkey(),
    );
}
/// Exercises `CliCommand::StakeSetLockup` through successive custodian
/// handoffs: set lockup as the initial custodian, rotate the custodian to a
/// new online keypair, rotate again to an offline pubkey, and finally apply
/// a lockup change via nonced offline signing. Each step verifies the
/// on-chain lockup fields.
#[test]
fn test_stake_set_lockup() {
    solana_logger::setup();
    // Validator charging 1 lamport per signature.
    let mint_keypair = Keypair::new();
    let mint_pubkey = mint_keypair.pubkey();
    let faucet_addr = run_local_faucet(mint_keypair, None);
    let test_validator = TestValidator::with_custom_fees(
        mint_pubkey,
        1,
        Some(faucet_addr),
        SocketAddrSpace::Unspecified,
    );
    let rpc_client =
        RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
    let default_signer = Keypair::new();
    let offline_signer = Keypair::new();
    let mut config = CliConfig::recent_for_tests();
    config.json_rpc_url = test_validator.rpc_url();
    config.signers = vec![&default_signer];
    let mut config_offline = CliConfig::recent_for_tests();
    config_offline.json_rpc_url = String::default();
    config_offline.signers = vec![&offline_signer];
    let offline_pubkey = config_offline.signers[0].pubkey();
    // Verify we're offline
    config_offline.command = CliCommand::ClusterVersion;
    process_command(&config_offline).unwrap_err();
    request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 500_000)
        .unwrap();
    check_recent_balance(500_000, &rpc_client, &config.signers[0].pubkey());
    request_and_confirm_airdrop(&rpc_client, &config_offline, &offline_pubkey, 100_000).unwrap();
    check_recent_balance(100_000, &rpc_client, &offline_pubkey);
    // Create stake account, identity is authority
    let minimum_stake_balance = rpc_client
        .get_minimum_balance_for_rent_exemption(std::mem::size_of::<StakeState>())
        .unwrap();
    let stake_keypair = keypair_from_seed(&[0u8; 32]).unwrap();
    let stake_account_pubkey = stake_keypair.pubkey();
    // Initial lockup: the default signer is custodian.
    let lockup = Lockup {
        custodian: config.signers[0].pubkey(),
        ..Lockup::default()
    };
    config.signers.push(&stake_keypair);
    config.command = CliCommand::CreateStakeAccount {
        stake_account: 1,
        seed: None,
        staker: Some(offline_pubkey),
        withdrawer: Some(config.signers[0].pubkey()),
        withdrawer_signer: None,
        lockup,
        amount: SpendAmount::Some(10 * minimum_stake_balance),
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        from: 0,
    };
    process_command(&config).unwrap();
    check_recent_balance(
        10 * minimum_stake_balance,
        &rpc_client,
        &stake_account_pubkey,
    );
    // Online set lockup
    let lockup = LockupArgs {
        unix_timestamp: Some(1_581_534_570),
        epoch: Some(200),
        custodian: None, // custodian unchanged
    };
    config.signers.pop(); // drop the stake keypair; custodian signs at index 0
    config.command = CliCommand::StakeSetLockup {
        stake_account_pubkey,
        lockup,
        new_custodian_signer: None,
        custodian: 0,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::default(),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
    };
    process_command(&config).unwrap();
    // Verify timestamp/epoch changed and custodian stayed the default signer.
    let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
    let stake_state: StakeState = stake_account.state().unwrap();
    let current_lockup = match stake_state {
        StakeState::Initialized(meta) => meta.lockup,
        _ => panic!("Unexpected stake state!"),
    };
    assert_eq!(
        current_lockup.unix_timestamp,
        lockup.unix_timestamp.unwrap()
    );
    assert_eq!(current_lockup.epoch, lockup.epoch.unwrap());
    assert_eq!(current_lockup.custodian, config.signers[0].pubkey());
    // Set custodian to another pubkey
    let online_custodian = Keypair::new();
    let online_custodian_pubkey = online_custodian.pubkey();
    let lockup = LockupArgs {
        unix_timestamp: Some(1_581_534_571),
        epoch: Some(201),
        custodian: Some(online_custodian_pubkey),
    };
    config.command = CliCommand::StakeSetLockup {
        stake_account_pubkey,
        lockup,
        new_custodian_signer: None,
        custodian: 0,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::default(),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
    };
    process_command(&config).unwrap();
    // Now the new custodian must sign further lockup changes.
    let lockup = LockupArgs {
        unix_timestamp: Some(1_581_534_572),
        epoch: Some(202),
        custodian: None,
    };
    config.signers = vec![&default_signer, &online_custodian];
    config.command = CliCommand::StakeSetLockup {
        stake_account_pubkey,
        lockup,
        new_custodian_signer: None,
        custodian: 1, // online_custodian signs
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::default(),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
    };
    process_command(&config).unwrap();
    let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
    let stake_state: StakeState = stake_account.state().unwrap();
    let current_lockup = match stake_state {
        StakeState::Initialized(meta) => meta.lockup,
        _ => panic!("Unexpected stake state!"),
    };
    assert_eq!(
        current_lockup.unix_timestamp,
        lockup.unix_timestamp.unwrap()
    );
    assert_eq!(current_lockup.epoch, lockup.epoch.unwrap());
    assert_eq!(current_lockup.custodian, online_custodian_pubkey);
    // Set custodian to offline pubkey
    let lockup = LockupArgs {
        unix_timestamp: Some(1_581_534_573),
        epoch: Some(203),
        custodian: Some(offline_pubkey),
    };
    config.command = CliCommand::StakeSetLockup {
        stake_account_pubkey,
        lockup,
        new_custodian_signer: None,
        custodian: 1,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::default(),
        nonce_account: None,
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
    };
    process_command(&config).unwrap();
    // Create nonce account
    let minimum_nonce_balance = rpc_client
        .get_minimum_balance_for_rent_exemption(NonceState::size())
        .unwrap();
    let nonce_account = keypair_from_seed(&[1u8; 32]).unwrap();
    let nonce_account_pubkey = nonce_account.pubkey();
    config.signers = vec![&default_signer, &nonce_account];
    config.command = CliCommand::CreateNonceAccount {
        nonce_account: 1,
        seed: None,
        nonce_authority: Some(offline_pubkey),
        memo: None,
        amount: SpendAmount::Some(minimum_nonce_balance),
    };
    process_command(&config).unwrap();
    check_recent_balance(minimum_nonce_balance, &rpc_client, &nonce_account_pubkey);
    // Fetch nonce hash
    let nonce_hash = nonce_utils::get_account_with_commitment(
        &rpc_client,
        &nonce_account.pubkey(),
        CommitmentConfig::processed(),
    )
    .and_then(|ref a| nonce_utils::data_from_account(a))
    .unwrap()
    .blockhash;
    // Nonced offline set lockup
    let lockup = LockupArgs {
        unix_timestamp: Some(1_581_534_576),
        epoch: Some(222),
        custodian: None,
    };
    config_offline.command = CliCommand::StakeSetLockup {
        stake_account_pubkey,
        lockup,
        new_custodian_signer: None,
        custodian: 0,
        sign_only: true,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::None(nonce_hash),
        nonce_account: Some(nonce_account_pubkey),
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
    };
    config_offline.output_format = OutputFormat::JsonCompact;
    let sig_response = process_command(&config_offline).unwrap();
    let sign_only = parse_sign_only_reply_string(&sig_response);
    assert!(sign_only.has_all_signers());
    // Replay online using the offline custodian's presigner.
    let offline_presigner = sign_only.presigner_of(&offline_pubkey).unwrap();
    config.signers = vec![&offline_presigner];
    config.command = CliCommand::StakeSetLockup {
        stake_account_pubkey,
        lockup,
        new_custodian_signer: None,
        custodian: 0,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::FeeCalculator(
            blockhash_query::Source::NonceAccount(nonce_account_pubkey),
            sign_only.blockhash,
        ),
        nonce_account: Some(nonce_account_pubkey),
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
    };
    process_command(&config).unwrap();
    let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
    let stake_state: StakeState = stake_account.state().unwrap();
    let current_lockup = match stake_state {
        StakeState::Initialized(meta) => meta.lockup,
        _ => panic!("Unexpected stake state!"),
    };
    assert_eq!(
        current_lockup.unix_timestamp,
        lockup.unix_timestamp.unwrap()
    );
    assert_eq!(current_lockup.epoch, lockup.epoch.unwrap());
    assert_eq!(current_lockup.custodian, offline_pubkey);
}
/// Exercises the fully offline + durable-nonce workflow for stake accounts:
/// 1. create a stake account with an offline fee payer via a nonced,
///    sign-only transaction replayed online;
/// 2. withdraw 42 lamports from it the same way;
/// 3. create a second stake account derived with a seed, again nonced and
///    offline-signed, and verify the derived address was funded.
/// The nonce hash is re-fetched before each nonced transaction because the
/// previous submission advances it.
#[test]
fn test_offline_nonced_create_stake_account_and_withdraw() {
    solana_logger::setup();
    let mint_keypair = Keypair::new();
    let mint_pubkey = mint_keypair.pubkey();
    let faucet_addr = run_local_faucet(mint_keypair, None);
    let test_validator =
        TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
    let rpc_client =
        RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
    let mut config = CliConfig::recent_for_tests();
    let default_signer = keypair_from_seed(&[1u8; 32]).unwrap();
    config.signers = vec![&default_signer];
    config.json_rpc_url = test_validator.rpc_url();
    // Offline config: empty RPC URL, deterministic keypair.
    let mut config_offline = CliConfig::recent_for_tests();
    let offline_signer = keypair_from_seed(&[2u8; 32]).unwrap();
    config_offline.signers = vec![&offline_signer];
    let offline_pubkey = config_offline.signers[0].pubkey();
    config_offline.json_rpc_url = String::default();
    config_offline.command = CliCommand::ClusterVersion;
    // Verify that we cannot reach the cluster
    process_command(&config_offline).unwrap_err();
    request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 200_000)
        .unwrap();
    check_recent_balance(200_000, &rpc_client, &config.signers[0].pubkey());
    request_and_confirm_airdrop(&rpc_client, &config_offline, &offline_pubkey, 100_000).unwrap();
    check_recent_balance(100_000, &rpc_client, &offline_pubkey);
    // Create nonce account
    let minimum_nonce_balance = rpc_client
        .get_minimum_balance_for_rent_exemption(NonceState::size())
        .unwrap();
    let nonce_account = keypair_from_seed(&[3u8; 32]).unwrap();
    let nonce_pubkey = nonce_account.pubkey();
    config.signers.push(&nonce_account);
    config.command = CliCommand::CreateNonceAccount {
        nonce_account: 1,
        seed: None,
        // The offline signer controls the nonce, so it can sign nonced
        // transactions without cluster access.
        nonce_authority: Some(offline_pubkey),
        memo: None,
        amount: SpendAmount::Some(minimum_nonce_balance),
    };
    process_command(&config).unwrap();
    // Fetch nonce hash
    let nonce_hash = nonce_utils::get_account_with_commitment(
        &rpc_client,
        &nonce_account.pubkey(),
        CommitmentConfig::processed(),
    )
    .and_then(|ref a| nonce_utils::data_from_account(a))
    .unwrap()
    .blockhash;
    // Create stake account offline
    let stake_keypair = keypair_from_seed(&[4u8; 32]).unwrap();
    let stake_pubkey = stake_keypair.pubkey();
    config_offline.signers.push(&stake_keypair);
    config_offline.command = CliCommand::CreateStakeAccount {
        stake_account: 1,
        seed: None,
        staker: None,
        withdrawer: None,
        withdrawer_signer: None,
        lockup: Lockup::default(),
        amount: SpendAmount::Some(50_000),
        sign_only: true,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::None(nonce_hash),
        nonce_account: Some(nonce_pubkey),
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        from: 0,
    };
    config_offline.output_format = OutputFormat::JsonCompact;
    let sig_response = process_command(&config_offline).unwrap();
    let sign_only = parse_sign_only_reply_string(&sig_response);
    assert!(sign_only.has_all_signers());
    // Replay online: both the payer and the new stake account sign via
    // presigners extracted from the offline reply.
    let offline_presigner = sign_only.presigner_of(&offline_pubkey).unwrap();
    let stake_presigner = sign_only.presigner_of(&stake_pubkey).unwrap();
    config.signers = vec![&offline_presigner, &stake_presigner];
    config.command = CliCommand::CreateStakeAccount {
        stake_account: 1,
        seed: None,
        staker: Some(offline_pubkey),
        withdrawer: None,
        withdrawer_signer: None,
        lockup: Lockup::default(),
        amount: SpendAmount::Some(50_000),
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::FeeCalculator(
            blockhash_query::Source::NonceAccount(nonce_pubkey),
            sign_only.blockhash,
        ),
        nonce_account: Some(nonce_pubkey),
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        from: 0,
    };
    process_command(&config).unwrap();
    check_recent_balance(50_000, &rpc_client, &stake_pubkey);
    // Fetch nonce hash
    let nonce_hash = nonce_utils::get_account_with_commitment(
        &rpc_client,
        &nonce_account.pubkey(),
        CommitmentConfig::processed(),
    )
    .and_then(|ref a| nonce_utils::data_from_account(a))
    .unwrap()
    .blockhash;
    // Offline, nonced stake-withdraw
    let recipient = keypair_from_seed(&[5u8; 32]).unwrap();
    let recipient_pubkey = recipient.pubkey();
    config_offline.signers.pop(); // stake keypair no longer needs to sign
    config_offline.command = CliCommand::WithdrawStake {
        stake_account_pubkey: stake_pubkey,
        destination_account_pubkey: recipient_pubkey,
        amount: SpendAmount::Some(42),
        withdraw_authority: 0,
        custodian: None,
        sign_only: true,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::None(nonce_hash),
        nonce_account: Some(nonce_pubkey),
        nonce_authority: 0,
        memo: None,
        seed: None,
        fee_payer: 0,
    };
    let sig_response = process_command(&config_offline).unwrap();
    let sign_only = parse_sign_only_reply_string(&sig_response);
    let offline_presigner = sign_only.presigner_of(&offline_pubkey).unwrap();
    config.signers = vec![&offline_presigner];
    config.command = CliCommand::WithdrawStake {
        stake_account_pubkey: stake_pubkey,
        destination_account_pubkey: recipient_pubkey,
        amount: SpendAmount::Some(42),
        withdraw_authority: 0,
        custodian: None,
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::FeeCalculator(
            blockhash_query::Source::NonceAccount(nonce_pubkey),
            sign_only.blockhash,
        ),
        nonce_account: Some(nonce_pubkey),
        nonce_authority: 0,
        memo: None,
        seed: None,
        fee_payer: 0,
    };
    process_command(&config).unwrap();
    check_recent_balance(42, &rpc_client, &recipient_pubkey);
    // Fetch nonce hash
    let nonce_hash = nonce_utils::get_account_with_commitment(
        &rpc_client,
        &nonce_account.pubkey(),
        CommitmentConfig::processed(),
    )
    .and_then(|ref a| nonce_utils::data_from_account(a))
    .unwrap()
    .blockhash;
    // Create another stake account. This time with seed
    let seed = "seedy";
    config_offline.signers = vec![&offline_signer, &stake_keypair];
    config_offline.command = CliCommand::CreateStakeAccount {
        stake_account: 1,
        seed: Some(seed.to_string()),
        staker: None,
        withdrawer: None,
        withdrawer_signer: None,
        lockup: Lockup::default(),
        amount: SpendAmount::Some(50_000),
        sign_only: true,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::None(nonce_hash),
        nonce_account: Some(nonce_pubkey),
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        from: 0,
    };
    let sig_response = process_command(&config_offline).unwrap();
    let sign_only = parse_sign_only_reply_string(&sig_response);
    let offline_presigner = sign_only.presigner_of(&offline_pubkey).unwrap();
    let stake_presigner = sign_only.presigner_of(&stake_pubkey).unwrap();
    config.signers = vec![&offline_presigner, &stake_presigner];
    config.command = CliCommand::CreateStakeAccount {
        stake_account: 1,
        seed: Some(seed.to_string()),
        staker: Some(offline_pubkey),
        withdrawer: Some(offline_pubkey),
        withdrawer_signer: None,
        lockup: Lockup::default(),
        amount: SpendAmount::Some(50_000),
        sign_only: false,
        dump_transaction_message: false,
        blockhash_query: BlockhashQuery::FeeCalculator(
            blockhash_query::Source::NonceAccount(nonce_pubkey),
            sign_only.blockhash,
        ),
        nonce_account: Some(nonce_pubkey),
        nonce_authority: 0,
        memo: None,
        fee_payer: 0,
        from: 0,
    };
    process_command(&config).unwrap();
    // The funded account lives at the seed-derived address, not stake_pubkey.
    let seed_address =
        Pubkey::create_with_seed(&stake_pubkey, seed, &stake::program::id()).unwrap();
    check_recent_balance(50_000, &rpc_client, &seed_address);
}
#[test]
fn test_stake_checked_instructions() {
solana_logger::setup();
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator =
TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let default_signer = Keypair::new();
let mut config = CliConfig::recent_for_tests();
config.json_rpc_url = test_validator.rpc_url();
config.signers = vec![&default_signer];
request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 100_000)
.unwrap();
// Create stake account with withdrawer
let stake_keypair = Keypair::new();
let stake_account_pubkey = stake_keypair.pubkey();
let withdrawer_keypair = Keypair::new();
let withdrawer_pubkey = withdrawer_keypair.pubkey();
config.signers.push(&stake_keypair);
config.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: None,
withdrawer: Some(withdrawer_pubkey),
withdrawer_signer: Some(1),
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config).unwrap_err(); // unsigned authority should fail
config.signers = vec![&default_signer, &stake_keypair, &withdrawer_keypair];
config.command = CliCommand::CreateStakeAccount {
stake_account: 1,
seed: None,
staker: None,
withdrawer: Some(withdrawer_pubkey),
withdrawer_signer: Some(1),
lockup: Lockup::default(),
amount: SpendAmount::Some(50_000),
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
from: 0,
};
process_command(&config).unwrap();
// Re-authorize account, checking new authority
let staker_keypair = Keypair::new();
let staker_pubkey = staker_keypair.pubkey();
config.signers = vec![&default_signer];
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: staker_pubkey,
authority: 0,
new_authority_signer: Some(0),
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap_err(); // unsigned authority should fail
config.signers = vec![&default_signer, &staker_keypair];
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Staker,
new_authority_pubkey: staker_pubkey,
authority: 0,
new_authority_signer: Some(1),
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_authority = match stake_state {
StakeState::Initialized(meta) => meta.authorized.staker,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(current_authority, staker_pubkey);
let new_withdrawer_keypair = Keypair::new();
let new_withdrawer_pubkey = new_withdrawer_keypair.pubkey();
config.signers = vec![&default_signer, &withdrawer_keypair];
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Withdrawer,
new_authority_pubkey: new_withdrawer_pubkey,
authority: 1,
new_authority_signer: Some(1),
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap_err(); // unsigned authority should fail
config.signers = vec![
&default_signer,
&withdrawer_keypair,
&new_withdrawer_keypair,
];
config.command = CliCommand::StakeAuthorize {
stake_account_pubkey,
new_authorizations: vec![StakeAuthorizationIndexed {
authorization_type: StakeAuthorize::Withdrawer,
new_authority_pubkey: new_withdrawer_pubkey,
authority: 1,
new_authority_signer: Some(2),
}],
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
custodian: None,
no_wait: false,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_authority = match stake_state {
StakeState::Initialized(meta) => meta.authorized.withdrawer,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(current_authority, new_withdrawer_pubkey);
// Set lockup, checking new custodian
let custodian = Keypair::new();
let custodian_pubkey = custodian.pubkey();
let lockup = LockupArgs {
unix_timestamp: Some(1_581_534_570),
epoch: Some(200),
custodian: Some(custodian_pubkey),
};
config.signers = vec![&default_signer, &new_withdrawer_keypair];
config.command = CliCommand::StakeSetLockup {
stake_account_pubkey,
lockup,
new_custodian_signer: Some(1),
custodian: 1,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap_err(); // unsigned new custodian should fail
config.signers = vec![&default_signer, &new_withdrawer_keypair, &custodian];
config.command = CliCommand::StakeSetLockup {
stake_account_pubkey,
lockup,
new_custodian_signer: Some(2),
custodian: 1,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::default(),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
let stake_account = rpc_client.get_account(&stake_account_pubkey).unwrap();
let stake_state: StakeState = stake_account.state().unwrap();
let current_lockup = match stake_state {
StakeState::Initialized(meta) => meta.lockup,
_ => panic!("Unexpected stake state!"),
};
assert_eq!(
current_lockup.unix_timestamp,
lockup.unix_timestamp.unwrap()
);
assert_eq!(current_lockup.epoch, lockup.epoch.unwrap());
assert_eq!(current_lockup.custodian, custodian_pubkey);
}
| 35.919005 | 100 | 0.66618 |
61f1694b00fbdde9c87d99c74a5bd70f267ecc57 | 1,193 | mod error;
pub use self::error::*;
#[cfg(any(feature = "cuda", feature = "opencl"))]
mod locks;
#[cfg(any(feature = "cuda", feature = "opencl"))]
pub use self::locks::*;
#[cfg(any(feature = "cuda", feature = "opencl"))]
mod program;
#[cfg(any(feature = "cuda", feature = "opencl"))]
mod sources;
#[cfg(any(feature = "cuda", feature = "opencl"))]
pub use self::sources::*;
#[cfg(any(feature = "cuda", feature = "opencl"))]
mod utils;
#[cfg(any(feature = "cuda", feature = "opencl"))]
pub use self::utils::*;
#[cfg(any(feature = "cuda", feature = "opencl"))]
mod fft;
#[cfg(any(feature = "cuda", feature = "opencl"))]
pub use self::fft::*;
#[cfg(any(feature = "cuda", feature = "opencl"))]
mod multiexp;
#[cfg(any(feature = "cuda", feature = "opencl"))]
pub use self::multiexp::*;
#[cfg(not(any(feature = "cuda", feature = "opencl")))]
mod nogpu;
#[cfg(not(any(feature = "cuda", feature = "opencl")))]
pub use self::nogpu::*;
#[cfg(any(feature = "cuda", feature = "opencl"))]
pub use ec_gpu::GpuEngine;
#[cfg(not(any(feature = "cuda", feature = "opencl")))]
pub trait GpuEngine {}
#[cfg(not(any(feature = "cuda", feature = "opencl")))]
impl<E: pairing::Engine> GpuEngine for E {}
| 23.86 | 54 | 0.618609 |
7524cd7bb88e050649189e78fd6a144fb38d6871 | 998 | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
//
// @generated SignedSource<<99557c73e87f54ab473364550b9313df>>
//
// To regenerate this file, run:
// hphp/hack/src/oxidized_regen.sh
use arena_trait::TrivialDrop;
use eq_modulo_pos::EqModuloPos;
use no_pos_hash::NoPosHash;
use ocamlrep_derive::FromOcamlRep;
use ocamlrep_derive::FromOcamlRepIn;
use ocamlrep_derive::ToOcamlRep;
use serde::Deserialize;
use serde::Serialize;
#[allow(unused_imports)]
use crate::*;
#[derive(
Clone,
Copy,
Debug,
Deserialize,
Eq,
EqModuloPos,
FromOcamlRep,
FromOcamlRepIn,
Hash,
NoPosHash,
Ord,
PartialEq,
PartialOrd,
Serialize,
ToOcamlRep
)]
pub enum KindOfType {
TClass = 0,
TTypedef = 1,
TRecordDef = 2,
}
impl TrivialDrop for KindOfType {}
arena_deserializer::impl_deserialize_in_arena!(KindOfType);
| 21.234043 | 66 | 0.715431 |
1844ca43e2c28e0d5ea5dfd458bcc52751778e3e | 988 | use derivative::Derivative;
use pest::Span;
pub(crate) fn span_into_str(span: Span) -> &str {
span.as_str()
}
pub(crate) fn parse_u64(span: Span) -> u64 {
let input = span.as_str();
input
.parse()
.map_err(|e| {
log::error!("Failed to read `{}` as u64: {}", input, e);
e
})
.unwrap()
}
#[derive(Derivative, Clone, PartialEq)]
#[derivative(Debug = "transparent")]
pub struct Debug<'src> {
pub content: &'src str,
}
impl<'a> ::from_pest::FromPest<'a> for Debug<'a> {
type Rule = crate::xkb::Rule;
type FatalError = ::from_pest::Void;
fn from_pest(
pest: &mut ::from_pest::pest::iterators::Pairs<'a, Self::Rule>,
) -> ::std::result::Result<Self, ::from_pest::ConversionError<::from_pest::Void>> {
let mut clone = pest.clone();
let pair = clone.next().ok_or(::from_pest::ConversionError::NoMatch)?;
*pest = clone;
Ok(Debug { content: pair.as_str() })
}
}
| 26 | 87 | 0.571862 |
67a76c220be9b178786a5e824de0f45cc2ea0acb | 19,228 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
block_storage::{block_tree::BlockTree, BlockReader},
counters,
persistent_liveness_storage::{
PersistentLivenessStorage, RecoveryData, RootInfo, RootMetadata,
},
state_replication::StateComputer,
util::time_service::duration_since_epoch,
};
use anyhow::{bail, ensure, format_err, Context};
use consensus_types::{
block::Block, common::Payload, executed_block::ExecutedBlock, quorum_cert::QuorumCert,
sync_info::SyncInfo, timeout_certificate::TimeoutCertificate,
};
use debug_interface::prelude::*;
use executor_types::StateComputeResult;
use libra_crypto::HashValue;
use libra_logger::prelude::*;
#[cfg(any(test, feature = "fuzzing"))]
use libra_types::epoch_state::EpochState;
use libra_types::{ledger_info::LedgerInfoWithSignatures, transaction::TransactionStatus};
use std::{
collections::vec_deque::VecDeque,
sync::{Arc, RwLock},
time::Duration,
};
use termion::color::*;
#[cfg(test)]
#[path = "block_store_test.rs"]
mod block_store_test;
#[path = "sync_manager.rs"]
pub mod sync_manager;
fn update_counters_for_committed_blocks<T>(blocks_to_commit: &[Arc<ExecutedBlock<T>>]) {
for block in blocks_to_commit {
if let Some(time_to_commit) =
duration_since_epoch().checked_sub(Duration::from_micros(block.timestamp_usecs()))
{
counters::CREATION_TO_COMMIT_S.observe_duration(time_to_commit);
}
let txn_status = block.compute_result().compute_status();
counters::NUM_TXNS_PER_BLOCK.observe(txn_status.len() as f64);
counters::COMMITTED_BLOCKS_COUNT.inc();
counters::LAST_COMMITTED_ROUND.set(block.round() as i64);
for status in txn_status.iter() {
match status {
TransactionStatus::Keep(_) => {
counters::COMMITTED_TXNS_COUNT
.with_label_values(&["success"])
.inc();
}
TransactionStatus::Discard(_) => {
counters::COMMITTED_TXNS_COUNT
.with_label_values(&["failed"])
.inc();
}
// TODO(zekunli): add counter
TransactionStatus::Retry => (),
}
}
}
}
/// Responsible for maintaining all the blocks of payload and the dependencies of those blocks
/// (parent and previous QC links). It is expected to be accessed concurrently by multiple threads
/// and is thread-safe.
///
/// Example tree block structure based on parent links.
/// ╭--> A3
/// Genesis--> B0--> B1--> B2--> B3
/// ╰--> C1--> C2
/// ╰--> D3
///
/// Example corresponding tree block structure for the QC links (must follow QC constraints).
/// ╭--> A3
/// Genesis--> B0--> B1--> B2--> B3
/// ├--> C1
/// ├--------> C2
/// ╰--------------> D3
pub struct BlockStore<T> {
inner: Arc<RwLock<BlockTree<T>>>,
state_computer: Arc<dyn StateComputer<Payload = T>>,
/// The persistent storage backing up the in-memory data structure, every write should go
/// through this before in-memory tree.
storage: Arc<dyn PersistentLivenessStorage<T>>,
}
impl<T: Payload> BlockStore<T> {
pub fn new(
storage: Arc<dyn PersistentLivenessStorage<T>>,
initial_data: RecoveryData<T>,
state_computer: Arc<dyn StateComputer<Payload = T>>,
max_pruned_blocks_in_mem: usize,
) -> Self {
let highest_tc = initial_data.highest_timeout_certificate();
let (root, root_metadata, blocks, quorum_certs) = initial_data.take();
Self::build(
root,
root_metadata,
blocks,
quorum_certs,
highest_tc,
state_computer,
storage,
max_pruned_blocks_in_mem,
)
}
fn build(
root: RootInfo<T>,
root_metadata: RootMetadata,
blocks: Vec<Block<T>>,
quorum_certs: Vec<QuorumCert>,
highest_timeout_cert: Option<TimeoutCertificate>,
state_computer: Arc<dyn StateComputer<Payload = T>>,
storage: Arc<dyn PersistentLivenessStorage<T>>,
max_pruned_blocks_in_mem: usize,
) -> Self {
let RootInfo(root_block, root_qc, root_li) = root;
//verify root is correct
assert_eq!(
root_qc.certified_block().version(),
root_metadata.version(),
"root qc version {} doesn't match committed trees {}",
root_qc.certified_block().version(),
root_metadata.version(),
);
assert_eq!(
root_qc.certified_block().executed_state_id(),
root_metadata.accu_hash,
"root qc state id {} doesn't match committed trees {}",
root_qc.certified_block().executed_state_id(),
root_metadata.accu_hash,
);
let executed_root_block = ExecutedBlock::new(
root_block,
// Create a dummy state_compute_result with necessary fields filled in.
StateComputeResult::new(
root_metadata.accu_hash,
root_metadata.frozen_root_hashes,
root_metadata.num_leaves, /* num_leaves */
None, /* epoch_state */
vec![], /* compute_status */
vec![], /* transaction_info_hashes */
),
);
let tree = BlockTree::new(
executed_root_block,
root_qc,
root_li,
max_pruned_blocks_in_mem,
highest_timeout_cert.map(Arc::new),
);
let block_store = Self {
inner: Arc::new(RwLock::new(tree)),
state_computer,
storage,
};
for block in blocks {
block_store
.execute_and_insert_block(block)
.unwrap_or_else(|e| {
panic!("[BlockStore] failed to insert block during build {:?}", e)
});
}
for qc in quorum_certs {
block_store
.insert_single_quorum_cert(qc)
.unwrap_or_else(|e| {
panic!("[BlockStore] failed to insert quorum during build{:?}", e)
});
}
block_store
}
/// Commit the given block id with the proof, returns () on success or error
pub async fn commit(&self, finality_proof: LedgerInfoWithSignatures) -> anyhow::Result<()> {
let block_id_to_commit = finality_proof.ledger_info().consensus_block_id();
let block_to_commit = self
.get_block(block_id_to_commit)
.ok_or_else(|| format_err!("Committed block id not found"))?;
// First make sure that this commit is new.
ensure!(
block_to_commit.round() > self.root().round(),
"Committed block round lower than root"
);
let blocks_to_commit = self
.path_from_root(block_id_to_commit)
.unwrap_or_else(Vec::new);
for block in &blocks_to_commit {
end_trace!("commit", {"block", block.id()});
}
self.state_computer
.commit(
blocks_to_commit.iter().map(|b| b.id()).collect(),
finality_proof,
)
.await
.expect("Failed to persist commit");
update_counters_for_committed_blocks(&blocks_to_commit);
debug!("{}Committed{} {}", Fg(Blue), Fg(Reset), *block_to_commit);
event!("committed",
"block_id": block_to_commit.id().short_str(),
"round": block_to_commit.round(),
"parent_id": block_to_commit.parent_id().short_str(),
);
self.prune_tree(block_to_commit.id());
Ok(())
}
pub async fn rebuild(
&self,
root: RootInfo<T>,
root_metadata: RootMetadata,
blocks: Vec<Block<T>>,
quorum_certs: Vec<QuorumCert>,
) {
let max_pruned_blocks_in_mem = self.inner.read().unwrap().max_pruned_blocks_in_mem();
// Rollover the previous highest TC from the old tree to the new one.
let prev_htc = self.highest_timeout_cert().map(|tc| tc.as_ref().clone());
let BlockStore { inner, .. } = Self::build(
root,
root_metadata,
blocks,
quorum_certs,
prev_htc,
Arc::clone(&self.state_computer),
Arc::clone(&self.storage),
max_pruned_blocks_in_mem,
);
let to_remove = self.inner.read().unwrap().get_all_block_id();
if let Err(e) = self.storage.prune_tree(to_remove) {
// it's fine to fail here, the next restart will try to clean up dangling blocks again.
error!("fail to delete block: {:?}", e);
}
// Unwrap the new tree and replace the existing tree.
*self.inner.write().unwrap() = Arc::try_unwrap(inner)
.unwrap_or_else(|_| panic!("New block tree is not shared"))
.into_inner()
.unwrap();
// If we fail to commit B_i via state computer and crash, after restart our highest commit cert
// will not match the latest commit B_j(j<i) of state computer.
// This introduces an inconsistent state if we send out SyncInfo and others try to sync to
// B_i and figure out we only have B_j.
// Here we commit up to the highest_commit_cert to maintain highest_commit_cert == state_computer.committed_trees.
if self.highest_commit_cert().commit_info().round() > self.root().round() {
let finality_proof = self.highest_commit_cert().ledger_info().clone();
if let Err(e) = self.commit(finality_proof).await {
warn!("{:?}", e);
}
}
}
/// Execute and insert a block if it passes all validation tests.
/// Returns the Arc to the block kept in the block store after persisting it to storage
///
/// This function assumes that the ancestors are present (returns MissingParent otherwise).
///
/// Duplicate inserts will return the previously inserted block (
/// note that it is considered a valid non-error case, for example, it can happen if a validator
/// receives a certificate for a block that is currently being added).
pub fn execute_and_insert_block(
&self,
block: Block<T>,
) -> anyhow::Result<Arc<ExecutedBlock<T>>> {
if let Some(existing_block) = self.get_block(block.id()) {
return Ok(existing_block);
}
let executed_block = self.execute_block(block)?;
self.storage
.save_tree(vec![executed_block.block().clone()], vec![])
.context("Insert block failed when saving block")?;
self.inner.write().unwrap().insert_block(executed_block)
}
fn execute_block(&self, block: Block<T>) -> anyhow::Result<ExecutedBlock<T>> {
trace_code_block!("block_store::execute_block", {"block", block.id()});
ensure!(
self.inner.read().unwrap().root().round() < block.round(),
"Block with old round"
);
let parent_block = self
.get_block(block.parent_id())
.ok_or_else(|| format_err!("Block with missing parent {}", block.parent_id()))?;
// Reconfiguration rule - if a block is a child of pending reconfiguration, it needs to be empty
// So we roll over the executed state until it's committed and we start new epoch.
let state_compute_result = if parent_block.compute_result().has_reconfiguration() {
StateComputeResult::new(
parent_block.compute_result().root_hash(),
parent_block.compute_result().frozen_subtree_roots().clone(),
parent_block.compute_result().num_leaves(),
parent_block.compute_result().epoch_state().clone(),
vec![], /* compute_status */
vec![], /* transaction_info_hashes */
)
} else {
// Although NIL blocks don't have payload, we still send a T::default() to compute
// because we may inject a block prologue transaction.
self.state_computer
.compute(&block, parent_block.id())
.with_context(|| format!("Execution failure for block {}", block))?
};
Ok(ExecutedBlock::new(block, state_compute_result))
}
/// Validates quorum certificates and inserts it into block tree assuming dependencies exist.
pub fn insert_single_quorum_cert(&self, qc: QuorumCert) -> anyhow::Result<()> {
// If the parent block is not the root block (i.e not None), ensure the executed state
// of a block is consistent with its QuorumCert, otherwise persist the QuorumCert's
// state and on restart, a new execution will agree with it. A new execution will match
// the QuorumCert's state on the next restart will work if there is a memory
// corruption, for example.
match self.get_block(qc.certified_block().id()) {
Some(executed_block) => {
ensure!(
executed_block.block_info() == *qc.certified_block(),
"QC for block {} has different {:?} than local {:?}",
qc.certified_block().id(),
qc.certified_block(),
executed_block.block_info()
);
}
None => bail!("Insert {} without having the block in store first", qc),
}
self.storage
.save_tree(vec![], vec![qc.clone()])
.context("Insert block failed when saving quorum")?;
self.inner.write().unwrap().insert_quorum_cert(qc)
}
/// Replace the highest timeout certificate in case the given one has a higher round.
/// In case a timeout certificate is updated, persist it to storage.
pub fn insert_timeout_certificate(&self, tc: Arc<TimeoutCertificate>) -> anyhow::Result<()> {
let cur_tc_round = self.highest_timeout_cert().map_or(0, |tc| tc.round());
if tc.round() <= cur_tc_round {
return Ok(());
}
self.storage
.save_highest_timeout_cert(tc.as_ref().clone())
.context("Timeout certificate insert failed when persisting to DB")?;
self.inner.write().unwrap().replace_timeout_cert(tc);
Ok(())
}
/// Prune the tree up to next_root_id (keep next_root_id's block). Any branches not part of
/// the next_root_id's tree should be removed as well.
///
/// For example, root = B0
/// B0--> B1--> B2
/// ╰--> B3--> B4
///
/// prune_tree(B3) should be left with
/// B3--> B4, root = B3
///
/// Returns the block ids of the blocks removed.
fn prune_tree(&self, next_root_id: HashValue) -> VecDeque<HashValue> {
let id_to_remove = self
.inner
.read()
.unwrap()
.find_blocks_to_prune(next_root_id);
if let Err(e) = self
.storage
.prune_tree(id_to_remove.clone().into_iter().collect())
{
// it's fine to fail here, as long as the commit succeeds, the next restart will clean
// up dangling blocks, and we need to prune the tree to keep the root consistent with
// executor.
error!("fail to delete block: {:?}", e);
}
self.inner
.write()
.unwrap()
.process_pruned_blocks(next_root_id, id_to_remove.clone());
id_to_remove
}
}
impl<T: Payload> BlockReader for BlockStore<T> {
type Payload = T;
fn block_exists(&self, block_id: HashValue) -> bool {
self.inner.read().unwrap().block_exists(&block_id)
}
fn get_block(&self, block_id: HashValue) -> Option<Arc<ExecutedBlock<T>>> {
self.inner.read().unwrap().get_block(&block_id)
}
fn root(&self) -> Arc<ExecutedBlock<T>> {
self.inner.read().unwrap().root()
}
fn get_quorum_cert_for_block(&self, block_id: HashValue) -> Option<Arc<QuorumCert>> {
self.inner
.read()
.unwrap()
.get_quorum_cert_for_block(&block_id)
}
fn path_from_root(&self, block_id: HashValue) -> Option<Vec<Arc<ExecutedBlock<T>>>> {
self.inner.read().unwrap().path_from_root(block_id)
}
fn highest_certified_block(&self) -> Arc<ExecutedBlock<Self::Payload>> {
self.inner.read().unwrap().highest_certified_block()
}
fn highest_quorum_cert(&self) -> Arc<QuorumCert> {
self.inner.read().unwrap().highest_quorum_cert()
}
fn highest_commit_cert(&self) -> Arc<QuorumCert> {
self.inner.read().unwrap().highest_commit_cert()
}
fn highest_timeout_cert(&self) -> Option<Arc<TimeoutCertificate>> {
self.inner.read().unwrap().highest_timeout_cert()
}
fn sync_info(&self) -> SyncInfo {
SyncInfo::new(
self.highest_quorum_cert().as_ref().clone(),
self.highest_commit_cert().as_ref().clone(),
self.highest_timeout_cert().map(|tc| tc.as_ref().clone()),
)
}
}
#[cfg(any(test, feature = "fuzzing"))]
impl<T: Payload> BlockStore<T> {
/// Returns the number of blocks in the tree
pub(crate) fn len(&self) -> usize {
self.inner.read().unwrap().len()
}
/// Returns the number of child links in the tree
pub(crate) fn child_links(&self) -> usize {
self.inner.read().unwrap().child_links()
}
/// The number of pruned blocks that are still available in memory
pub(super) fn pruned_blocks_in_mem(&self) -> usize {
self.inner.read().unwrap().pruned_blocks_in_mem()
}
/// Helper function to insert the block with the qc together
pub fn insert_block_with_qc(&self, block: Block<T>) -> anyhow::Result<Arc<ExecutedBlock<T>>> {
self.insert_single_quorum_cert(block.quorum_cert().clone())?;
Ok(self.execute_and_insert_block(block)?)
}
/// Helper function to insert a reconfiguration block
pub fn insert_reconfiguration_block(
&self,
block: Block<T>,
) -> anyhow::Result<Arc<ExecutedBlock<T>>> {
self.insert_single_quorum_cert(block.quorum_cert().clone())?;
let executed_block = self.execute_block(block)?;
let compute_result = executed_block.compute_result();
Ok(self
.inner
.write()
.unwrap()
.insert_block(ExecutedBlock::new(
executed_block.block().clone(),
StateComputeResult::new(
compute_result.root_hash(),
compute_result.frozen_subtree_roots().clone(),
compute_result.num_leaves(),
Some(EpochState::empty()),
compute_result.compute_status().clone(),
compute_result.transaction_info_hashes().clone(),
),
))?)
}
}
| 39.160896 | 122 | 0.588309 |
483ed100f1d9b6d34f7dd82c3e15f648a6a39ab1 | 2,142 | #[macro_use]
extern crate log;
pub(crate) mod file_changes;
pub(crate) mod file_watcher;
pub(crate) mod lsp_features;
pub(crate) mod lsp_model;
pub(crate) mod lsp_server;
pub(crate) mod text_encoding;
use std::env::ArgsOs;
use std::path::PathBuf;
fn get_help() -> String {
format!(
r#"{name} {version}
USAGE:
{name} [OPTIONS] [SUBCOMMAND]
EXAMPLE:
{name} --hsp3 "C:/hsp3"
OPTIONS:
-h, --help Print help
-V, --version Print version
--hsp3 HSP3 インストールディレクトリ"#,
name = "hsp3-forgery-lsp",
version = get_version()
)
}
fn get_version() -> &'static str {
env!("CARGO_PKG_VERSION")
}
fn exit_with_help() -> ! {
eprintln!("{}", get_help());
std::process::exit(1)
}
fn exit_with_version() -> ! {
eprintln!("{}", get_version());
std::process::exit(1)
}
fn switch_on_args(mut args: ArgsOs) -> Result<(), String> {
// 最初の引数は自身のパスなので無視する。
args.next();
let mut hsp_root = None;
let mut help = false;
let mut version = false;
while let Some(arg) = args.next() {
match arg.into_string().unwrap_or("".to_string()).as_str() {
"-h" | "--help" | "help" => {
help = true;
break;
}
"-V" | "--version" | "version" => {
version = true;
break;
}
"--hsp3" => match args.next() {
None => return Err("--hsp3 の引数がありません。".to_string()),
Some(arg) => {
hsp_root = Some(arg);
}
},
arg => return Err(format!("不明な引数: {:?}", arg)),
}
}
if help {
exit_with_help()
} else if version {
exit_with_version()
} else {
let hsp_root = PathBuf::from(hsp_root.expect("--hsp3 引数は省略できません。"));
crate::lsp_server::lsp_main::start_lsp_server(hsp_root)
}
}
pub fn main() {
match switch_on_args(std::env::args_os()) {
Ok(()) => {}
Err(err) => {
eprintln!("{:?}", err);
std::process::exit(1)
}
}
}
| 22.787234 | 76 | 0.5 |
b97ce1eed6aebfa123c340ddeb6f206da9cb954c | 422 | use clap::Parser;
#[derive(Parser)]
#[clap(name = "MyApp")]
#[clap(author = "Kevin K. <[email protected]>")]
#[clap(version = "1.0")]
#[clap(about = "Does awesome things", long_about = None)]
struct Cli {
#[clap(long, value_parser)]
two: String,
#[clap(long, value_parser)]
one: String,
}
fn main() {
let cli = Cli::parse();
println!("two: {:?}", cli.two);
println!("one: {:?}", cli.one);
}
| 20.095238 | 57 | 0.57109 |
01fb009ed78b3c88c49de1c48323bcfb3625967c | 14,805 | use crate::abi_digester::{AbiDigester, DigestError, DigestResult};
use log::*;
use serde::Serialize;
use std::any::type_name;
pub trait AbiExample: Sized {
fn example() -> Self;
}
// Following code snippets are copied and adapted from the official rustc implementation to
// implement AbiExample trait for most of basic types.
// These are licensed under Apache-2.0 + MIT (compatible because we're Apache-2.0)
// Source: https://github.com/rust-lang/rust/blob/ba18875557aabffe386a2534a1aa6118efb6ab88/src/libcore/tuple.rs#L7
macro_rules! tuple_example_impls {
($(
$Tuple:ident {
$(($idx:tt) -> $T:ident)+
}
)+) => {
$(
impl<$($T:AbiExample),+> AbiExample for ($($T,)+) {
fn example() -> Self {
($({ let x: $T = AbiExample::example(); x},)+)
}
}
)+
}
}
// Source: https://github.com/rust-lang/rust/blob/ba18875557aabffe386a2534a1aa6118efb6ab88/src/libcore/tuple.rs#L110
tuple_example_impls! {
Tuple1 {
(0) -> A
}
Tuple2 {
(0) -> A
(1) -> B
}
Tuple3 {
(0) -> A
(1) -> B
(2) -> C
}
Tuple4 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
}
Tuple5 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
}
Tuple6 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
}
Tuple7 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
}
Tuple8 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
}
Tuple9 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
}
Tuple10 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
}
Tuple11 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
}
Tuple12 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
}
}
// Source: https://github.com/rust-lang/rust/blob/ba18875557aabffe386a2534a1aa6118efb6ab88/src/libcore/array/mod.rs#L417
macro_rules! array_example_impls {
{$n:expr, $t:ident $($ts:ident)*} => {
impl<T> AbiExample for [T; $n] where T: AbiExample {
fn example() -> Self {
[$t::example(), $($ts::example()),*]
}
}
array_example_impls!{($n - 1), $($ts)*}
};
{$n:expr,} => {
impl<T> AbiExample for [T; $n] {
fn example() -> Self { [] }
}
};
}
array_example_impls! {32, T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T}
// Source: https://github.com/rust-lang/rust/blob/ba18875557aabffe386a2534a1aa6118efb6ab88/src/libcore/default.rs#L137
macro_rules! example_impls {
($t:ty, $v:expr) => {
impl AbiExample for $t {
fn example() -> Self {
$v
}
}
};
}
example_impls! { (), () }
example_impls! { bool, false }
example_impls! { char, '\x00' }
example_impls! { usize, 0 }
example_impls! { u8, 0 }
example_impls! { u16, 0 }
example_impls! { u32, 0 }
example_impls! { u64, 0 }
example_impls! { u128, 0 }
example_impls! { isize, 0 }
example_impls! { i8, 0 }
example_impls! { i16, 0 }
example_impls! { i32, 0 }
example_impls! { i64, 0 }
example_impls! { i128, 0 }
example_impls! { f32, 0.0f32 }
example_impls! { f64, 0.0f64 }
example_impls! { String, String::new() }
example_impls! { std::time::Duration, std::time::Duration::from_secs(0) }
example_impls! { std::sync::Once, std::sync::Once::new() }
use std::sync::atomic::*;
// Source: https://github.com/rust-lang/rust/blob/ba18875557aabffe386a2534a1aa6118efb6ab88/src/libcore/sync/atomic.rs#L1199
macro_rules! atomic_example_impls {
($atomic_type: ident) => {
impl AbiExample for $atomic_type {
fn example() -> Self {
Self::new(AbiExample::example())
}
}
};
}
atomic_example_impls! { AtomicU8 }
atomic_example_impls! { AtomicU16 }
atomic_example_impls! { AtomicU32 }
atomic_example_impls! { AtomicU64 }
atomic_example_impls! { AtomicUsize }
atomic_example_impls! { AtomicI8 }
atomic_example_impls! { AtomicI16 }
atomic_example_impls! { AtomicI32 }
atomic_example_impls! { AtomicI64 }
atomic_example_impls! { AtomicIsize }
atomic_example_impls! { AtomicBool }
#[cfg(not(target_arch = "bpf"))]
use generic_array::{ArrayLength, GenericArray};
// `GenericArray` and `BitVec` examples are simply their `Default` values.
#[cfg(not(target_arch = "bpf"))]
impl<T: Default, U: ArrayLength<T>> AbiExample for GenericArray<T, U> {
    fn example() -> Self {
        Self::default()
    }
}
use bv::{BitVec, BlockType};
impl<T: BlockType> AbiExample for BitVec<T> {
    fn example() -> Self {
        Self::default()
    }
}
// `BitVec` is digested by serializing the value directly and treated as an
// opaque type (see the `IgnoreAsHelper`/`EvenAsOpaque` `&T` impls below).
impl<T: BlockType> IgnoreAsHelper for BitVec<T> {}
impl<T: BlockType> EvenAsOpaque for BitVec<T> {}
/// Strips every `&` from a type name, e.g. `"&mut Foo"` becomes `"mut Foo"`.
pub(crate) fn normalize_type_name(type_name: &str) -> String {
    type_name.replace('&', "")
}
type Placeholder = ();
// Blanket fallback: any `Sized` type without a dedicated `AbiExample` impl is
// routed through `TypeErasedExample`, which panics at runtime with guidance.
// Relies on (nightly) specialization via `default fn`.
impl<T: Sized> AbiExample for T {
    default fn example() -> Self {
        <Placeholder>::type_erased_example()
    }
}
// this works like a type erasure and a hatch to escape type error to runtime error
trait TypeErasedExample<T> {
    fn type_erased_example() -> T;
}
impl<T: Sized> TypeErasedExample<T> for Placeholder {
    default fn type_erased_example() -> T {
        panic!(
            "derive or implement AbiExample/AbiEnumVisitor for {}",
            type_name::<T>()
        );
    }
}
// More-specific case for `Default + Serialize` types: still panics, but the
// message distinguishes types whose normalized name starts with "analog"
// (NOTE(review): presumably this workspace's crate prefix — confirm) from
// unrecognized foreign types.
impl<T: Default + Serialize> TypeErasedExample<T> for Placeholder {
    default fn type_erased_example() -> T {
        let original_type_name = type_name::<T>();
        let normalized_type_name = normalize_type_name(original_type_name);
        if normalized_type_name.starts_with("analog") {
            panic!(
                "derive or implement AbiExample/AbiEnumVisitor for {}",
                original_type_name
            );
        } else {
            panic!(
                "new unrecognized type for ABI digest!: {}",
                original_type_name
            )
        }
    }
}
// `AbiExample` for containers/wrappers: build an example of the inner type(s)
// and wrap it. The `info!` lines trace which impl fired during digesting.
impl<T: AbiExample> AbiExample for Option<T> {
    fn example() -> Self {
        info!("AbiExample for (Option<T>): {}", type_name::<Self>());
        Some(T::example())
    }
}
impl<O: AbiExample, E: AbiExample> AbiExample for Result<O, E> {
    fn example() -> Self {
        info!("AbiExample for (Result<O, E>): {}", type_name::<Self>());
        Ok(O::example())
    }
}
impl<T: AbiExample> AbiExample for Box<T> {
    fn example() -> Self {
        info!("AbiExample for (Box<T>): {}", type_name::<Self>());
        Box::new(T::example())
    }
}
// Boxed closures carry no serializable data; a no-op closure is the example.
impl<T> AbiExample for Box<dyn Fn(&mut T) + Sync + Send> {
    fn example() -> Self {
        info!("AbiExample for (Box<T>): {}", type_name::<Self>());
        Box::new(move |_t: &mut T| {})
    }
}
impl<T, U> AbiExample for Box<dyn Fn(&mut T, U) + Sync + Send> {
    fn example() -> Self {
        info!("AbiExample for (Box<T, U>): {}", type_name::<Self>());
        Box::new(move |_t: &mut T, _u: U| {})
    }
}
impl<T: AbiExample> AbiExample for Box<[T]> {
    fn example() -> Self {
        info!("AbiExample for (Box<[T]>): {}", type_name::<Self>());
        Box::new([T::example()])
    }
}
impl<T: AbiExample> AbiExample for std::marker::PhantomData<T> {
    fn example() -> Self {
        info!("AbiExample for (PhantomData<T>): {}", type_name::<Self>());
        <std::marker::PhantomData<T>>::default()
    }
}
impl<T: AbiExample> AbiExample for std::sync::Arc<T> {
    fn example() -> Self {
        info!("AbiExample for (Arc<T>): {}", type_name::<Self>());
        std::sync::Arc::new(T::example())
    }
}
impl<T: AbiExample> AbiExample for std::rc::Rc<T> {
    fn example() -> Self {
        info!("AbiExample for (Rc<T>): {}", type_name::<Self>());
        std::rc::Rc::new(T::example())
    }
}
impl<T: AbiExample> AbiExample for std::sync::Mutex<T> {
    fn example() -> Self {
        info!("AbiExample for (Mutex<T>): {}", type_name::<Self>());
        std::sync::Mutex::new(T::example())
    }
}
impl<T: AbiExample> AbiExample for std::sync::RwLock<T> {
    fn example() -> Self {
        info!("AbiExample for (RwLock<T>): {}", type_name::<Self>());
        std::sync::RwLock::new(T::example())
    }
}
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet, VecDeque};
// Collection examples hold exactly one example element (or key/value pair) so
// that nested element types are digested too.
impl<
    T: std::cmp::Eq + std::hash::Hash + AbiExample,
    S: AbiExample,
    H: std::hash::BuildHasher + Default,
> AbiExample for HashMap<T, S, H>
{
    fn example() -> Self {
        info!("AbiExample for (HashMap<T, S, H>): {}", type_name::<Self>());
        let mut map = HashMap::default();
        map.insert(T::example(), S::example());
        map
    }
}
impl<T: std::cmp::Ord + AbiExample, S: AbiExample> AbiExample for BTreeMap<T, S> {
    fn example() -> Self {
        info!("AbiExample for (BTreeMap<T, S>): {}", type_name::<Self>());
        let mut map = BTreeMap::default();
        map.insert(T::example(), S::example());
        map
    }
}
impl<T: AbiExample> AbiExample for Vec<T> {
    fn example() -> Self {
        info!("AbiExample for (Vec<T>): {}", type_name::<Self>());
        vec![T::example()]
    }
}
impl<T: AbiExample> AbiExample for VecDeque<T> {
    fn example() -> Self {
        info!("AbiExample for (Vec<T>): {}", type_name::<Self>());
        VecDeque::from(vec![T::example()])
    }
}
impl<T: std::cmp::Eq + std::hash::Hash + AbiExample, H: std::hash::BuildHasher + Default> AbiExample
    for HashSet<T, H>
{
    fn example() -> Self {
        info!("AbiExample for (HashSet<T, H>): {}", type_name::<Self>());
        let mut set: HashSet<T, H> = HashSet::default();
        set.insert(T::example());
        set
    }
}
impl<T: std::cmp::Ord + AbiExample> AbiExample for BTreeSet<T> {
    fn example() -> Self {
        info!("AbiExample for (BTreeSet<T>): {}", type_name::<Self>());
        let mut set: BTreeSet<T> = BTreeSet::default();
        set.insert(T::example());
        set
    }
}
// Not available on BPF targets.
#[cfg(not(target_arch = "bpf"))]
impl AbiExample for memmap2::MmapMut {
    fn example() -> Self {
        memmap2::MmapMut::map_anon(1).expect("failed to map the data file")
    }
}
#[cfg(not(target_arch = "bpf"))]
impl AbiExample for std::path::PathBuf {
    fn example() -> Self {
        std::path::PathBuf::from(String::example())
    }
}
use std::net::{IpAddr, Ipv4Addr, SocketAddr};
impl AbiExample for SocketAddr {
    fn example() -> Self {
        SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0)
    }
}
// This is a control flow indirection needed for digesting all variants of an enum
pub trait AbiEnumVisitor: Serialize {
    fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult;
}
/// Marker: digest by serializing `self` directly instead of `T::example()`
/// (see the `&T` impls below).
pub trait IgnoreAsHelper {}
/// Marker: additionally digest the value as an opaque blob scoped by the
/// first path segment of its type name (see the `&T` impl below).
pub trait EvenAsOpaque {}
// Catch-all (least specific) impl: reaching it at runtime is a bug, because a
// more specific impl should have been selected by specialization.
impl<T: Serialize + ?Sized> AbiEnumVisitor for T {
    default fn visit_for_abi(&self, _digester: &mut AbiDigester) -> DigestResult {
        unreachable!(
            "AbiEnumVisitor must be implemented for {}",
            type_name::<T>()
        );
    }
}
impl<T: Serialize + ?Sized + AbiExample> AbiEnumVisitor for T {
    default fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult {
        info!("AbiEnumVisitor for (default): {}", type_name::<T>());
        T::example()
            .serialize(digester.create_new())
            .map_err(DigestError::wrap_by_type::<T>)
    }
}
// even (experimental) rust specialization isn't enough for us, resort to
// the autoref hack: https://github.com/dtolnay/case-studies/blob/master/autoref-specialization/README.md
// relevant test: TestVecEnum
impl<T: Serialize + ?Sized + AbiEnumVisitor> AbiEnumVisitor for &T {
    default fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult {
        info!("AbiEnumVisitor for (&default): {}", type_name::<T>());
        // Don't call self.visit_for_abi(...) to avoid the infinite recursion!
        T::visit_for_abi(self, digester)
    }
}
// force to call self.serialize instead of T::visit_for_abi() for serialization
// helper structs like ad-hoc iterator `struct`s
impl<T: Serialize + IgnoreAsHelper> AbiEnumVisitor for &T {
    default fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult {
        info!("AbiEnumVisitor for (IgnoreAsHelper): {}", type_name::<T>());
        self.serialize(digester.create_new())
            .map_err(DigestError::wrap_by_type::<T>)
    }
}
// force to call self.serialize instead of T::visit_for_abi() to work around the
// inability of implementing AbiExample for private structs from other crates
impl<T: Serialize + IgnoreAsHelper + EvenAsOpaque> AbiEnumVisitor for &T {
    default fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult {
        info!("AbiEnumVisitor for (IgnoreAsOpaque): {}", type_name::<T>());
        // The first path segment of the type name scopes the opaque digest.
        let top_scope = type_name::<T>().split("::").next().unwrap();
        self.serialize(digester.create_new_opaque(top_scope))
            .map_err(DigestError::wrap_by_type::<T>)
    }
}
// Because Option and Result enums are so common enums, provide generic trait implementations
// The digesting pattern must match with what is derived from #[derive(AbiEnumVisitor)]
impl<T: AbiEnumVisitor> AbiEnumVisitor for Option<T> {
    fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult {
        info!("AbiEnumVisitor for (Option<T>): {}", type_name::<Self>());
        let variant: Self = Option::Some(T::example());
        // serde calls serialize_some(); not serialize_variant();
        // so create_new is correct, not create_enum_child or create_enum_new
        variant.serialize(digester.create_new())
    }
}
impl<O: AbiEnumVisitor, E: AbiEnumVisitor> AbiEnumVisitor for Result<O, E> {
    fn visit_for_abi(&self, digester: &mut AbiDigester) -> DigestResult {
        info!("AbiEnumVisitor for (Result<O, E>): {}", type_name::<Self>());
        digester.update(&["enum Result (variants = 2)"]);
        // Digest both variants so the ABI covers the Ok and Err payloads.
        let variant: Self = Result::Ok(O::example());
        variant.serialize(digester.create_enum_child()?)?;
        let variant: Self = Result::Err(E::example());
        variant.serialize(digester.create_enum_child()?)?;
        digester.create_child()
    }
}
| 28.307839 | 123 | 0.560149 |
67e570d5aa2d497d77f6551727e642a536308f3f | 2,168 | #[macro_use] extern crate rustler;
#[macro_use] extern crate lazy_static;
extern crate gpgme;
use rustler::{NifEnv, NifTerm};
use rustler::schedule::NifScheduleFlags;
#[macro_use] mod helpers;
#[macro_use] mod keys;
mod context;
mod results;
mod engine;
mod protocol;
mod encrypt_flags;
mod pinentry_mode;
mod sign_mode;
mod validity;
mod key_algorithm;
mod hash_algorithm;
mod notation;
// Registers the NIFs exported to the Elixir module `ExGpgme.Context`.
// Each tuple is (elixir_name, arity, rust_fn[, schedule flags]); entries with
// `NifScheduleFlags::DirtyIo` are scheduled on the VM's dirty IO schedulers.
rustler_export_nifs! {
    "Elixir.ExGpgme.Context",
    [
        ("from_protocol", 1, context::from_protocol),
        ("protocol", 1, context::protocol),
        ("armor?", 1, context::armor),
        ("set_armor", 2, context::set_armor),
        ("text_mode?", 1, context::text_mode),
        ("set_text_mode", 2, context::set_text_mode),
        ("offline?", 1, context::offline),
        ("set_offline", 2, context::set_offline),
        ("get_flag", 2, context::get_flag),
        ("set_flag", 3, context::set_flag),
        ("engine_info", 1, context::engine_info),
        ("set_engine_path", 2, context::set_engine_path),
        ("set_engine_home_dir", 2, context::set_engine_home_dir),
        ("pinentry_mode", 1, context::pinentry_mode),
        ("set_pinentry_mode", 2, context::set_pinentry_mode),
        ("import", 2, context::import, NifScheduleFlags::DirtyIo),
        ("find_key", 2, context::find_key, NifScheduleFlags::DirtyIo),
        ("delete_key", 2, context::delete_key, NifScheduleFlags::DirtyIo),
        ("delete_secret_key", 2, context::delete_secret_key, NifScheduleFlags::DirtyIo),
        ("decrypt", 2, context::decrypt, NifScheduleFlags::DirtyIo),
        ("encrypt_with_flags", 4, context::encrypt_with_flags, NifScheduleFlags::DirtyIo),
        ("sign_and_encrypt_with_flags", 4, context::sign_and_encrypt_with_flags, NifScheduleFlags::DirtyIo),
        ("sign_with_mode", 3, context::sign_with_mode, NifScheduleFlags::DirtyIo),
        ("verify_opaque", 3, context::verify_opaque, NifScheduleFlags::DirtyIo),
    ],
    Some(on_load)
}
// Library load hook: registers the NIF resource types with the Erlang VM.
// Returning `true` signals a successful load.
fn on_load<'a>(env: NifEnv<'a>, _load_info: NifTerm<'a>) -> bool {
    resource_struct_init!(context::resource::ContextNifResource, env);
    resource_struct_init!(keys::KeyResource, env);
    true
}
| 37.37931 | 108 | 0.676199 |
7a3e005fc519df26a33ca78f9ca5e7426c78e108 | 7,901 | use std::ops::{Mul, MulAssign};
use bevy::ecs::component::Component;
use bevy::math::prelude::*;
use bevy::reflect::prelude::*;
use duplicate::duplicate;
use crate::utils::NearZero;
/// Component that defines the linear and angular velocity.
///
/// It must be inserted on the same entity of a [`RigidBody`](crate::RigidBody)
///
/// The linear part is in "unit" per second on each axis, represented as a `Vec3`. (The unit, being your game unit, be it pixel or anything else)
/// The angular part is in radians per second around an axis, represented as a `Quat`.
///
/// # Example
///
/// ```
/// # use bevy::prelude::*;
/// # use heron_core::*;
/// # use std::f32::consts::PI;
///
/// fn spawn(mut commands: Commands) {
/// commands.spawn_bundle(todo!("Spawn your sprite/mesh, incl. at least a GlobalTransform"))
/// .insert(CollisionShape::Sphere { radius: 1.0 })
/// .insert(
/// Velocity::from_linear(Vec3::X * 10.0)
/// .with_angular(AxisAngle::new(Vec3::Z, 0.5 * PI))
/// );
/// }
/// ```
#[derive(Debug, Component, Copy, Clone, PartialEq, Default, Reflect)]
pub struct Velocity {
/// Linear velocity in units-per-second on each axis
pub linear: Vec3,
/// Angular velocity in radians-per-second around an axis
pub angular: AxisAngle,
}
/// Component that defines the linear and angular acceleration.
///
/// It must be inserted on the same entity of a [`RigidBody`](crate::RigidBody)
///
/// The linear part is in "unit" per second squared on each axis, represented as a `Vec3`. (The unit, being your game unit, be it pixel or anything else)
/// The angular part is in radians per second squared around an axis, represented as an [`AxisAngle`]
///
/// # Example
///
/// ```
/// # use bevy::prelude::*;
/// # use heron_core::*;
/// # use std::f32::consts::PI;
///
/// fn spawn(mut commands: Commands) {
/// commands.spawn_bundle(todo!("Spawn your sprite/mesh, incl. at least a GlobalTransform"))
/// .insert(CollisionShape::Sphere { radius: 1.0 })
/// .insert(
/// Acceleration::from_linear(Vec3::X * 1.0)
/// .with_angular(AxisAngle::new(Vec3::Z, 0.05 * PI))
/// );
/// }
/// ```
#[derive(Debug, Component, Copy, Clone, PartialEq, Default, Reflect)]
pub struct Acceleration {
/// Linear acceleration in units-per-second-squared on each axis
pub linear: Vec3,
/// Angular acceleration in radians-per-second-squared around an axis
pub angular: AxisAngle,
}
/// An [axis-angle] representation
///
/// [axis-angle]: https://en.wikipedia.org/wiki/Axis%E2%80%93angle_representation
// Stored as a single vector: its direction is the rotation axis and its length
// is the angle in radians (see `AxisAngle::new` and `AxisAngle::angle`).
#[derive(Debug, Copy, Clone, PartialEq, Default, Reflect)]
pub struct AxisAngle(Vec3);
impl Velocity {
    /// Builds a velocity that has only the given linear component.
    #[must_use]
    pub fn from_linear(linear: Vec3) -> Self {
        Self {
            angular: AxisAngle::default(),
            linear,
        }
    }

    /// Builds a velocity that has only the given angular component.
    #[must_use]
    pub fn from_angular(angular: AxisAngle) -> Self {
        Self {
            linear: Vec3::ZERO,
            angular,
        }
    }

    /// Returns this velocity with its linear component replaced.
    #[must_use]
    pub fn with_linear(self, linear: Vec3) -> Self {
        Self { linear, ..self }
    }

    /// Returns this velocity with its angular component replaced.
    #[must_use]
    pub fn with_angular(self, angular: AxisAngle) -> Self {
        Self { angular, ..self }
    }
}
impl Acceleration {
    /// Builds an acceleration that has only the given linear component.
    #[must_use]
    pub fn from_linear(linear: Vec3) -> Self {
        Self {
            angular: AxisAngle::default(),
            linear,
        }
    }

    /// Builds an acceleration that has only the given angular component.
    #[must_use]
    pub fn from_angular(angular: AxisAngle) -> Self {
        Self {
            linear: Vec3::ZERO,
            angular,
        }
    }

    /// Returns this acceleration with its linear component replaced.
    #[must_use]
    pub fn with_linear(self, linear: Vec3) -> Self {
        Self { linear, ..self }
    }

    /// Returns this acceleration with its angular component replaced.
    #[must_use]
    pub fn with_angular(self, angular: AxisAngle) -> Self {
        Self { angular, ..self }
    }
}
// The `duplicate` attribute stamps out each of the following conversion impls
// twice — once for `Velocity` and once for `Acceleration` — with identical
// bodies.
#[duplicate(
    Velocity;
    [ Velocity ];
    [ Acceleration ];
)]
// A 2D vector becomes a purely-linear value with z = 0.
impl From<Vec2> for Velocity {
    fn from(v: Vec2) -> Self {
        Self::from_linear(v.extend(0.0))
    }
}
#[duplicate(
    Velocity;
    [ Velocity ];
    [ Acceleration ];
)]
impl From<Vec3> for Velocity {
    fn from(linear: Vec3) -> Self {
        Self::from_linear(linear)
    }
}
#[duplicate(
    Velocity;
    [ Velocity ];
    [ Acceleration ];
)]
// Extracts the linear component, discarding the angular part.
impl From<Velocity> for Vec3 {
    fn from(Velocity { linear, .. }: Velocity) -> Self {
        linear
    }
}
#[duplicate(
    Velocity;
    [ Velocity ];
    [ Acceleration ];
)]
impl From<AxisAngle> for Velocity {
    fn from(angular: AxisAngle) -> Self {
        Self::from_angular(angular)
    }
}
#[duplicate(
    Velocity;
    [ Velocity ];
    [ Acceleration ];
)]
impl From<Quat> for Velocity {
    fn from(quat: Quat) -> Self {
        Self::from_angular(quat.into())
    }
}
#[duplicate(
    Velocity;
    [ Velocity ];
    [ Acceleration ];
)]
// Extracts the angular component, discarding the linear part.
impl From<Velocity> for AxisAngle {
    fn from(Velocity { angular, .. }: Velocity) -> Self {
        angular
    }
}
#[duplicate(
    Velocity;
    [ Velocity ];
    [ Acceleration ];
)]
impl From<Velocity> for Quat {
    fn from(Velocity { angular, .. }: Velocity) -> Self {
        angular.into()
    }
}
// Raw conversions: `AxisAngle` wraps a bare `Vec3` (axis scaled by angle).
impl From<Vec3> for AxisAngle {
    fn from(v: Vec3) -> Self {
        Self(v)
    }
}
impl From<AxisAngle> for Vec3 {
    fn from(AxisAngle(v): AxisAngle) -> Self {
        v
    }
}
// The scalar form of an axis-angle is its angle (the vector's length).
impl From<AxisAngle> for f32 {
    fn from(AxisAngle(v): AxisAngle) -> Self {
        v.length()
    }
}
// A velocity/acceleration is near zero only when both components are.
#[duplicate(
    Velocity;
    [ Velocity ];
    [ Acceleration ];
)]
impl NearZero for Velocity {
    fn is_near_zero(self) -> bool {
        self.linear.is_near_zero() && self.angular.is_near_zero()
    }
}
// Scaling an axis-angle scales the stored vector, i.e. the angle; the axis
// direction is unchanged.
impl MulAssign<f32> for AxisAngle {
    fn mul_assign(&mut self, rhs: f32) {
        self.0 = self.0 * rhs;
    }
}
impl Mul<f32> for AxisAngle {
    type Output = Self;
    fn mul(mut self, rhs: f32) -> Self::Output {
        self *= rhs;
        self
    }
}
// Commutative form so `f32 * AxisAngle` also works.
impl Mul<AxisAngle> for f32 {
    type Output = AxisAngle;
    fn mul(self, mut rhs: AxisAngle) -> Self::Output {
        rhs *= self;
        rhs
    }
}
impl AxisAngle {
    /// Create a new axis-angle
    // The axis is normalized and then scaled by the angle, so the stored
    // vector's length equals the angle.
    #[inline]
    #[must_use]
    pub fn new(axis: Vec3, angle: f32) -> Self {
        Self(axis.normalize() * angle)
    }
    /// Squared angle.
    ///
    /// In general faster than `angle` because it doesn't need to perform a square-root
    #[inline]
    #[must_use]
    pub fn angle_squared(self) -> f32 {
        self.0.length_squared()
    }
    /// Angle around the axis.
    ///
    /// For comparison you may consider `angle_squared`, that doesn't need to perform a square root.
    #[inline]
    #[must_use]
    pub fn angle(self) -> f32 {
        self.0.length()
    }
    /// Returns the axis **NOT** normalized.
    #[inline]
    #[must_use]
    pub fn axis(self) -> Vec3 {
        self.0
    }
}
impl NearZero for AxisAngle {
    fn is_near_zero(self) -> bool {
        self.0.is_near_zero()
    }
}
impl From<Quat> for AxisAngle {
    fn from(quat: Quat) -> Self {
        // The angle is scaled by the quaternion's length
        // (NOTE(review): presumably so non-unit quaternions contribute a
        // proportionally larger angle — confirm intent).
        let length = quat.length();
        let (axis, angle) = quat.to_axis_angle();
        Self(axis.normalize() * (angle * length))
    }
}
impl From<AxisAngle> for Quat {
    fn from(axis_angle: AxisAngle) -> Self {
        // A near-zero vector has no usable axis direction, so map it to the
        // identity rotation instead of dividing by a (near-)zero angle.
        if axis_angle.is_near_zero() {
            Quat::IDENTITY
        } else {
            let angle = axis_angle.0.length();
            Quat::from_axis_angle(axis_angle.0 / angle, angle)
        }
    }
}
| 23.514881 | 153 | 0.590305 |
3812e42b3e07e8cd4be57d6afeeb655a50cdbf06 | 88 | pub mod models;
pub mod operations;
pub const API_VERSION: &str = "2020-05-01-preview";
| 22 | 51 | 0.738636 |
695c55d1e08e9ca5be3df35bc82d73e11e0d4b23 | 221 | // Copyright 2020 WeDPR Lab Project Authors. Licensed under Apache-2.0.
//! Library of FFI of wedpr_third_party_fisco_bcos wrapper functions, targeting
//! C/C++ compatible architectures (including iOS).
pub mod bn128;
| 31.571429 | 79 | 0.773756 |
396907d9a057e04d8d027ce4df0d3e0bae511411 | 6,169 | //! # Day 6: Lanternfish
//!
//! The sea floor is getting steeper. Maybe the sleigh keys got carried this way?
//!
//! A massive school of glowing lanternfish swims past. They must spawn quickly to reach such large numbers - maybe exponentially quickly? You should model their growth rate to be sure.
//!
//! Although you know nothing about this specific species of lanternfish, you make some guesses about their attributes. Surely, each lanternfish creates a new lanternfish once every 7 days.
//!
//! However, this process isn't necessarily synchronized between every lanternfish - one lanternfish might have 2 days left until it creates another lanternfish, while another might have 4. So, you can model each fish as a single number that represents the number of days until it creates a new lanternfish.
//!
//! Furthermore, you reason, a new lanternfish would surely need slightly longer before it's capable of producing more lanternfish: two more days for its first cycle.
//!
//! So, suppose you have a lanternfish with an internal timer value of 3:
//!
//! After one day, its internal timer would become 2.
//! After another day, its internal timer would become 1.
//! After another day, its internal timer would become 0.
//! After another day, its internal timer would reset to 6, and it would create a new lanternfish with an internal timer of 8.
//! After another day, the first lanternfish would have an internal timer of 5, and the second lanternfish would have an internal timer of 7.
//!
//! A lanternfish that creates a new fish resets its timer to 6, not 7 (because 0 is included as a valid timer value). The new lanternfish starts with an internal timer of 8 and does not start counting down until the next day.
//!
//! Realizing what you're trying to do, the submarine automatically produces a list of the ages of several hundred nearby lanternfish (your puzzle input). For example, suppose you were given the following list:
//!
//! 3,4,3,1,2
//!
//! This list means that the first fish has an internal timer of 3, the second fish has an internal timer of 4, and so on until the fifth fish, which has an internal timer of 2. Simulating these fish over several days would proceed as follows:
//!
//! Initial state: 3,4,3,1,2
//! After 1 day: 2,3,2,0,1
//! After 2 days: 1,2,1,6,0,8
//! After 3 days: 0,1,0,5,6,7,8
//! After 4 days: 6,0,6,4,5,6,7,8,8
//! After 5 days: 5,6,5,3,4,5,6,7,7,8
//! After 6 days: 4,5,4,2,3,4,5,6,6,7
//! After 7 days: 3,4,3,1,2,3,4,5,5,6
//! After 8 days: 2,3,2,0,1,2,3,4,4,5
//! After 9 days: 1,2,1,6,0,1,2,3,3,4,8
//! After 10 days: 0,1,0,5,6,0,1,2,2,3,7,8
//! After 11 days: 6,0,6,4,5,6,0,1,1,2,6,7,8,8,8
//! After 12 days: 5,6,5,3,4,5,6,0,0,1,5,6,7,7,7,8,8
//! After 13 days: 4,5,4,2,3,4,5,6,6,0,4,5,6,6,6,7,7,8,8
//! After 14 days: 3,4,3,1,2,3,4,5,5,6,3,4,5,5,5,6,6,7,7,8
//! After 15 days: 2,3,2,0,1,2,3,4,4,5,2,3,4,4,4,5,5,6,6,7
//! After 16 days: 1,2,1,6,0,1,2,3,3,4,1,2,3,3,3,4,4,5,5,6,8
//! After 17 days: 0,1,0,5,6,0,1,2,2,3,0,1,2,2,2,3,3,4,4,5,7,8
//! After 18 days: 6,0,6,4,5,6,0,1,1,2,6,0,1,1,1,2,2,3,3,4,6,7,8,8,8,8
//!
//! Each day, a 0 becomes a 6 and adds a new 8 to the end of the list, while each other number decreases by 1 if it was present at the start of the day.
//!
//! In this example, after 18 days, there are a total of 26 fish. After 80 days, there would be a total of 5934.
//!
//! Find a way to simulate lanternfish. How many lanternfish would there be after 80 days?
//!
//! Your puzzle answer was 396210.
//!
//! ## Part Two
//!
//! Suppose the lanternfish live forever and have unlimited food and space. Would they take over the entire ocean?
//!
//! After 256 days in the example above, there would be a total of 26984457539 lanternfish!
//!
//! How many lanternfish would there be after 256 days?
//!
//! Your puzzle answer was 1770823541496.
use crate::{day::Challenge, parse_input_raw};
/// Day 06 implementation.
pub struct Day06;
/// Lanternfish population, bucketed by internal timer value (0..=8).
///
/// `index[t]` holds the number of fish whose timer is `t`. `buffer` is a
/// scratch copy, kept in sync with `index`, used while advancing one day.
struct FishSchool {
    index: Vec<usize>,
    buffer: Vec<usize>,
}

impl FishSchool {
    /// Total number of fish across all timer buckets.
    pub fn count(&self) -> usize {
        // Buckets are already `usize`; no cast needed.
        self.index.iter().sum()
    }

    /// Advances the simulation by `count` days.
    pub fn step_count(&mut self, count: usize) {
        for _ in 0..count {
            self.step();
        }
    }

    /// Advances the simulation by one day.
    ///
    /// Fish at timer 0 reset to 6 and each spawn a new fish at timer 8;
    /// every other fish's timer decreases by 1.
    pub fn step(&mut self) {
        for t in 0..9 {
            let fishes = self.index[t];
            if t == 0 {
                // Spawning: parents move to bucket 6, offspring enter bucket 8.
                self.buffer[0] -= fishes;
                self.buffer[6] += fishes;
                self.buffer[8] += fishes;
            } else {
                self.buffer[t] -= fishes;
                self.buffer[t - 1] += fishes;
            }
        }
        // Commit the day's changes; keeps `buffer` in sync for the next step.
        self.index.copy_from_slice(&self.buffer);
    }
}

impl From<&str> for FishSchool {
    /// Parses a comma-separated list of timer values, e.g. `"3,4,3,1,2"`.
    /// Unparseable entries are silently skipped (matches prior behavior).
    fn from(s: &str) -> Self {
        let mut index = vec![0; 9];
        for timer in s.split(',').flat_map(|x| x.parse::<usize>().ok()) {
            index[timer] += 1;
        }
        Self {
            buffer: index.clone(),
            index,
        }
    }
}
impl Challenge for Day06 {
    /// Part 1: simulate the puzzle input for 80 days and report the count.
    fn run_ex1(&mut self) -> String {
        let mut school = FishSchool::from(parse_input_raw!());
        school.step_count(80);
        school.count().to_string()
    }

    /// Part 2: simulate the puzzle input for 256 days and report the count.
    fn run_ex2(&mut self) -> String {
        let mut school = FishSchool::from(parse_input_raw!());
        school.step_count(256);
        school.count().to_string()
    }
}
#[cfg(test)]
mod tests {
    use crate::create_day_tests;

    use super::FishSchool;

    // Sample input from the puzzle description.
    const SAMPLE_DATA: &str = "3,4,3,1,2";

    // Verifies the full puzzle-input answers for both parts.
    create_day_tests!("06", "396210", "1770823541496");

    // The following expected counts come from the worked example in the
    // puzzle text (18/80/256 days).
    #[test]
    fn test_sample_18() {
        let mut school = FishSchool::from(SAMPLE_DATA);
        school.step_count(18);
        assert_eq!(school.count(), 26);
    }

    #[test]
    fn test_sample_80() {
        let mut school = FishSchool::from(SAMPLE_DATA);
        school.step_count(80);
        assert_eq!(school.count(), 5934);
    }

    #[test]
    fn test_sample_256() {
        let mut school = FishSchool::from(SAMPLE_DATA);
        school.step_count(256);
        assert_eq!(school.count(), 26_984_457_539);
    }
}
| 36.502959 | 307 | 0.62733 |
7ae2607933552c2b557398275691ac8ae089d401 | 22,249 | //! Collection of all runtime features.
//!
//! Steps to add a new feature are outlined below. Note that these steps only cover
//! the process of getting a feature into the core Solana code.
//! - For features that are unambiguously good (ie bug fixes), these steps are sufficient.
//! - For features that should go up for community vote (ie fee structure changes), more
//! information on the additional steps to follow can be found at:
//! <https://spl.solana.com/feature-proposal#feature-proposal-life-cycle>
//!
//! 1. Generate a new keypair with `solana-keygen new --outfile feature.json --no-passphrase`
//! - Keypairs should be held by core contributors only. If you're a non-core contirbutor going
//! through these steps, the PR process will facilitate a keypair holder being picked. That
//! person will generate the keypair, provide pubkey for PR, and ultimately enable the feature.
//! 2. Add a public module for the feature, specifying keypair pubkey as the id with
//! `solana_sdk::declare_id!()` within the module.
//! Additionally, add an entry to `FEATURE_NAMES` map.
//! 3. Add desired logic to check for and switch on feature availability.
//!
//! For more information on how features are picked up, see comments for `Feature`.
use {
lazy_static::lazy_static,
solana_sdk::{
clock::Slot,
hash::{Hash, Hasher},
pubkey::Pubkey,
},
std::collections::{HashMap, HashSet},
};
/// Feature gate: deprecate the unused rewards sysvar.
pub mod deprecate_rewards_sysvar {
    solana_sdk::declare_id!("GaBtBJvmS4Arjj5W1NmFcyvPjsHN38UGYDq2MDwbs9Qu");
}
/// Feature gate: "pico" inflation.
pub mod pico_inflation {
    solana_sdk::declare_id!("4RWNif6C2WCNiKVW7otP4G7dkmkHGyKQWRpuZ1pxKU5m");
}
/// Feature gates controlling activation of full inflation, per cluster.
pub mod full_inflation {
    /// Full inflation on devnet and testnet.
    pub mod devnet_and_testnet {
        solana_sdk::declare_id!("DT4n6ABDqs6w4bnfwrXT9rsprcPf6cdDga1egctaPkLC");
    }
    pub mod mainnet {
        pub mod certusone {
            /// Community vote allowing Certus One to enable full inflation.
            pub mod vote {
                solana_sdk::declare_id!("BzBBveUDymEYoYzcMWNQCx3cd4jQs7puaVFHLtsbB6fm");
            }
            /// Full inflation enabled by Certus One.
            pub mod enable {
                solana_sdk::declare_id!("7XRJcS5Ud5vxGB54JbK9N2vBZVwnwdBNeJW1ibRgD9gx");
            }
        }
    }
}
/// Feature gate: spl-token multisig fix.
pub mod spl_token_v2_multisig_fix {
    solana_sdk::declare_id!("E5JiFDQCwyC6QfT9REFyMpfK2mHcmv1GUDySU1Ue7TYv");
}
/// Feature gate: no-overflow rent distribution.
pub mod no_overflow_rent_distribution {
    solana_sdk::declare_id!("4kpdyrcj5jS47CZb2oJGfVxjYbsMm2Kx97gFyZrxxwXz");
}
/// Feature gate: filter stake_delegation_accounts (#14062).
pub mod filter_stake_delegation_accounts {
    solana_sdk::declare_id!("GE7fRxmW46K6EmCD9AMZSbnaJ2e3LfqCZzdHi9hmYAgi");
}
/// Feature gate: require the custodian to authorize withdrawer changes for
/// locked stake.
pub mod require_custodian_for_locked_stake_authorize {
    solana_sdk::declare_id!("D4jsDcXaqdW8tDAWn8H4R25Cdns2YwLneujSL1zvjW6R");
}
/// Feature gate: spl-token self-transfer fix.
pub mod spl_token_v2_self_transfer_fix {
    solana_sdk::declare_id!("BL99GYhdjjcv6ys22C9wPgn2aTVERDbPHHo4NbS3hgp7");
}
/// Feature gate: warp timestamp again; adjust bounding to 25% fast, 80% slow
/// (#15204).
pub mod warp_timestamp_again {
    solana_sdk::declare_id!("GvDsGDkH5gyzwpDhxNixx8vtx1kwYHH13RiNAPw27zXb");
}
/// Feature gate: check initialized Vote data.
pub mod check_init_vote_data {
    solana_sdk::declare_id!("3ccR6QpxGYsAbWyfevEtBNGfWV4xBffxRj2tD6A9i39F");
}
/// Feature gate: enable the `secp256k1_recover` syscall.
pub mod secp256k1_recover_syscall_enabled {
    solana_sdk::declare_id!("6RvdSWHh8oh72Dp7wMTS2DBkf3fRPtChfNrAo3cZZoXJ");
}
/// Feature gate: perform all checks for transfers of 0 lamports.
pub mod system_transfer_zero_check {
    solana_sdk::declare_id!("BrTR9hzw4WBGFP65AJMbpAo64DcA3U6jdPSga9fMV5cS");
}
/// Feature gate: enable the blake3 syscall.
pub mod blake3_syscall_enabled {
    solana_sdk::declare_id!("HTW2pSyErTj4BV6KBM9NZ9VBUJVxt7sacNWcf76wtzb3");
}
/// Feature gate: dedupe config program signers.
pub mod dedupe_config_program_signers {
    solana_sdk::declare_id!("8kEuAshXLsgkUEdcFVLqrjCGGHVWFW99ZZpxvAzzMtBp");
}
/// Feature gate: deterministic shred seed.
pub mod deterministic_shred_seed_enabled {
    solana_sdk::declare_id!("FjSRMpFe7mofQ3WrEMT7Smjk2sME1XdAoRxcv55V6M44");
}
/// Feature gate: prohibit extra transaction signatures.
pub mod verify_tx_signatures_len {
    solana_sdk::declare_id!("EVW9B5xD9FFK7vw1SBARwMA4s5eRo5eKJdKpsBikzKBz");
}
/// Feature gate: vote/stake program checked instructions (#18345).
pub mod vote_stake_checked_instructions {
    solana_sdk::declare_id!("BcWknVcgvonN8sL4HE4XFuEVgfcee5MwxWPAgP6ZV89X");
}
/// Feature gate: bump neon_evm's compute budget.
pub mod neon_evm_compute_budget {
    solana_sdk::declare_id!("GLrVvDPkQi5PMYUrsYWT9doZhSHr1BVZXqj5DbFps3rS");
}
/// Feature gate: collect rent from accounts owned by sysvars.
pub mod rent_for_sysvars {
    solana_sdk::declare_id!("BKCPBQQBZqggVnFso5nQ8rQ4RwwogYwjuUt9biBjxwNF");
}
/// Feature gate: upgrade libsecp256k1 to v0.5.0.
pub mod libsecp256k1_0_5_upgrade_enabled {
    solana_sdk::declare_id!("DhsYfRjxfnh2g7HKJYSzT79r74Afa1wbHkAgHndrA1oy");
}
/// Feature gate: transaction-wide compute cap.
pub mod tx_wide_compute_cap {
    solana_sdk::declare_id!("5ekBxc8itEnPv4NzGJtr8BVVQLNMQuLMNQQj7pHoLNZ9");
}
/// Feature gate: spl-token set_authority fix.
pub mod spl_token_v2_set_authority_fix {
    solana_sdk::declare_id!("FToKNBYyiF4ky9s8WsmLBXHCht17Ek7RXaLZGHzzQhJ1");
}
/// Feature gate: merge NonceError into SystemError.
pub mod merge_nonce_error_into_system_error {
    solana_sdk::declare_id!("21AWDosvp3pBamFW91KB35pNoaoZVTM7ess8nr2nt53B");
}
/// Feature gate: disable the fees sysvar.
pub mod disable_fees_sysvar {
    solana_sdk::declare_id!("JAN1trEUEtZjgXYzNBYHU9DYd7GnThhXfFP7SzPXkPsG");
}
/// Feature gate: stake merge with unmatched credits_observed.
pub mod stake_merge_with_unmatched_credits_observed {
    solana_sdk::declare_id!("meRgp4ArRPhD3KtCY9c5yAf2med7mBLsjKTPeVUHqBL");
}
/// Feature gate: gate large block.
pub mod gate_large_block {
    solana_sdk::declare_id!("2ry7ygxiYURULZCrypHhveanvP5tzZ4toRwVp89oCNSj");
}
/// Feature gate: enable the zk-token SDK.
pub mod zk_token_sdk_enabled {
    solana_sdk::declare_id!("zk1snxsc6Fh3wsGNbbHAJNHiJoYgF29mMnTSusGx5EJ");
}
/// Feature gate: enable versioned transaction messages.
pub mod versioned_tx_message_enabled {
    solana_sdk::declare_id!("3KZZ6Ks1885aGBQ45fwRcPXVBCtzUvxhUTkwKMR41Tca");
}
/// Feature gate: libsecp256k1 fails on bad count.
pub mod libsecp256k1_fail_on_bad_count {
    solana_sdk::declare_id!("8aXvSuopd1PUj7UhehfXJRg6619RHp8ZvwTyyJHdUYsj");
}
/// Feature gate: instructions sysvar owned by the sysvar owner.
pub mod instructions_sysvar_owned_by_sysvar {
    solana_sdk::declare_id!("H3kBSaKdeiUsyHmeHqjJYNc27jesXZ6zWj3zWkowQbkV");
}
/// Feature gate: stake program advances activating credits_observed.
pub mod stake_program_advance_activating_credits_observed {
    solana_sdk::declare_id!("SAdVFw3RZvzbo6DvySbSdBnHN4gkzSTH9dSxesyKKPj");
}
/// Feature gate: demote program write locks.
pub mod demote_program_write_locks {
    solana_sdk::declare_id!("3E3jV7v9VcdJL8iYZUMax9DiDno8j7EWUVbhm9RtShj2");
}
/// Feature gate: enable the ed25519 program.
pub mod ed25519_program_enabled {
    solana_sdk::declare_id!("6ppMXNYLhVd7GcsZ5uV11wQEW7spppiMVfqQv5SXhDpX");
}
/// Feature gate: enable the return-data syscall.
pub mod return_data_syscall_enabled {
    solana_sdk::declare_id!("DwScAzPUjuv65TMbDnFY7AgwmotzWy3xpEJMXM3hZFaB");
}
/// Feature gate: reduce the required deploy balance.
pub mod reduce_required_deploy_balance {
    solana_sdk::declare_id!("EBeznQDjcPG8491sFsKZYBi5S5jTVXMpAKNDJMQPS2kq");
}
/// Feature gate: enable the `sol_log_data` syscall.
pub mod sol_log_data_syscall_enabled {
    solana_sdk::declare_id!("6uaHcKPGUy4J7emLBgUTeufhJdiwhngW6a1R9B7c2ob9");
}
/// Feature gate: stakes remove delegation if inactive.
pub mod stakes_remove_delegation_if_inactive {
    solana_sdk::declare_id!("HFpdDDNQjvcXnXKec697HDDsyk6tFoWS2o8fkxuhQZpL");
}
/// Feature gate: support account realloc.
pub mod do_support_realloc {
    solana_sdk::declare_id!("75m6ysz33AfLA5DDEzWM1obBrnPQRSsdVQ2nRmc8Vuu1");
}
// Note: when this feature is cleaned up, also remove the secp256k1 program from
// the list of builtins and remove its files from /programs
/// Feature gate: prevent calling precompiles as programs.
pub mod prevent_calling_precompiles_as_programs {
    solana_sdk::declare_id!("4ApgRX3ud6p7LNMJmsuaAcZY5HWctGPr5obAsjB3A54d");
}
/// Feature gate: optimize epoch-boundary updates.
pub mod optimize_epoch_boundary_updates {
    solana_sdk::declare_id!("265hPS8k8xJ37ot82KEgjRunsUp5w4n4Q4VwwiN9i9ps");
}
/// Feature gate: remove the native loader.
pub mod remove_native_loader {
    solana_sdk::declare_id!("HTTgmruMYRZEntyL3EdCDdnS6e4D5wRq1FA7kQsb66qq");
}
/// Feature gate: send to the TPU vote port.
pub mod send_to_tpu_vote_port {
    solana_sdk::declare_id!("C5fh68nJ7uyKAuYZg2x9sEQ5YrVf3dkW6oojNBSc3Jvo");
}
/// Feature gate: turbine peers shuffle.
pub mod turbine_peers_shuffle {
    solana_sdk::declare_id!("4VvpgRD6UsHvkXwpuQhtR5NG1G4esMaExeWuSEpsYRUa");
}
/// Feature gate: requestable heap size.
pub mod requestable_heap_size {
    solana_sdk::declare_id!("CCu4boMmfLuqcmfTLPHQiUo22ZdUsXjgzPAURYaWt1Bw");
}
/// Feature gate: disable the fee calculator.
pub mod disable_fee_calculator {
    solana_sdk::declare_id!("2jXx2yDmGysmBKfKYNgLj2DQyAQv6mMk2BPh4eSbyB4H");
}
/// Feature gate: add the compute-budget program.
pub mod add_compute_budget_program {
    solana_sdk::declare_id!("4d5AKtxoh93Dwm1vHXUU3iRATuMndx1c431KgT2td52r");
}
/// Feature gate: nonce account must be writable.
pub mod nonce_must_be_writable {
    solana_sdk::declare_id!("BiCU7M5w8ZCMykVSyhZ7Q3m2SWoR2qrEQ86ERcDX77ME");
}
/// Feature gate: spl-token v3.3.0 release.
pub mod spl_token_v3_3_0_release {
    solana_sdk::declare_id!("Ftok2jhqAqxUWEiCVRrfRs9DPppWP8cgTB7NQNKL88mS");
}
/// Feature gate: leave nonce on success.
pub mod leave_nonce_on_success {
    solana_sdk::declare_id!("E8MkiWZNNPGU6n55jkGzyj8ghUmjCHRmDFdYYFYHxWhQ");
}
/// Feature gate: reject empty instruction without a program.
pub mod reject_empty_instruction_without_program {
    solana_sdk::declare_id!("9kdtFSrXHQg3hKkbXkQ6trJ3Ja1xpJ22CTFSNAciEwmL");
}
/// Feature gate: fixed memcpy non-overlapping check.
pub mod fixed_memcpy_nonoverlapping_check {
    solana_sdk::declare_id!("36PRUK2Dz6HWYdG9SpjeAsF5F3KxnFCakA2BZMbtMhSb");
}
/// Feature gate: reject non-rent-exempt vote withdraws.
pub mod reject_non_rent_exempt_vote_withdraws {
    solana_sdk::declare_id!("7txXZZD6Um59YoLMF7XUNimbMjsqsWhc7g2EniiTrmp1");
}
/// Feature gate: evict invalid stakes-cache entries.
pub mod evict_invalid_stakes_cache_entries {
    solana_sdk::declare_id!("EMX9Q7TVFAmQ9V1CggAkhMzhXSg8ECp7fHrWQX2G1chf");
}
/// Feature gate: allow votes to directly update vote state.
pub mod allow_votes_to_directly_update_vote_state {
    solana_sdk::declare_id!("Ff8b1fBeB86q8cjq47ZhsQLgv5EkHu3G1C99zjUfAzrq");
}
/// Feature gate: cap accounts data length.
pub mod cap_accounts_data_len {
    solana_sdk::declare_id!("capRxUrBjNkkCpjrJxPGfPaWijB7q3JoDfsWXAnt46r");
}
/// Feature gate: maximum transaction account locks.
pub mod max_tx_account_locks {
    solana_sdk::declare_id!("CBkDroRDqm8HwHe6ak9cguPjUomrASEkfmxEaZ5CNNxz");
}
/// Feature gate: require rent-exempt accounts.
pub mod require_rent_exempt_accounts {
    solana_sdk::declare_id!("BkFDxiJQWZXGTZaJQxH7wVEHkAmwCgSEVkrvswFfRJPD");
}
/// Feature gate: filter votes outside slot hashes.
pub mod filter_votes_outside_slot_hashes {
    solana_sdk::declare_id!("3gtZPqvPpsbXZVCx6hceMfWxtsmrjMzmg8C7PLKSxS2d");
}
/// Feature gate: update syscall base costs.
pub mod update_syscall_base_costs {
    solana_sdk::declare_id!("2h63t332mGCCsWK2nqqqHhN4U9ayyqhLVFvczznHDoTZ");
}
/// Feature gate: vote withdraw authority may change the authorized voter.
pub mod vote_withdraw_authority_may_change_authorized_voter {
    solana_sdk::declare_id!("AVZS3ZsN4gi6Rkx2QUibYuSJG3S6QHib7xCYhG6vGJxU");
}
/// Feature gate: spl-associated-token-account v1.0.4.
pub mod spl_associated_token_account_v1_0_4 {
    solana_sdk::declare_id!("FaTa4SpiaSNH44PGC4z8bnGVTkSRYaWvrBs3KTu8XQQq");
}
/// Feature gate: reject vote account close unless zero credit epoch.
pub mod reject_vote_account_close_unless_zero_credit_epoch {
    solana_sdk::declare_id!("ALBk3EWdeAg2WAGf6GPDUf1nynyNqCdEVmgouG7rpuCj");
}
/// Feature gate: add the get-processed-sibling-instruction syscall.
pub mod add_get_processed_sibling_instruction_syscall {
    solana_sdk::declare_id!("CFK1hRCNy8JJuAAY8Pb2GjLFNdCThS2qwZNe3izzBMgn");
}
/// Feature gate: bank transaction count fix.
/// (sic — the "tranaction" typo is part of the public module name and cannot
/// be changed without breaking callers.)
pub mod bank_tranaction_count_fix {
    solana_sdk::declare_id!("Vo5siZ442SaZBKPXNocthiXysNviW4UYPwRFggmbgAp");
}
/// Feature gate: disable deprecated BPF load instructions.
pub mod disable_bpf_deprecated_load_instructions {
    solana_sdk::declare_id!("3XgNukcZWf9o3HdA3fpJbm94XFc4qpvTXc8h1wxYwiPi");
}
/// Feature gate: disable unresolved BPF symbols at runtime.
pub mod disable_bpf_unresolved_symbols_at_runtime {
    solana_sdk::declare_id!("4yuaYAj2jGMGTh1sSmi4G2eFscsDq8qjugJXZoBN6YEa");
}
/// Feature gate: record the instruction in the transaction-context push.
pub mod record_instruction_in_transaction_context_push {
    solana_sdk::declare_id!("3aJdcZqxoLpSBxgeYGjPwaYS1zzcByxUDqJkbzWAH1Zb");
}
lazy_static! {
    /// Map of feature identifiers to user-visible description
    // Every feature gate known to this build must appear here; `FeatureSet`
    // derives its inactive set and the software `ID` hash from these keys.
    pub static ref FEATURE_NAMES: HashMap<Pubkey, &'static str> = [
        (deprecate_rewards_sysvar::id(), "deprecate unused rewards sysvar"),
        (pico_inflation::id(), "pico inflation"),
        (full_inflation::devnet_and_testnet::id(), "full inflation on devnet and testnet"),
        (spl_token_v2_multisig_fix::id(), "spl-token multisig fix"),
        (no_overflow_rent_distribution::id(), "no overflow rent distribution"),
        (filter_stake_delegation_accounts::id(), "filter stake_delegation_accounts #14062"),
        (require_custodian_for_locked_stake_authorize::id(), "require custodian to authorize withdrawer change for locked stake"),
        (spl_token_v2_self_transfer_fix::id(), "spl-token self-transfer fix"),
        (full_inflation::mainnet::certusone::enable::id(), "full inflation enabled by Certus One"),
        (full_inflation::mainnet::certusone::vote::id(), "community vote allowing Certus One to enable full inflation"),
        (warp_timestamp_again::id(), "warp timestamp again, adjust bounding to 25% fast 80% slow #15204"),
        (check_init_vote_data::id(), "check initialized Vote data"),
        (secp256k1_recover_syscall_enabled::id(), "secp256k1_recover syscall"),
        (system_transfer_zero_check::id(), "perform all checks for transfers of 0 lamports"),
        (blake3_syscall_enabled::id(), "blake3 syscall"),
        (dedupe_config_program_signers::id(), "dedupe config program signers"),
        (deterministic_shred_seed_enabled::id(), "deterministic shred seed"),
        (verify_tx_signatures_len::id(), "prohibit extra transaction signatures"),
        (vote_stake_checked_instructions::id(), "vote/state program checked instructions #18345"),
        (neon_evm_compute_budget::id(), "bump neon_evm's compute budget"),
        (rent_for_sysvars::id(), "collect rent from accounts owned by sysvars"),
        (libsecp256k1_0_5_upgrade_enabled::id(), "upgrade libsecp256k1 to v0.5.0"),
        (tx_wide_compute_cap::id(), "transaction wide compute cap"),
        (spl_token_v2_set_authority_fix::id(), "spl-token set_authority fix"),
        (merge_nonce_error_into_system_error::id(), "merge NonceError into SystemError"),
        (disable_fees_sysvar::id(), "disable fees sysvar"),
        (stake_merge_with_unmatched_credits_observed::id(), "allow merging active stakes with unmatched credits_observed #18985"),
        (gate_large_block::id(), "validator checks block cost against max limit in realtime, reject if exceeds."),
        (zk_token_sdk_enabled::id(), "enable Zk Token proof program and syscalls"),
        (versioned_tx_message_enabled::id(), "enable versioned transaction message processing"),
        (libsecp256k1_fail_on_bad_count::id(), "fail libsec256k1_verify if count appears wrong"),
        (instructions_sysvar_owned_by_sysvar::id(), "fix owner for instructions sysvar"),
        (stake_program_advance_activating_credits_observed::id(), "Enable advancing credits observed for activation epoch #19309"),
        (demote_program_write_locks::id(), "demote program write locks to readonly, except when upgradeable loader present #19593 #20265"),
        (ed25519_program_enabled::id(), "enable builtin ed25519 signature verify program"),
        (return_data_syscall_enabled::id(), "enable sol_{set,get}_return_data syscall"),
        (reduce_required_deploy_balance::id(), "reduce required payer balance for program deploys"),
        (sol_log_data_syscall_enabled::id(), "enable sol_log_data syscall"),
        (stakes_remove_delegation_if_inactive::id(), "remove delegations from stakes cache when inactive"),
        (do_support_realloc::id(), "support account data reallocation"),
        (prevent_calling_precompiles_as_programs::id(), "prevent calling precompiles as programs"),
        (optimize_epoch_boundary_updates::id(), "optimize epoch boundary updates"),
        (remove_native_loader::id(), "remove support for the native loader"),
        (send_to_tpu_vote_port::id(), "send votes to the tpu vote port"),
        (turbine_peers_shuffle::id(), "turbine peers shuffle patch"),
        (requestable_heap_size::id(), "Requestable heap frame size"),
        (disable_fee_calculator::id(), "deprecate fee calculator"),
        (add_compute_budget_program::id(), "Add compute_budget_program"),
        (nonce_must_be_writable::id(), "nonce must be writable"),
        (spl_token_v3_3_0_release::id(), "spl-token v3.3.0 release"),
        (leave_nonce_on_success::id(), "leave nonce as is on success"),
        (reject_empty_instruction_without_program::id(), "fail instructions which have native_loader as program_id directly"),
        (fixed_memcpy_nonoverlapping_check::id(), "use correct check for nonoverlapping regions in memcpy syscall"),
        (reject_non_rent_exempt_vote_withdraws::id(), "fail vote withdraw instructions which leave the account non-rent-exempt"),
        (evict_invalid_stakes_cache_entries::id(), "evict invalid stakes cache entries on epoch boundaries"),
        (allow_votes_to_directly_update_vote_state::id(), "enable direct vote state update"),
        (cap_accounts_data_len::id(), "cap the accounts data len"),
        (max_tx_account_locks::id(), "enforce max number of locked accounts per transaction"),
        (require_rent_exempt_accounts::id(), "require all new transaction accounts with data to be rent-exempt"),
        (filter_votes_outside_slot_hashes::id(), "filter vote slots older than the slot hashes history"),
        (update_syscall_base_costs::id(), "Update syscall base costs"),
        (vote_withdraw_authority_may_change_authorized_voter::id(), "vote account withdraw authority may change the authorized voter #22521"),
        (spl_associated_token_account_v1_0_4::id(), "SPL Associated Token Account Program release version 1.0.4, tied to token 3.3.0 #22648"),
        (reject_vote_account_close_unless_zero_credit_epoch::id(), "fail vote account withdraw to 0 unless account earned 0 credits in last completed epoch"),
        (add_get_processed_sibling_instruction_syscall::id(), "add add_get_processed_sibling_instruction_syscall"),
        (bank_tranaction_count_fix::id(), "Fixes Bank::transaction_count to include all committed transactions, not just successful ones"),
        (disable_bpf_deprecated_load_instructions::id(), "Disable ldabs* and ldind* BPF instructions"),
        (disable_bpf_unresolved_symbols_at_runtime::id(), "Disable reporting of unresolved BPF symbols at runtime"),
        (record_instruction_in_transaction_context_push::id(), "Move the CPI stack overflow check to the end of push"),
        /*************** ADD NEW FEATURES HERE ***************/
    ]
    .iter()
    .cloned()
    .collect();
    /// Unique identifier of the current software's feature set
    // Feature ids are hashed in sorted order so the result does not depend on
    // HashMap iteration order and is stable across runs of the same build.
    pub static ref ID: Hash = {
        let mut hasher = Hasher::default();
        let mut feature_ids = FEATURE_NAMES.keys().collect::<Vec<_>>();
        feature_ids.sort();
        for feature in feature_ids {
            hasher.hash(feature.as_ref());
        }
        hasher.result()
    };
}
/// A (vote, enable) pair of feature ids that together gate full inflation.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct FullInflationFeaturePair {
    pub vote_id: Pubkey, // Feature that grants the candidate the ability to enable full inflation
    pub enable_id: Pubkey, // Feature to enable full inflation by the candidate
}
lazy_static! {
    /// Set of feature pairs that once enabled will trigger full inflation
    // Currently only the Certus One mainnet pair exists.
    pub static ref FULL_INFLATION_FEATURE_PAIRS: HashSet<FullInflationFeaturePair> = [
        FullInflationFeaturePair {
            vote_id: full_inflation::mainnet::certusone::vote::id(),
            enable_id: full_inflation::mainnet::certusone::enable::id(),
        },
    ]
    .iter()
    .cloned()
    .collect();
}
/// `FeatureSet` holds the set of currently active/inactive runtime features
#[derive(AbiExample, Debug, Clone)]
pub struct FeatureSet {
    // Feature id -> slot at which the feature was activated.
    pub active: HashMap<Pubkey, Slot>,
    // Known feature ids that have not (yet) been activated.
    pub inactive: HashSet<Pubkey>,
}
impl Default for FeatureSet {
    fn default() -> Self {
        // All features disabled
        // Every id from FEATURE_NAMES starts in `inactive`; `activate` moves
        // ids into `active` one at a time.
        Self {
            active: HashMap::new(),
            inactive: FEATURE_NAMES.keys().cloned().collect(),
        }
    }
}
impl FeatureSet {
    /// Returns whether `feature_id` has been activated.
    pub fn is_active(&self, feature_id: &Pubkey) -> bool {
        self.active.contains_key(feature_id)
    }
    /// Returns the slot at which `feature_id` was activated, if it was.
    pub fn activated_slot(&self, feature_id: &Pubkey) -> Option<Slot> {
        self.active.get(feature_id).copied()
    }
    /// List of enabled features that trigger full inflation
    pub fn full_inflation_features_enabled(&self) -> HashSet<Pubkey> {
        // A mainnet pair counts only when both its vote and enable features
        // are active; the pair is represented by its enable id.
        let mut enabled: HashSet<Pubkey> = FULL_INFLATION_FEATURE_PAIRS
            .iter()
            .filter(|pair| self.is_active(&pair.vote_id) && self.is_active(&pair.enable_id))
            .map(|pair| pair.enable_id)
            .collect();
        // Devnet/testnet full inflation is gated by a single feature id.
        let devnet_and_testnet = full_inflation::devnet_and_testnet::id();
        if self.is_active(&devnet_and_testnet) {
            enabled.insert(devnet_and_testnet);
        }
        enabled
    }
    /// All features enabled, useful for testing
    pub fn all_enabled() -> Self {
        let active = FEATURE_NAMES.keys().cloned().map(|key| (key, 0)).collect();
        Self {
            active,
            inactive: HashSet::new(),
        }
    }
    /// Activate a feature
    pub fn activate(&mut self, feature_id: &Pubkey, slot: u64) {
        self.inactive.remove(feature_id);
        self.active.insert(*feature_id, slot);
    }
    /// Deactivate a feature
    pub fn deactivate(&mut self, feature_id: &Pubkey) {
        self.active.remove(feature_id);
        self.inactive.insert(*feature_id);
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // Devnet/testnet full inflation is enabled by a single feature id.
    #[test]
    fn test_full_inflation_features_enabled_devnet_and_testnet() {
        let mut feature_set = FeatureSet::default();
        assert!(feature_set.full_inflation_features_enabled().is_empty());
        feature_set
            .active
            .insert(full_inflation::devnet_and_testnet::id(), 42);
        assert_eq!(
            feature_set.full_inflation_features_enabled(),
            [full_inflation::devnet_and_testnet::id()]
                .iter()
                .cloned()
                .collect()
        );
    }
    // Mainnet full inflation requires both the vote and enable features,
    // in either activation order.
    #[test]
    fn test_full_inflation_features_enabled() {
        // Normal sequence: vote_id then enable_id
        let mut feature_set = FeatureSet::default();
        assert!(feature_set.full_inflation_features_enabled().is_empty());
        feature_set
            .active
            .insert(full_inflation::mainnet::certusone::vote::id(), 42);
        assert!(feature_set.full_inflation_features_enabled().is_empty());
        feature_set
            .active
            .insert(full_inflation::mainnet::certusone::enable::id(), 42);
        assert_eq!(
            feature_set.full_inflation_features_enabled(),
            [full_inflation::mainnet::certusone::enable::id()]
                .iter()
                .cloned()
                .collect()
        );
        // Backwards sequence: enable_id and then vote_id
        let mut feature_set = FeatureSet::default();
        assert!(feature_set.full_inflation_features_enabled().is_empty());
        feature_set
            .active
            .insert(full_inflation::mainnet::certusone::enable::id(), 42);
        assert!(feature_set.full_inflation_features_enabled().is_empty());
        feature_set
            .active
            .insert(full_inflation::mainnet::certusone::vote::id(), 42);
        assert_eq!(
            feature_set.full_inflation_features_enabled(),
            [full_inflation::mainnet::certusone::enable::id()]
                .iter()
                .cloned()
                .collect()
        );
    }
    // activate/deactivate keep `active` and `inactive` mutually exclusive.
    #[test]
    fn test_feature_set_activate_deactivate() {
        let mut feature_set = FeatureSet::default();
        let feature = Pubkey::new_unique();
        assert!(!feature_set.is_active(&feature));
        feature_set.activate(&feature, 0);
        assert!(feature_set.is_active(&feature));
        feature_set.deactivate(&feature);
        assert!(!feature_set.is_active(&feature));
    }
}
| 39.87276 | 158 | 0.723628 |
ff057a097d8827337ace5d1268cba180de0d833c | 4,026 | use crate::allocator::Allocator;
use crate::geometry::{Rotation, UnitComplex, UnitQuaternion};
use crate::{DefaultAllocator, DimName, Point, Scalar, SimdRealField, VectorN, U2, U3};
use simba::scalar::ClosedMul;
/// Trait implemented by rotations that can be used inside of an `Isometry` or `Similarity`.
///
/// Implemented below by `Rotation` (any dimension), `UnitQuaternion` (3D) and
/// `UnitComplex` (2D).
pub trait AbstractRotation<N: Scalar, D: DimName>: PartialEq + ClosedMul + Clone {
    /// The rotation identity.
    fn identity() -> Self;
    /// The rotation inverse.
    fn inverse(&self) -> Self;
    /// Change `self` to its inverse.
    fn inverse_mut(&mut self);
    /// Apply the rotation to the given vector.
    fn transform_vector(&self, v: &VectorN<N, D>) -> VectorN<N, D>
    where
        DefaultAllocator: Allocator<N, D>;
    /// Apply the rotation to the given point.
    fn transform_point(&self, p: &Point<N, D>) -> Point<N, D>
    where
        DefaultAllocator: Allocator<N, D>;
    /// Apply the inverse rotation to the given vector.
    fn inverse_transform_vector(&self, v: &VectorN<N, D>) -> VectorN<N, D>
    where
        DefaultAllocator: Allocator<N, D>;
    /// Apply the inverse rotation to the given point.
    fn inverse_transform_point(&self, p: &Point<N, D>) -> Point<N, D>
    where
        DefaultAllocator: Allocator<N, D>;
}
// Delegates every trait method to the inherent `Rotation` method of the same
// name; `self * v` / `self * p` apply the rotation via the `Mul` impls.
impl<N: SimdRealField, D: DimName> AbstractRotation<N, D> for Rotation<N, D>
where
    N::Element: SimdRealField,
    DefaultAllocator: Allocator<N, D, D>,
{
    #[inline]
    fn identity() -> Self {
        Self::identity()
    }
    #[inline]
    fn inverse(&self) -> Self {
        self.inverse()
    }
    #[inline]
    fn inverse_mut(&mut self) {
        self.inverse_mut()
    }
    #[inline]
    fn transform_vector(&self, v: &VectorN<N, D>) -> VectorN<N, D>
    where
        DefaultAllocator: Allocator<N, D>,
    {
        self * v
    }
    #[inline]
    fn transform_point(&self, p: &Point<N, D>) -> Point<N, D>
    where
        DefaultAllocator: Allocator<N, D>,
    {
        self * p
    }
    #[inline]
    fn inverse_transform_vector(&self, v: &VectorN<N, D>) -> VectorN<N, D>
    where
        DefaultAllocator: Allocator<N, D>,
    {
        self.inverse_transform_vector(v)
    }
    #[inline]
    fn inverse_transform_point(&self, p: &Point<N, D>) -> Point<N, D>
    where
        DefaultAllocator: Allocator<N, D>,
    {
        self.inverse_transform_point(p)
    }
}
// 3D rotations represented as unit quaternions; pure delegation as above.
impl<N: SimdRealField> AbstractRotation<N, U3> for UnitQuaternion<N>
where
    N::Element: SimdRealField,
{
    #[inline]
    fn identity() -> Self {
        Self::identity()
    }
    #[inline]
    fn inverse(&self) -> Self {
        self.inverse()
    }
    #[inline]
    fn inverse_mut(&mut self) {
        self.inverse_mut()
    }
    #[inline]
    fn transform_vector(&self, v: &VectorN<N, U3>) -> VectorN<N, U3> {
        self * v
    }
    #[inline]
    fn transform_point(&self, p: &Point<N, U3>) -> Point<N, U3> {
        self * p
    }
    #[inline]
    fn inverse_transform_vector(&self, v: &VectorN<N, U3>) -> VectorN<N, U3> {
        self.inverse_transform_vector(v)
    }
    #[inline]
    fn inverse_transform_point(&self, p: &Point<N, U3>) -> Point<N, U3> {
        self.inverse_transform_point(p)
    }
}
// 2D rotations represented as unit complex numbers; pure delegation as above.
impl<N: SimdRealField> AbstractRotation<N, U2> for UnitComplex<N>
where
    N::Element: SimdRealField,
{
    #[inline]
    fn identity() -> Self {
        Self::identity()
    }
    #[inline]
    fn inverse(&self) -> Self {
        self.inverse()
    }
    #[inline]
    fn inverse_mut(&mut self) {
        self.inverse_mut()
    }
    #[inline]
    fn transform_vector(&self, v: &VectorN<N, U2>) -> VectorN<N, U2> {
        self * v
    }
    #[inline]
    fn transform_point(&self, p: &Point<N, U2>) -> Point<N, U2> {
        self * p
    }
    #[inline]
    fn inverse_transform_vector(&self, v: &VectorN<N, U2>) -> VectorN<N, U2> {
        self.inverse_transform_vector(v)
    }
    #[inline]
    fn inverse_transform_point(&self, p: &Point<N, U2>) -> Point<N, U2> {
        self.inverse_transform_point(p)
    }
}
| 24.4 | 92 | 0.586687 |
ac4c31b1ed617c361d0039c0ce2860603f5bf539 | 13,719 | // Copyright 2015 The GeoRust Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::errors::Error;
use crate::json::{self, Deserialize, Deserializer, JsonObject, JsonValue, Serialize, Serializer};
use crate::{Feature, FeatureCollection, Geometry};
use std::convert::TryFrom;
use std::fmt;
use std::iter::FromIterator;
use std::str::FromStr;
/// GeoJSON Objects
///
/// ```
/// use std::convert::TryInto;
/// use geojson::{Feature, GeoJson, Geometry, Value};
/// use serde_json::json;
/// let json_value = json!({
///     "type": "Feature",
///     "geometry": {
///         "type": "Point",
///         "coordinates": [102.0, 0.5]
///     },
///     "properties": null,
/// });
/// let feature: Feature = json_value.try_into().unwrap();
///
/// // Easily convert a feature to a GeoJson
/// let geojson: GeoJson = feature.into();
/// // and back again
/// let feature2: Feature = geojson.try_into().unwrap();
/// ```
/// [GeoJSON Format Specification § 3](https://tools.ietf.org/html/rfc7946#section-3)
#[derive(Clone, Debug, PartialEq)]
pub enum GeoJson {
    /// A single geometry object.
    Geometry(Geometry),
    /// A single feature object.
    Feature(Feature),
    /// A collection of feature objects.
    FeatureCollection(FeatureCollection),
}
impl<'a> From<&'a GeoJson> for JsonObject {
    /// Convert any GeoJSON object to its JSON-object representation by
    /// delegating to the conversion of the wrapped variant.
    fn from(geojson: &'a GeoJson) -> JsonObject {
        match geojson {
            GeoJson::Geometry(geometry) => geometry.into(),
            GeoJson::Feature(feature) => feature.into(),
            GeoJson::FeatureCollection(fc) => fc.into(),
        }
    }
}
impl From<GeoJson> for JsonValue {
    /// Convert an owned GeoJSON object into a JSON value; the heavy lifting
    /// is shared with the `&GeoJson -> JsonObject` conversion above.
    fn from(geojson: GeoJson) -> JsonValue {
        let object = JsonObject::from(&geojson);
        JsonValue::Object(object)
    }
}
impl<G: Into<Geometry>> From<G> for GeoJson {
    /// Wrap anything geometry-convertible in the `Geometry` variant.
    fn from(geometry: G) -> Self {
        let geometry: Geometry = geometry.into();
        GeoJson::Geometry(geometry)
    }
}
impl<G: Into<Geometry>> FromIterator<G> for GeoJson {
    /// Collect many geometries into a single `GeometryCollection` GeoJSON object.
    fn from_iter<I: IntoIterator<Item = G>>(iter: I) -> Self {
        use crate::Value;
        let geometries: Vec<Geometry> = iter.into_iter().map(Into::into).collect();
        GeoJson::Geometry(Geometry::new(Value::GeometryCollection(geometries)))
    }
}
// Wrap a `Feature` in the corresponding `GeoJson` variant.
impl From<Feature> for GeoJson {
    fn from(feature: Feature) -> Self {
        GeoJson::Feature(feature)
    }
}
// Wrap a `FeatureCollection` in the corresponding `GeoJson` variant.
impl From<FeatureCollection> for GeoJson {
    fn from(feature_collection: FeatureCollection) -> GeoJson {
        GeoJson::FeatureCollection(feature_collection)
    }
}
impl TryFrom<GeoJson> for Geometry {
type Error = Error;
fn try_from(value: GeoJson) -> Result<Self, Self::Error> {
match value {
GeoJson::Geometry(g) => Ok(g),
GeoJson::Feature(_) => Err(Error::ExpectedType {
expected: "Geometry".to_string(),
actual: "Feature".to_string(),
}),
GeoJson::FeatureCollection(_) => Err(Error::ExpectedType {
expected: "Geometry".to_string(),
actual: "FeatureCollection".to_string(),
}),
}
}
}
impl TryFrom<GeoJson> for Feature {
type Error = Error;
fn try_from(value: GeoJson) -> Result<Self, Self::Error> {
match value {
GeoJson::Geometry(_) => Err(Error::ExpectedType {
expected: "Feature".to_string(),
actual: "Geometry".to_string(),
}),
GeoJson::Feature(f) => Ok(f),
GeoJson::FeatureCollection(_) => Err(Error::ExpectedType {
expected: "Feature".to_string(),
actual: "FeatureCollection".to_string(),
}),
}
}
}
impl TryFrom<GeoJson> for FeatureCollection {
type Error = Error;
fn try_from(value: GeoJson) -> Result<Self, Self::Error> {
match value {
GeoJson::Geometry(_) => Err(Error::ExpectedType {
expected: "FeatureCollection".to_string(),
actual: "Geometry".to_string(),
}),
GeoJson::Feature(_) => Err(Error::ExpectedType {
expected: "FeatureCollection".to_string(),
actual: "Feature".to_string(),
}),
GeoJson::FeatureCollection(f) => Ok(f),
}
}
}
impl GeoJson {
    /// Converts a JSON object into a GeoJson object; equivalent to `Self::try_from(object)`.
    pub fn from_json_object(object: JsonObject) -> Result<Self, Error> {
        Self::try_from(object)
    }
    /// Converts a JSON Value into a GeoJson object.
    ///
    /// # Example
    /// ```
    /// use std::convert::TryInto;
    /// use geojson::{Feature, GeoJson, Geometry, Value};
    /// use serde_json::json;
    ///
    /// let json_value = json!({
    ///     "type": "Feature",
    ///     "geometry": {
    ///         "type": "Point",
    ///         "coordinates": [102.0, 0.5]
    ///     },
    ///     "properties": null,
    /// });
    ///
    /// assert!(json_value.is_object());
    ///
    /// let geojson: GeoJson = json_value.try_into().unwrap();
    ///
    /// assert_eq!(
    ///     geojson,
    ///     GeoJson::Feature(Feature {
    ///         bbox: None,
    ///         geometry: Some(Geometry::new(Value::Point(vec![102.0, 0.5]))),
    ///         id: None,
    ///         properties: None,
    ///         foreign_members: None,
    ///     })
    /// );
    /// ```
    pub fn from_json_value(value: JsonValue) -> Result<Self, Error> {
        Self::try_from(value)
    }
    /// Convenience method to convert to a JSON Value. Uses `From`.
    /// ```
    /// use std::convert::TryFrom;
    /// use geojson::GeoJson;
    /// use serde_json::json;
    ///
    /// let geojson = GeoJson::try_from( json!({
    ///        "type": "Feature",
    ///        "geometry": {
    ///            "type": "Point",
    ///            "coordinates": [102.0, 0.5]
    ///        },
    ///        "properties": {},
    ///       })).unwrap();
    ///
    /// let json_value = geojson.to_json_value();
    /// assert_eq!(json_value,
    ///     json!({
    ///        "type": "Feature",
    ///        "geometry": {
    ///            "type": "Point",
    ///            "coordinates": [102.0, 0.5]
    ///        },
    ///        "properties": {},
    ///       })
    ///     );
    /// ```
    pub fn to_json_value(self) -> JsonValue {
        JsonValue::from(self)
    }
    /// Deserialize a GeoJson object from an IO stream of JSON
    pub fn from_reader<R>(rdr: R) -> Result<Self, serde_json::Error>
    where
        R: std::io::Read,
    {
        serde_json::from_reader(rdr)
    }
}
impl TryFrom<JsonObject> for GeoJson {
type Error = Error;
fn try_from(object: JsonObject) -> Result<Self, Self::Error> {
let type_ = match object.get("type") {
Some(json::JsonValue::String(t)) => Type::from_str(t),
_ => return Err(Error::GeometryUnknownType("type".to_owned())),
};
let type_ = type_.ok_or(Error::EmptyType)?;
match type_ {
Type::Feature => Feature::try_from(object).map(GeoJson::Feature),
Type::FeatureCollection => {
FeatureCollection::try_from(object).map(GeoJson::FeatureCollection)
}
_ => Geometry::try_from(object).map(GeoJson::Geometry),
}
}
}
impl TryFrom<JsonValue> for GeoJson {
type Error = Error;
fn try_from(value: JsonValue) -> Result<Self, Self::Error> {
if let JsonValue::Object(obj) = value {
Self::try_from(obj)
} else {
Err(Error::GeoJsonExpectedObject(value))
}
}
}
// Internal tag mirroring the GeoJSON "type" member, used to dispatch parsing.
#[derive(PartialEq, Clone, Copy)]
enum Type {
    Point,
    MultiPoint,
    LineString,
    MultiLineString,
    Polygon,
    MultiPolygon,
    GeometryCollection,
    Feature,
    FeatureCollection,
}
impl Type {
    /// Map a GeoJSON "type" string to its tag; `None` for unknown strings.
    fn from_str(s: &str) -> Option<Self> {
        let ty = match s {
            "Point" => Type::Point,
            "MultiPoint" => Type::MultiPoint,
            "LineString" => Type::LineString,
            "MultiLineString" => Type::MultiLineString,
            "Polygon" => Type::Polygon,
            "MultiPolygon" => Type::MultiPolygon,
            "GeometryCollection" => Type::GeometryCollection,
            "Feature" => Type::Feature,
            "FeatureCollection" => Type::FeatureCollection,
            _ => return None,
        };
        Some(ty)
    }
}
// Serialize by first converting to the `JsonObject` representation.
impl Serialize for GeoJson {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        JsonObject::from(self).serialize(serializer)
    }
}
impl<'de> Deserialize<'de> for GeoJson {
    fn deserialize<D>(deserializer: D) -> Result<GeoJson, D::Error>
    where
        D: Deserializer<'de>,
    {
        // The aliased import brings the `serde::de::Error` trait into scope so
        // `D::Error::custom` resolves, without clashing with `crate::errors::Error`.
        use serde::de::Error as SerdeError;
        let val = JsonObject::deserialize(deserializer)?;
        GeoJson::from_json_object(val).map_err(|e| D::Error::custom(e.to_string()))
    }
}
/// # Example
///```
/// use geojson::GeoJson;
/// use std::str::FromStr;
///
/// let geojson_str = r#"{
///     "type": "FeatureCollection",
///     "features": [
///         {
///             "type": "Feature",
///             "properties": {},
///             "geometry": {
///                 "type": "Point",
///                 "coordinates": [
///                     -0.13583511114120483,
///                     51.5218870403801
///                 ]
///             }
///         }
///     ]
/// }
/// "#;
/// let geo_json = GeoJson::from_str(&geojson_str).unwrap();
/// if let GeoJson::FeatureCollection(collection) = geo_json {
///     assert_eq!(1, collection.features.len());
/// } else {
///     panic!("expected feature collection");
/// }
/// ```
impl FromStr for GeoJson {
    type Err = Error;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Parse the string into a JSON object, then into a GeoJSON object.
        get_object(s).and_then(GeoJson::from_json_object)
    }
}
fn get_object(s: &str) -> Result<json::JsonObject, Error> {
match ::serde_json::from_str(s) {
Ok(json::JsonValue::Object(object)) => Ok(object),
Ok(other) => Err(Error::ExpectedObjectValue(other)),
Err(serde_error) => Err(Error::MalformedJson(serde_error)),
}
}
impl fmt::Display for GeoJson {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
::serde_json::to_string(self)
.map_err(|_| fmt::Error)
.and_then(|s| f.write_str(&s))
}
}
impl fmt::Display for Feature {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
::serde_json::to_string(self)
.map_err(|_| fmt::Error)
.and_then(|s| f.write_str(&s))
}
}
impl fmt::Display for Geometry {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
::serde_json::to_string(self)
.map_err(|_| fmt::Error)
.and_then(|s| f.write_str(&s))
}
}
impl fmt::Display for FeatureCollection {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
::serde_json::to_string(self)
.map_err(|_| fmt::Error)
.and_then(|s| f.write_str(&s))
}
}
#[cfg(test)]
mod tests {
    use crate::{Error, Feature, GeoJson, Geometry, Value};
    use serde_json::json;
    use std::convert::TryInto;
    use std::str::FromStr;
    // Reading from a byte stream and converting from a JSON value must agree.
    #[test]
    fn test_geojson_from_reader() {
        let json_str = r#"{
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": [102.0, 0.5]
            },
            "properties": null
        }"#;
        let g1 = GeoJson::from_reader(json_str.as_bytes()).unwrap();
        let json_value = json!({
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": [102.0, 0.5]
            },
            "properties": null,
        });
        let g2: GeoJson = json_value.try_into().unwrap();
        assert_eq!(g1, g2);
    }
    // A JSON value converts into the expected `GeoJson::Feature` structure.
    #[test]
    fn test_geojson_from_value() {
        let json_value = json!({
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": [102.0, 0.5]
            },
            "properties": null,
        });
        assert!(json_value.is_object());
        let geojson: GeoJson = json_value.try_into().unwrap();
        assert_eq!(
            geojson,
            GeoJson::Feature(Feature {
                bbox: None,
                geometry: Some(Geometry::new(Value::Point(vec![102.0, 0.5]))),
                id: None,
                properties: None,
                foreign_members: None,
            })
        );
    }
    // Syntactically broken input must surface as `Error::MalformedJson`.
    #[test]
    fn test_invalid_json() {
        let geojson_str = r#"{
            "type": "FeatureCollection",
            "features": [
                !INTENTIONAL_TYPO! {
                    "type": "Feature",
                    "properties": {},
                    "geometry": {
                        "type": "Point",
                        "coordinates": [
                            -0.13583511114120483,
                            51.5218870403801
                        ]
                    }
                }
            ]
        }"#;
        assert!(matches!(
            GeoJson::from_str(geojson_str),
            Err(Error::MalformedJson(_))
        ))
    }
}
| 28.700837 | 97 | 0.534223 |
e87c0b01bda099c884db868228f9812381a184e3 | 1,414 | // plotters-iced
//
// Iced backend for Plotters
// Copyright: 2021, Joylei <[email protected]>
// License: MIT
mod svg;
mod widget;
use dodrio::bumpalo;
use plotters_backend::BackendColor;
use wasm_bindgen::prelude::*;
pub use widget::ChartWidget;
#[wasm_bindgen]
extern "C" {
    // Binding to the browser's `console.log` for debug output from wasm.
    #[wasm_bindgen(js_namespace = console)]
    fn log(s: &str);
}
/// Helper: format a value into a string allocated in the given bump arena;
/// the returned `&str` borrows from (and lives as long as) the arena.
trait AsBumpStr {
    fn as_bump_str<'b>(&self, bump: &'b bumpalo::Bump) -> &'b str;
}
// Generates an `AsBumpStr` impl for a type using its `Display` formatting.
macro_rules! impl_as_bump_str {
    ($t:ident) => {
        impl AsBumpStr for $t {
            #[inline(always)]
            fn as_bump_str<'b>(&self, bump: &'b bumpalo::Bump) -> &'b str {
                bumpalo::format!(
                in bump, "{}", self)
                .into_bump_str()
            }
        }
    };
}
// Numeric types rendered into bump-allocated strings via `Display`.
impl_as_bump_str!(u16);
impl_as_bump_str!(i32);
impl_as_bump_str!(u32);
impl_as_bump_str!(f64);
// `&str` gets a hand-written impl: the macro only accepts plain identifiers.
impl AsBumpStr for &str {
    #[inline(always)]
    fn as_bump_str<'b>(&self, bump: &'b bumpalo::Bump) -> &'b str {
        bumpalo::format!(
        in bump, "{}", self)
        .into_bump_str()
    }
}
// Renders a color as an SVG-compatible `#RRGGBB` hex string; the alpha
// component of `BackendColor` is not included here.
impl AsBumpStr for BackendColor {
    #[inline(always)]
    fn as_bump_str<'b>(&self, bump: &'b bumpalo::Bump) -> &'b str {
        let (r, g, b) = self.rgb;
        bumpalo::format!(
        in bump, "#{:02X}{:02X}{:02X}", r,g,b)
        .into_bump_str()
    }
}
| 23.180328 | 76 | 0.533946 |
679a194e91f6c7617ac30eb7ce68ef5c79e28f29 | 8,191 | use crate::error::Error;
use std::fmt::Display;
use std::result::Result;
/// Location of a token within the parsed query text.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Position {
    // Offset from the start of the input; presumably in bytes — TODO confirm.
    pub offset: usize,
    // Line number; 0 marks an unknown position (see `Position::unknown`).
    pub line: u32,
    // Column within the line.
    pub column: usize,
}
impl Position {
pub fn unknown() -> Position {
Position {
offset: 0,
line: 0,
column: 0,
}
}
}
impl Display for Position {
    /// Human-readable location: the line number is only mentioned past line 1,
    /// and line 0 is the "unknown position" sentinel.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.line {
            0 => write!(f, "(unknown position)"),
            1 => write!(f, "position {}", self.column),
            _ => write!(f, "line {}, position {}", self.line, self.column),
        }
    }
}
/// A single action parameter: a literal string, or a link to another query.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum ActionParameter {
    String(String, Position),
    Link(String, Position),
}
impl ActionParameter {
    /// Create a plain string parameter with an unknown source position.
    pub fn new(parameter: &str) -> ActionParameter {
        ActionParameter::String(parameter.to_owned(), Position::unknown())
    }
    /// Create a string parameter carrying the position where it was parsed.
    pub fn new_parsed(parameter: String, position: Position) -> ActionParameter {
        ActionParameter::String(parameter, position)
    }
    /// Return the textual content of the parameter regardless of its kind.
    pub fn to_string(&self) -> String {
        // Both variants carry their text as the first field.
        match self {
            ActionParameter::String(s, _) | ActionParameter::Link(s, _) => s.to_string(),
        }
    }
    /// Encode the parameter for inclusion in a query string.
    ///
    /// # Panics
    /// Panics for `Link` parameters, which are not supported yet.
    pub fn encode(&self) -> String {
        match self {
            ActionParameter::String(s, _) => s.to_string(),
            // `..` avoids binding (and warning about) the unused link text.
            ActionParameter::Link(..) => panic!("Link not supported yet"),
        }
    }
}
/// A single action invocation: name, parameters, and where it was parsed.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ActionRequest {
    pub name: String,
    pub position: Position,
    pub parameters: Vec<ActionParameter>,
}
impl ActionRequest {
    /// Encode the request as `name-param1-param2-...`; a request without
    /// parameters encodes as the bare name.
    pub fn encode(&self) -> String {
        // Joining the name with the encoded parameters collapses to the bare
        // name when the parameter list is empty.
        let mut parts = vec![self.name.clone()];
        parts.extend(self.parameters.iter().map(|p| p.encode()));
        parts.join("-")
    }
}
/// Header of a query segment: its name, nesting level, parameters, and the
/// position at which it was parsed.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct SegmentHeader {
    pub name: String,
    // Nesting level; encoded as that many leading dashes (must be >= 1).
    pub level: usize,
    pub position: Position,
    pub parameters: Vec<ActionParameter>,
}
impl SegmentHeader {
    /// Construct a nameless, parameterless header at `level`, parsed at `position`.
    pub fn new_parsed_minimal(level: usize, position: Position) -> Self {
        SegmentHeader {
            name: String::new(),
            level,
            position,
            parameters: vec![],
        }
    }
    /// Construct a header mirroring the name and parameters of `action_request`.
    pub fn new_parsed_from_action_request(
        level: usize,
        position: Position,
        action_request: &ActionRequest,
    ) -> Self {
        SegmentHeader {
            name: action_request.name.to_owned(),
            level,
            position,
            parameters: action_request.parameters.clone(),
        }
    }
    /// Encode the header as `level` dashes, the name, then `-`-separated parameters.
    ///
    /// # Panics
    /// Panics if `level` is zero, or if parameters are present on a nameless
    /// header (the encoding would be ambiguous).
    pub fn encode(&self) -> String {
        assert!(self.level >= 1);
        let mut encoded = "-".repeat(self.level);
        encoded.push_str(&self.name);
        if !self.parameters.is_empty() {
            assert!(!self.name.is_empty());
            for parameter in &self.parameters {
                encoded.push('-');
                encoded.push_str(&parameter.encode());
            }
        }
        encoded
    }
}
/// One segment of a query: an optional header followed by a chain of actions.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct QuerySegment {
    pub header: Option<SegmentHeader>,
    pub query: Vec<ActionRequest>,
}
impl QuerySegment {
    /// Create an empty segment with no header and no actions.
    pub fn new() -> QuerySegment {
        QuerySegment {
            header: None,
            query: vec![],
        }
    }
    /// Create a segment from an optional header and a list of action requests.
    pub fn new_from(header: Option<SegmentHeader>, query: Vec<ActionRequest>) -> QuerySegment {
        QuerySegment { header, query }
    }
    /// Encode the segment as `header/action1/action2/...`; the header or the
    /// action chain is omitted when absent or empty.
    pub fn encode(&self) -> String {
        let query = self
            .query
            .iter()
            .map(|x| x.encode())
            .collect::<Vec<_>>()
            .join("/");
        match &self.header {
            Some(header) if query.is_empty() => header.encode(),
            Some(header) => format!("{}/{}", header.encode(), query),
            None => query,
        }
    }
}
/// A complete query: an ordered list of segments, encoded `/`-separated.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Query {
    pub segments: Vec<QuerySegment>,
}
impl Query {
    /// Create a query with no segments.
    pub fn new() -> Query {
        Query { segments: vec![] }
    }
    /// Append a new, empty level-1 segment named `name` and return a mutable
    /// reference to it so the caller can fill in its actions.
    pub fn add_segment(&mut self, name: &str) -> &mut QuerySegment {
        let header = SegmentHeader {
            name: name.to_owned(),
            level: 1,
            position: Position::unknown(),
            parameters: vec![],
        };
        self.segments.push(QuerySegment::new_from(Some(header), vec![]));
        self.segments.last_mut().unwrap()
    }
    /// Encode all segments, separated by `/`.
    pub fn encode(&self) -> String {
        let encoded: Vec<String> = self.segments.iter().map(|s| s.encode()).collect();
        encoded.join("/")
    }
}
// Borrowed view over a parameter slice, consumed front-to-back by
// `try_parameters_into`.
#[derive(Debug)]
pub struct ActionParametersSlice<'a>(pub &'a [ActionParameter]);
/// Evaluation context: evaluates a query string against an input value.
pub trait Environment<T> {
    fn eval(&mut self, input: T, query: &str) -> Result<T, Error>;
}
/// Consume parameters from `self`, converting them to `T` within environment `E`.
pub trait TryActionParametersInto<T, E> {
    fn try_parameters_into(&mut self, env: &mut E) -> Result<T, Error>;
}
/// Fallible conversion from parameter text; errors are human-readable strings.
pub trait TryParameterFrom
where
    Self: std::marker::Sized,
{
    fn try_parameter_from(text: &str) -> Result<Self, String>;
}
impl TryParameterFrom for i32 {
    /// Parse the text as a decimal integer, with a readable error message.
    fn try_parameter_from(text: &str) -> Result<Self, String> {
        match text.parse() {
            Ok(value) => Ok(value),
            Err(_) => Err(format!("Can't parse '{}' as integer", text)),
        }
    }
}
// Strings pass through unchanged; this conversion cannot fail.
impl TryParameterFrom for String {
    fn try_parameter_from(text: &str) -> Result<Self, String> {
        Ok(text.to_owned())
    }
}
impl<'a, T, E> TryActionParametersInto<T, E> for ActionParametersSlice<'a>
where
    T: TryParameterFrom,
{
    /// Pop the first parameter off the slice and convert it to `T`.
    ///
    /// On success the slice is advanced past the consumed parameter, so
    /// repeated calls yield successive parameters. Errors:
    /// `ArgumentNotSpecified` when the slice is empty, `ParameterError` when
    /// the text cannot be converted, and a generic error for `Link`
    /// parameters (not implemented yet).
    fn try_parameters_into(&mut self, env: &mut E) -> Result<T, Error> {
        // `env` is unused here but required by the trait signature.
        if self.0.is_empty() {
            Err(Error::ArgumentNotSpecified)
        } else {
            match &self.0[0] {
                ActionParameter::String(x, position) => {
                    let v: T =
                        T::try_parameter_from(&x).map_err(|message| Error::ParameterError {
                            message,
                            position: position.clone(),
                        })?;
                    // Advance past the parameter that was just consumed.
                    self.0 = &self.0[1..];
                    Ok(v)
                }
                _ => Err(Error::General {
                    message: "Not implemented".to_owned(),
                }),
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Successive calls consume parameters left to right.
    #[test]
    fn parameters_into_i32() -> Result<(), Box<dyn std::error::Error>> {
        let v = [ActionParameter::new("123"), ActionParameter::new("234")];
        let mut par = ActionParametersSlice(&v[..]);
        let x: i32 = par.try_parameters_into(&mut ())?;
        assert_eq!(x, 123);
        let x: i32 = par.try_parameters_into(&mut ())?;
        assert_eq!(x, 234);
        Ok(())
    }
    // Mixed target types may be drawn from the same slice.
    #[test]
    fn parameters_into_str() -> Result<(), Box<dyn std::error::Error>> {
        let v = [ActionParameter::new("123"), ActionParameter::new("234")];
        let mut par = ActionParametersSlice(&v[..]);
        let x: String = par.try_parameters_into(&mut ())?;
        assert_eq!(x, "123");
        let x: i32 = par.try_parameters_into(&mut ())?;
        assert_eq!(x, 234);
        Ok(())
    }
    #[test]
    fn encode_parameter() -> Result<(), Box<dyn std::error::Error>> {
        assert_eq!(ActionParameter::new("123").encode(), "123");
        Ok(())
    }
    // A level-1 header encodes with a single leading dash.
    #[test]
    fn encode_query_segment1() -> Result<(), Box<dyn std::error::Error>> {
        let mut query = Query::new();
        query.add_segment("test");
        assert_eq!(query.segments[0].header.as_ref().unwrap().name, "test");
        assert_eq!(query.encode(), "-test");
        Ok(())
    }
}
| 27.579125 | 95 | 0.519961 |
2faf1843e4bd1266d6301df12cde6437e00cd975 | 19,132 | // Parity Substrate style ABIs/Abi
use crate::parser::pt;
use crate::sema::ast;
use crate::sema::tags::render;
use contract_metadata::*;
use num_traits::ToPrimitive;
use semver::Version;
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value};
use std::convert::TryInto;
#[derive(Deserialize, Serialize)]
pub struct Abi {
storage: Storage,
types: Vec<Type>,
pub spec: Spec,
}
impl Abi {
pub fn get_function(&self, name: &str) -> Option<&Message> {
self.spec.messages.iter().find(|m| name == m.name)
}
}
#[derive(Deserialize, Serialize, PartialEq)]
pub struct ArrayDef {
array: Array,
}
#[derive(Deserialize, Serialize, PartialEq)]
pub struct Array {
len: usize,
#[serde(rename = "type")]
ty: usize,
}
#[derive(Deserialize, Serialize, PartialEq)]
pub struct SequenceDef {
sequence: Sequence,
}
#[derive(Deserialize, Serialize, PartialEq)]
pub struct Sequence {
#[serde(rename = "type")]
ty: usize,
}
#[derive(Deserialize, Serialize, PartialEq)]
#[serde(untagged)]
enum Type {
Builtin { def: PrimitiveDef },
BuiltinArray { def: ArrayDef },
BuiltinSequence { def: SequenceDef },
Struct { path: Vec<String>, def: Composite },
Enum { path: Vec<String>, def: EnumDef },
}
#[derive(Deserialize, Serialize, PartialEq)]
struct BuiltinType {
id: String,
def: String,
}
#[derive(Deserialize, Serialize, PartialEq)]
struct EnumVariant {
name: String,
discriminant: usize,
}
#[derive(Deserialize, Serialize, PartialEq)]
struct EnumDef {
variant: Enum,
}
#[derive(Deserialize, Serialize, PartialEq)]
struct Enum {
variants: Vec<EnumVariant>,
}
#[derive(Deserialize, Serialize, PartialEq)]
struct Composite {
composite: StructFields,
}
#[derive(Deserialize, Serialize, PartialEq)]
struct StructFields {
fields: Vec<StructField>,
}
#[derive(Deserialize, Serialize, PartialEq)]
struct PrimitiveDef {
primitive: String,
}
#[derive(Deserialize, Serialize, PartialEq)]
struct StructField {
#[serde(skip_serializing_if = "Option::is_none")]
name: Option<String>,
#[serde(rename = "type")]
ty: usize,
}
#[derive(Deserialize, Serialize)]
pub struct Constructor {
pub name: String,
pub selector: String,
pub docs: Vec<String>,
args: Vec<Param>,
}
impl Constructor {
/// Build byte string from
pub fn selector(&self) -> Vec<u8> {
parse_selector(&self.selector)
}
}
#[derive(Deserialize, Serialize)]
pub struct Message {
pub name: String,
pub selector: String,
pub docs: Vec<String>,
mutates: bool,
payable: bool,
args: Vec<Param>,
return_type: Option<ParamType>,
}
impl Message {
/// Build byte string from
pub fn selector(&self) -> Vec<u8> {
parse_selector(&self.selector)
}
}
#[derive(Deserialize, Serialize)]
pub struct Event {
docs: Vec<String>,
name: String,
args: Vec<ParamIndexed>,
}
#[derive(Deserialize, Serialize)]
pub struct Spec {
pub constructors: Vec<Constructor>,
pub messages: Vec<Message>,
pub events: Vec<Event>,
}
#[derive(Deserialize, Serialize)]
struct Param {
name: String,
#[serde(rename = "type")]
ty: ParamType,
}
#[derive(Deserialize, Serialize)]
struct ParamIndexed {
#[serde(flatten)]
param: Param,
indexed: bool,
}
#[derive(Deserialize, Serialize)]
struct ParamType {
#[serde(rename = "type")]
ty: usize,
display_name: Vec<String>,
}
#[derive(Deserialize, Serialize)]
struct Storage {
#[serde(rename = "struct")]
structs: StorageStruct,
}
#[derive(Deserialize, Serialize)]
struct StorageStruct {
fields: Vec<StorageLayout>,
}
#[derive(Deserialize, Serialize)]
struct StorageLayout {
name: String,
layout: LayoutField,
}
#[derive(Deserialize, Serialize)]
struct LayoutField {
cell: LayoutFieldCell,
}
#[derive(Deserialize, Serialize)]
struct LayoutFieldCell {
key: String,
ty: usize,
}
/// Create a new registry and create new entries. Note that the registry is
/// accessed by number, and the first entry is 1, not 0.
impl Abi {
/// Add a type to the list unless already present
fn register_ty(&mut self, ty: Type) -> usize {
match self.types.iter().position(|t| *t == ty) {
Some(i) => i + 1,
None => {
self.types.push(ty);
self.types.len()
}
}
}
/// Returns index to builtin type in registry. Type is added if not already present
fn builtin_type(&mut self, ty: &str) -> usize {
self.register_ty(Type::Builtin {
def: PrimitiveDef {
primitive: ty.to_owned(),
},
})
}
/// Returns index to builtin type in registry. Type is added if not already present
fn builtin_array_type(&mut self, elem: usize, array_len: usize) -> usize {
self.register_ty(Type::BuiltinArray {
def: ArrayDef {
array: Array {
len: array_len,
ty: elem,
},
},
})
}
/// Returns index to builtin type in registry. Type is added if not already present
fn builtin_slice_type(&mut self, elem: usize) -> usize {
self.register_ty(Type::BuiltinSequence {
def: SequenceDef {
sequence: Sequence { ty: elem },
},
})
}
/// Returns index to builtin type in registry. Type is added if not already present
fn builtin_enum_type(&mut self, e: &ast::EnumDecl) -> usize {
let mut variants: Vec<EnumVariant> = e
.values
.iter()
.map(|(key, val)| EnumVariant {
name: key.to_owned(),
discriminant: val.1,
})
.collect();
variants.sort_by(|a, b| a.discriminant.partial_cmp(&b.discriminant).unwrap());
self.register_ty(Type::Enum {
path: vec![e.name.to_owned()],
def: EnumDef {
variant: Enum { variants },
},
})
}
/// Adds struct type to registry. Does not check for duplication (yet)
fn struct_type(&mut self, path: Vec<String>, fields: Vec<StructField>) -> usize {
self.register_ty(Type::Struct {
path,
def: Composite {
composite: StructFields { fields },
},
})
}
}
pub fn load(bs: &str) -> Result<Abi, serde_json::error::Error> {
serde_json::from_str(bs)
}
fn tags(contract_no: usize, tagname: &str, ns: &ast::Namespace) -> Vec<String> {
ns.contracts[contract_no]
.tags
.iter()
.filter_map(|tag| {
if tag.tag == tagname {
Some(tag.value.to_owned())
} else {
None
}
})
.collect()
}
/// Generate the metadata for Substrate 2.0
pub fn metadata(contract_no: usize, code: &[u8], ns: &ast::Namespace) -> Value {
let hash = blake2_rfc::blake2b::blake2b(32, &[], code);
let version = Version::parse(env!("CARGO_PKG_VERSION")).unwrap();
let language = SourceLanguage::new(Language::Solidity, version.clone());
let compiler = SourceCompiler::new(Compiler::Solang, version);
let code_hash: [u8; 32] = hash.as_bytes().try_into().unwrap();
let source_wasm = SourceWasm::new(code.to_vec());
let source = Source::new(Some(source_wasm), CodeHash(code_hash), language, compiler);
let mut builder = Contract::builder();
// Add our name and tags
builder.name(ns.contracts[contract_no].name.to_string());
let mut description = tags(contract_no, "title", ns);
description.extend(tags(contract_no, "notice", ns));
if !description.is_empty() {
builder.description(description.join("\n"));
};
let authors = tags(contract_no, "author", ns);
if !authors.is_empty() {
builder.authors(authors);
} else {
builder.authors(vec!["unknown"]);
}
// FIXME: contract-metadata wants us to provide a version number, but there is no version in the solidity source
// code. Since we must provide a valid semver version, we just provide a bogus value.Abi
builder.version(Version::new(0, 0, 1));
let contract = builder.build().unwrap();
// generate the abi for our contract
let abi = gen_abi(contract_no, ns);
let mut abi_json: Map<String, Value> = Map::new();
abi_json.insert(
String::from("types"),
serde_json::to_value(&abi.types).unwrap(),
);
abi_json.insert(
String::from("spec"),
serde_json::to_value(&abi.spec).unwrap(),
);
abi_json.insert(
String::from("storage"),
serde_json::to_value(&abi.storage).unwrap(),
);
let metadata = ContractMetadata::new(source, contract, None, abi_json);
// serialize to json
serde_json::to_value(&metadata).unwrap()
}
fn gen_abi(contract_no: usize, ns: &ast::Namespace) -> Abi {
let mut abi = Abi {
types: Vec::new(),
storage: Storage {
structs: StorageStruct { fields: Vec::new() },
},
spec: Spec {
constructors: Vec::new(),
messages: Vec::new(),
events: Vec::new(),
},
};
let fields = ns.contracts[contract_no]
.layout
.iter()
.filter_map(|layout| {
let var = &ns.contracts[layout.contract_no].variables[layout.var_no];
// mappings and large types cannot be represented
if !var.ty.contains_mapping(ns) && var.ty.fits_in_memory(ns) {
Some(StorageLayout {
name: var.name.to_string(),
layout: LayoutField {
cell: LayoutFieldCell {
key: format!("0x{:064X}", layout.slot),
ty: ty_to_abi(&var.ty, ns, &mut abi).ty,
},
},
})
} else {
None
}
})
.collect();
abi.storage.structs.fields = fields;
let mut constructors = ns.contracts[contract_no]
.functions
.iter()
.filter_map(|function_no| {
let f = &ns.functions[*function_no];
if f.is_constructor() {
Some(Constructor {
name: String::from("new"),
selector: render_selector(f),
args: f
.params
.iter()
.map(|p| parameter_to_abi(p, ns, &mut abi))
.collect(),
docs: vec![render(&f.tags)],
})
} else {
None
}
})
.collect::<Vec<Constructor>>();
if let Some((f, _)) = &ns.contracts[contract_no].default_constructor {
constructors.push(Constructor {
name: String::from("new"),
selector: render_selector(f),
args: f
.params
.iter()
.map(|p| parameter_to_abi(p, ns, &mut abi))
.collect(),
docs: vec![render(&f.tags)],
});
}
let messages = ns.contracts[contract_no]
.all_functions
.keys()
.filter_map(|function_no| {
let func = &ns.functions[*function_no];
if let Some(base_contract_no) = func.contract_no {
if ns.contracts[base_contract_no].is_library() {
return None;
}
}
Some(func)
})
.filter(|f| match f.visibility {
pt::Visibility::Public(_) | pt::Visibility::External(_) => {
f.ty == pt::FunctionTy::Function
}
_ => false,
})
.map(|f| {
let payable = matches!(f.mutability, ast::Mutability::Payable(_));
Message {
name: f.name.to_owned(),
mutates: matches!(
f.mutability,
ast::Mutability::Payable(_) | ast::Mutability::Nonpayable(_)
),
payable,
return_type: match f.returns.len() {
0 => None,
1 => Some(ty_to_abi(&f.returns[0].ty, ns, &mut abi)),
_ => {
let fields = f
.returns
.iter()
.map(|f| StructField {
name: if f.name.is_empty() {
None
} else {
Some(f.name.to_string())
},
ty: ty_to_abi(&f.ty, ns, &mut abi).ty,
})
.collect();
Some(ParamType {
ty: abi.struct_type(Vec::new(), fields),
display_name: vec![],
})
}
},
selector: render_selector(f),
args: f
.params
.iter()
.map(|p| parameter_to_abi(p, ns, &mut abi))
.collect(),
docs: vec![render(&f.tags)],
}
})
.collect();
let events = ns.contracts[contract_no]
.sends_events
.iter()
.map(|event_no| {
let event = &ns.events[*event_no];
let name = event.name.to_owned();
let args = event
.fields
.iter()
.map(|p| ParamIndexed {
param: parameter_to_abi(p, ns, &mut abi),
indexed: p.indexed,
})
.collect();
let docs = vec![render(&event.tags)];
Event { docs, name, args }
})
.collect();
abi.spec = Spec {
constructors,
messages,
events,
};
abi
}
fn ty_to_abi(ty: &ast::Type, ns: &ast::Namespace, registry: &mut Abi) -> ParamType {
match ty {
ast::Type::Enum(n) => ParamType {
ty: registry.builtin_enum_type(&ns.enums[*n]),
display_name: vec![ns.enums[*n].name.to_owned()],
},
ast::Type::Bytes(n) => {
let elem = registry.builtin_type("u8");
ParamType {
ty: registry.builtin_array_type(elem, *n as usize),
display_name: vec![],
}
}
ast::Type::Mapping(_, _) => unreachable!(),
ast::Type::Array(ty, dims) => {
let mut param_ty = ty_to_abi(ty, ns, registry);
for d in dims {
if let Some(d) = d {
param_ty = ParamType {
ty: registry.builtin_array_type(param_ty.ty, d.to_usize().unwrap()),
display_name: vec![],
}
} else {
param_ty = ParamType {
ty: registry.builtin_slice_type(param_ty.ty),
display_name: vec![],
}
}
}
param_ty
}
ast::Type::StorageRef(_, ty) => ty_to_abi(ty, ns, registry),
ast::Type::Ref(ty) => ty_to_abi(ty, ns, registry),
ast::Type::Bool | ast::Type::Uint(_) | ast::Type::Int(_) => {
let scalety = match ty {
ast::Type::Bool => "bool".into(),
// Substrate doesn't like primitive types which aren't a power of 2
// The abi encoder/decoder fixes this automatically
ast::Type::Uint(n) => format!("u{}", n.next_power_of_two()),
ast::Type::Int(n) => format!("i{}", n.next_power_of_two()),
_ => unreachable!(),
};
ParamType {
ty: registry.builtin_type(&scalety),
display_name: vec![scalety.to_string()],
}
}
ast::Type::Address(_) | ast::Type::Contract(_) => {
let elem = registry.builtin_type("u8");
let ty = registry.builtin_array_type(elem, 32);
ParamType {
ty: registry.struct_type(
vec!["AccountId".to_owned()],
vec![StructField { name: None, ty }],
),
display_name: vec!["AccountId".to_owned()],
}
}
ast::Type::Struct(n) => {
let mut display_name = vec![ns.structs[*n].name.to_owned()];
if let Some(contract_name) = &ns.structs[*n].contract {
display_name.insert(0, contract_name.to_owned());
}
let def = &ns.structs[*n];
let fields = def
.fields
.iter()
.map(|f| StructField {
name: Some(f.name.to_string()),
ty: ty_to_abi(&f.ty, ns, registry).ty,
})
.collect();
ParamType {
ty: registry.struct_type(display_name.clone(), fields),
display_name,
}
}
ast::Type::DynamicBytes => {
let elem = registry.builtin_type("u8");
ParamType {
ty: registry.builtin_slice_type(elem),
display_name: vec![String::from("Vec")],
}
}
ast::Type::String => ParamType {
ty: registry.builtin_type("str"),
display_name: vec![String::from("String")],
},
ast::Type::InternalFunction { .. } => ParamType {
ty: registry.builtin_type("u32"),
display_name: vec![String::from("FunctionSelector")],
},
ast::Type::ExternalFunction { .. } => {
let fields = vec![
StructField {
name: None,
ty: ty_to_abi(&ast::Type::Address(false), ns, registry).ty,
},
StructField {
name: None,
ty: ty_to_abi(&ast::Type::Uint(32), ns, registry).ty,
},
];
let display_name = vec![String::from("ExternalFunction")];
ParamType {
ty: registry.struct_type(display_name.clone(), fields),
display_name,
}
}
_ => unreachable!(),
}
}
fn parameter_to_abi(param: &ast::Parameter, ns: &ast::Namespace, registry: &mut Abi) -> Param {
Param {
name: param.name.to_string(),
ty: ty_to_abi(¶m.ty, ns, registry),
}
}
/// Given an u32 selector, generate a byte string like: 0xF81E7E1A
fn render_selector(f: &ast::Function) -> String {
format!("0x{}", hex::encode(f.selector().to_be_bytes()))
}
/// Given a selector like "0xF81E7E1A", parse the bytes. This function
/// does not validate the input.
fn parse_selector(selector: &str) -> Vec<u8> {
hex::decode(&selector[2..]).unwrap()
}
| 28.944024 | 116 | 0.51636 |
fb88562de558edb9f0f2fc3be4451d8c5a67d34b | 13,403 | use std::path::PathBuf;
use std::str::FromStr;
use std::fs;
use std::result::Result;
use std::error::Error;
use serde_json;
use serde_json::{json, Value, Map};
use super::super::types::Config;
pub fn build(asset_map: Value, config: &Config, dist_folder: Option<&str>) -> Result<(String), Box<dyn Error>> {
let target_dist_folder = dist_folder.unwrap_or("dist");
let target_dist_path = PathBuf::from_str(format!("{}/{}/package.json", &config.project_root.display(), target_dist_folder).as_str())?;
let application_path = &asset_map["assets/application.js"];
let application_name = &config.application_name;
let mut original_env = config.env.clone();
let env = original_env.as_object_mut().unwrap();
let (mut default_app_value, default_fastboot_whitelist) = (Value::Object(Map::new()), Vec::new());
let target_app = env.get_mut("APP").unwrap_or(&mut default_app_value).as_object_mut().unwrap();
target_app.insert(String::from_str("autoboot")?, Value::Bool(false));
target_app.insert(String::from_str("name")?, Value::String(config.env["modulePrefix"].as_str().unwrap_or("frontend").to_string()));
target_app.insert(String::from_str("version")?, Value::String("0.0.0+b5f80b0d".to_string()));
let final_app = serde_json::to_value(target_app)?;
env.insert(String::from_str("APP")?, final_app);
env.insert(String::from_str("exportApplicationGlobal")?, Value::Bool(true));
env.insert(String::from_str("isModuleUnification")?, Value::Bool(true));
let host_whitelist = config.env["fastboot"]["hostWhitelist"].as_array().unwrap_or(&default_fastboot_whitelist);
let json = json!({
"dependencies": {},
"fastboot": {
"appName": application_name,
"config": {
application_name: env
},
"hostWhitelist": if host_whitelist.len() == 0 {
Value::Array(Vec::new())
} else {
Value::Array(host_whitelist.to_vec())
},
"manifest": {
"appFiles": if config.env["memserver"]["enabled"].as_bool().unwrap_or(false) {
vec![application_path, &asset_map["assets/memserver.js"]]
} else {
vec![application_path]
},
"htmlFile": "index.html",
"vendorFiles": [asset_map["assets/vendor.js"].as_str().unwrap()]
},
"moduleWhitelist": ["node-fetch", "abortcontroller-polyfill"],
"schemaVersion": 3
}
});
let json_string = serde_json::to_string_pretty(&json)?;
fs::write(target_dist_path, &json_string)?;
return Ok(json_string);
}
#[cfg(test)]
mod tests {
use std::env;
use super::*;
use std::path::PathBuf;
use serde_json::json;
use std::collections::HashMap;
use super::super::super::types::BuildCache;
fn get_development_env() -> Value {
return json!({
"ember-resolver": {
"features": {
"EMBER_RESOLVER_MODULE_UNIFICATION": true
}
},
"modulePrefix": "dummyapp",
"environment": "development",
"rootURL": "/",
"locationType": "auto",
"fastboot": {
"hostWhitelist": [
"^localhost:\\d+$"
]
},
"ember-devtools": {
"global": true,
"enabled": true
},
"memserver": {
"minify": false,
"enabled": false
},
"EmberENV": {
"FEATURES": {
"ember-module-unification": true
},
"EXTEND_PROTOTYPES": {
"Date": false
}
},
"APP": {
"API_HOST": "http://localhost:3000"
}
});
}
fn get_production_env() -> Value {
return json!({
"ember-resolver": {
"features": {
"EMBER_RESOLVER_MODULE_UNIFICATION": true
}
},
"modulePrefix": "dummyapp",
"environment": "production",
"rootURL": "/",
"locationType": "auto",
"fastboot": {
"hostWhitelist": [
"^localhost:\\d+$"
]
},
"ember-devtools": {
"global": true,
"enabled": false
},
"memserver": {
"minify": true,
"enabled": false
},
"EmberENV": {
"FEATURES": {
"ember-module-unification": true
},
"EXTEND_PROTOTYPES": {
"Date": false
}
},
"APP": {
"API_HOST": "http://localhost:3000"
}
});
}
fn setup_test() -> Result<(PathBuf, Value, Value), Box<dyn Error>> {
let current_directory = env::current_dir()?;
let project_directory = format!("{}/ember-app-boilerplate", current_directory.to_string_lossy());
env::set_current_dir(&project_directory)?;
fs::remove_dir_all("tmp").unwrap_or_else(|_| {});
fs::remove_dir_all("dist").unwrap_or_else(|_| {});
fs::create_dir_all("tmp/assets")?; // NOTE: very important breaks other tests otherwise
fs::create_dir_all("dist")?;
let example_asset_map: Value = json!({
"assets/application.js": "assets/application-df0b6cbf528e46c0aa02b74f24252ffd.js",
"assets/vendor.js": "assets/vendor-339579265dd86542580d6f7cc296dac7.js",
"assets/memserver.js": "assets/memserver-zaza79265dd86542580d6f7cc296dac7"
});
let second_example_asset_map: Value = json!({
"assets/application.js": "assets/application-aaaa6cbf528e46c0aa02b74f24252ffd.js",
"assets/vendor.js": "assets/vendor-aaaa79265dd86542580d6f7cc296dac7.js"
});
return Ok((current_directory, example_asset_map, second_example_asset_map));
}
fn finalize_test(actual_current_directory: PathBuf) -> Result<(), Box<dyn Error>> {
fs::remove_dir_all("tmp")?;
fs::remove_dir_all("dist")?;
env::set_current_dir(&actual_current_directory)?;
return Ok(());
}
#[test]
fn build_works_for_and_asset_map_and_env() -> Result<(), Box<dyn Error>> {
let (current_directory, example_asset_map, _) = setup_test()?;
let app_files = example_asset_map.get("assets/application.js").unwrap().clone();
let vendor_files = example_asset_map.get("assets/vendor.js").unwrap().clone();
let development_env = get_development_env();
let module_prefix = development_env.get("modulePrefix").unwrap().clone();
assert_eq!(fs::metadata("tmp/package.json").is_ok(), false);
let config = Config::build(
development_env,
HashMap::new(),
BuildCache::new()
);
build(example_asset_map, &config, Some("tmp"))?;
let package_json: Value =
serde_json::from_str(fs::read_to_string("tmp/package.json")?.as_str())?;
assert_eq!(package_json["dependencies"], json!({}));
assert_eq!(package_json["fastboot"]["appName"].as_str().unwrap(), "dummyapp");
assert_eq!(package_json["fastboot"]["config"]["dummyapp"]["APP"], json!({
"API_HOST": "http://localhost:3000",
"autoboot": false,
"name": module_prefix,
"version": "0.0.0+b5f80b0d"
}));
assert_eq!(package_json["fastboot"]["manifest"], json!({
"appFiles": vec![app_files],
"htmlFile": "index.html",
"vendorFiles": vec![vendor_files]
}));
assert_eq!(package_json["fastboot"]["hostWhitelist"].as_array().unwrap(), &vec![
Value::String("^localhost:\\d+$".to_string())
]);
assert_eq!(package_json["fastboot"]["moduleWhitelist"].as_array().unwrap(), &vec![
Value::String("node-fetch".to_string()), Value::String("abortcontroller-polyfill".to_string())
]);
assert_eq!(package_json["fastboot"]["schemaVersion"].as_u64().unwrap(), 3);
return finalize_test(current_directory);
}
#[test]
fn build_works_for_different_dist_path_asset_map_and_env() -> Result<(), Box<dyn Error>> {
let (current_directory, _, second_example_asset_map) = setup_test()?;
let app_files = second_example_asset_map.get("assets/application.js").unwrap().clone();
let vendor_files = second_example_asset_map.get("assets/vendor.js").unwrap().clone();
let production_env = get_production_env();
let module_prefix = production_env.get("modulePrefix").unwrap().clone();
assert_eq!(fs::metadata("dist/package.json").is_ok(), false);
let config = Config::build(
production_env,
HashMap::new(),
BuildCache::new()
);
build(second_example_asset_map, &config, Some("dist"))?;
let package_json: Value =
serde_json::from_str(fs::read_to_string("dist/package.json")?.as_str())?;
assert_eq!(package_json["dependencies"], json!({}));
assert_eq!(package_json["fastboot"]["appName"].as_str().unwrap(), "dummyapp");
assert_eq!(package_json["fastboot"]["config"]["dummyapp"]["APP"], json!({
"API_HOST": "http://localhost:3000",
"autoboot": false,
"name": module_prefix,
"version": "0.0.0+b5f80b0d"
}));
assert_eq!(package_json["fastboot"]["manifest"], json!({
"appFiles": vec![app_files],
"htmlFile": "index.html",
"vendorFiles": vec![vendor_files]
}));
assert_eq!(package_json["fastboot"]["hostWhitelist"].as_array().unwrap(), &vec![Value::String("^localhost:\\d+$".to_string())]);
assert_eq!(package_json["fastboot"]["moduleWhitelist"].as_array().unwrap(), &vec![
Value::String("node-fetch".to_string()), Value::String("abortcontroller-polyfill".to_string())
]);
assert_eq!(package_json["fastboot"]["schemaVersion"].as_u64().unwrap(), 3);
return finalize_test(current_directory);
}
#[test]
fn build_appends_memserver_path_only_on_memserver_mode() -> Result<(), Box<dyn Error>> {
let (current_directory, example_asset_map, _) = setup_test()?;
let app_files = example_asset_map.get("assets/application.js").unwrap().clone();
let vendor_files = example_asset_map.get("assets/vendor.js").unwrap().clone();
let memserver_files = example_asset_map.get("assets/memserver.js").unwrap().clone();
let development_env = json!({
"ember-resolver": {
"features": {
"EMBER_RESOLVER_MODULE_UNIFICATION": true
}
},
"modulePrefix": "dummyapp",
"environment": "development",
"rootURL": "/",
"locationType": "auto",
"fastboot": {
"hostWhitelist": [
"^localhost:\\d+$"
]
},
"ember-devtools": {
"global": true,
"enabled": true
},
"memserver": {
"minify": false,
"enabled": true
},
"EmberENV": {
"FEATURES": {
"ember-module-unification": true
},
"EXTEND_PROTOTYPES": {
"Date": false
}
},
"APP": {
"API_HOST": "http://localhost:3000"
}
});
let module_prefix = development_env.get("modulePrefix").unwrap().clone();
assert_eq!(fs::metadata("tmp/package.json").is_ok(), false);
let config = Config::build(
development_env,
HashMap::new(),
BuildCache::new()
);
build(example_asset_map, &config, Some("tmp"))?;
let package_json: Value =
serde_json::from_str(fs::read_to_string("tmp/package.json")?.as_str())?;
assert_eq!(package_json["dependencies"], json!({}));
assert_eq!(package_json["fastboot"]["appName"].as_str().unwrap(), "dummyapp");
assert_eq!(package_json["fastboot"]["config"]["dummyapp"]["APP"], json!({
"API_HOST": "http://localhost:3000",
"autoboot": false,
"name": module_prefix,
"version": "0.0.0+b5f80b0d"
}));
assert_eq!(package_json["fastboot"]["manifest"], json!({
"appFiles": vec![app_files, memserver_files],
"htmlFile": "index.html",
"vendorFiles": vec![vendor_files]
}));
assert_eq!(package_json["fastboot"]["hostWhitelist"].as_array().unwrap(), &vec![Value::String("^localhost:\\d+$".to_string())]);
assert_eq!(package_json["fastboot"]["moduleWhitelist"].as_array().unwrap(), &vec![
Value::String("node-fetch".to_string()), Value::String("abortcontroller-polyfill".to_string())
]);
assert_eq!(package_json["fastboot"]["schemaVersion"].as_u64().unwrap(), 3);
return finalize_test(current_directory);
}
}
| 38.736994 | 138 | 0.551742 |
015d31bf42cce021520639c9ddcc6bd8c1e445c8 | 733 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
let (p,c) = Chan::new();
let x = Some(p);
c.send(false);
match x {
Some(z) if z.recv() => { fail!() }, //~ ERROR cannot bind by-move into a pattern guard
Some(z) => { assert!(!z.recv()); },
None => fail!()
}
}
| 34.904762 | 94 | 0.639836 |
561dc0ca7de11d18060a7388d019f9755533c6af | 4,803 | //! Allows a future to execute for a maximum amount of time.
//!
//! See [`Timeout`] documentation for more details.
//!
//! [`Timeout`]: struct.Timeout.html
use crate::time::{delay_until, Delay, Duration, Instant};
use std::fmt;
use std::future::Future;
use std::pin::Pin;
use std::task::{self, Poll};
/// Require a `Future` to complete before the specified duration has elapsed.
///
/// If the future completes before the duration has elapsed, then the completed
/// value is returned. Otherwise, an error is returned.
///
/// # Cancelation
///
/// Cancelling a timeout is done by dropping the future. No additional cleanup
/// or other work is required.
///
/// The original future may be obtained by calling [`Timeout::into_inner`]. This
/// consumes the `Timeout`.
///
/// # Examples
///
/// Create a new `Timeout` set to expire in 10 milliseconds.
///
/// ```rust
/// use tokio::time::timeout;
/// use tokio::sync::oneshot;
///
/// use std::time::Duration;
///
/// # async fn dox() {
/// let (tx, rx) = oneshot::channel();
/// # tx.send(()).unwrap();
///
/// // Wrap the future with a `Timeout` set to expire in 10 milliseconds.
/// if let Err(_) = timeout(Duration::from_millis(10), rx).await {
/// println!("did not receive value within 10 ms");
/// }
/// # }
/// ```
pub fn timeout<T>(duration: Duration, future: T) -> Timeout<T>
where
T: Future,
{
let delay = Delay::new_timeout(Instant::now() + duration, duration);
Timeout::new_with_delay(future, delay)
}
/// Require a `Future` to complete before the specified instant in time.
///
/// If the future completes before the instant is reached, then the completed
/// value is returned. Otherwise, an error is returned.
///
/// # Cancelation
///
/// Cancelling a timeout is done by dropping the future. No additional cleanup
/// or other work is required.
///
/// The original future may be obtained by calling [`Timeout::into_inner`]. This
/// consumes the `Timeout`.
///
/// # Examples
///
/// Create a new `Timeout` set to expire in 10 milliseconds.
///
/// ```rust
/// use tokio::time::{Instant, timeout_at};
/// use tokio::sync::oneshot;
///
/// use std::time::Duration;
///
/// # async fn dox() {
/// let (tx, rx) = oneshot::channel();
/// # tx.send(()).unwrap();
///
/// // Wrap the future with a `Timeout` set to expire 10 milliseconds into the
/// // future.
/// if let Err(_) = timeout_at(Instant::now() + Duration::from_millis(10), rx).await {
/// println!("did not receive value within 10 ms");
/// }
/// # }
/// ```
pub fn timeout_at<T>(deadline: Instant, future: T) -> Timeout<T>
where
T: Future,
{
let delay = delay_until(deadline);
Timeout {
value: future,
delay,
}
}
/// Future returned by [`timeout`](timeout) and [`timeout_at`](timeout_at).
#[must_use = "futures do nothing unless you `.await` or poll them"]
#[derive(Debug)]
pub struct Timeout<T> {
value: T,
delay: Delay,
}
/// Error returned by `Timeout`.
#[derive(Debug, PartialEq)]
pub struct Elapsed(());
impl Elapsed {
// Used on StreamExt::timeout
#[allow(unused)]
pub(crate) fn new() -> Self {
Elapsed(())
}
}
impl<T> Timeout<T> {
pub(crate) fn new_with_delay(value: T, delay: Delay) -> Timeout<T> {
Timeout { value, delay }
}
/// Gets a reference to the underlying value in this timeout.
pub fn get_ref(&self) -> &T {
&self.value
}
/// Gets a mutable reference to the underlying value in this timeout.
pub fn get_mut(&mut self) -> &mut T {
&mut self.value
}
/// Consumes this timeout, returning the underlying value.
pub fn into_inner(self) -> T {
self.value
}
}
impl<T> Future for Timeout<T>
where
T: Future,
{
type Output = Result<T::Output, Elapsed>;
fn poll(mut self: Pin<&mut Self>, cx: &mut task::Context<'_>) -> Poll<Self::Output> {
// First, try polling the future
// Safety: we never move `self.value`
unsafe {
let p = self.as_mut().map_unchecked_mut(|me| &mut me.value);
if let Poll::Ready(v) = p.poll(cx) {
return Poll::Ready(Ok(v));
}
}
// Now check the timer
// Safety: X_X!
unsafe {
match self.map_unchecked_mut(|me| &mut me.delay).poll(cx) {
Poll::Ready(()) => Poll::Ready(Err(Elapsed(()))),
Poll::Pending => Poll::Pending,
}
}
}
}
// ===== impl Elapsed =====
impl fmt::Display for Elapsed {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
"deadline has elapsed".fmt(fmt)
}
}
impl std::error::Error for Elapsed {}
impl From<Elapsed> for std::io::Error {
fn from(_err: Elapsed) -> std::io::Error {
std::io::ErrorKind::TimedOut.into()
}
}
| 25.962162 | 89 | 0.602748 |
1e532cf1332e0beec170828f9b14cd9fab44b359 | 3,376 | #![cfg_attr(feature="nightly", feature(alloc_system))]
#[cfg(feature="nightly")]
extern crate alloc_system;
extern crate random;
extern crate tensorflow;
use random::Source;
use std::error::Error;
use std::result::Result;
use std::path::Path;
use std::process::exit;
use tensorflow::Code;
use tensorflow::Graph;
use tensorflow::Session;
use tensorflow::SessionOptions;
use tensorflow::SessionRunArgs;
use tensorflow::Status;
use tensorflow::Tensor;
fn main() {
// Putting the main code in another function serves two purposes:
// 1. We can use the `?` operator.
// 2. We can call exit safely, which does not run any destructors.
exit(match run() {
Ok(_) => 0,
Err(e) => {
println!("{}", e);
1
}
})
}
/// Fine-tunes the saved `y = w * x + b` regression model on freshly generated
/// synthetic data, then checks the learned `w`/`b` against the true values.
fn run() -> Result<(), Box<Error>> {
    let export_dir = "examples/regression_savedmodel"; // y = w * x + b
    if !Path::new(export_dir).exists() {
        return Err(Box::new(Status::new_set(Code::NotFound,
                                            &format!("Run 'python regression_savedmodel.py' to generate \
                                                      {} and try again.",
                                                     export_dir))
            .unwrap()));
    }

    // Generate some test data.
    let w = 0.1;
    let b = 0.3;
    let num_points = 100;
    let steps = 201;
    let mut rand = random::default();
    let mut x = Tensor::new(&[num_points as u64]);
    let mut y = Tensor::new(&[num_points as u64]);
    for i in 0..num_points {
        // Inputs uniformly in [-1, 1); targets lie exactly on the line.
        x[i] = (2.0 * rand.read::<f64>() - 1.0) as f32;
        y[i] = w * x[i] + b;
    }

    // Load the saved model exported by regression_savedmodel.py.
    let mut graph = Graph::new();
    let mut session = Session::from_saved_model(&SessionOptions::new(),
                                                &["train", "serve"],
                                                &mut graph,
                                                export_dir)?;
    let op_x = graph.operation_by_name_required("x")?;
    let op_y = graph.operation_by_name_required("y")?;
    let op_train = graph.operation_by_name_required("train")?;
    let op_w = graph.operation_by_name_required("w")?;
    let op_b = graph.operation_by_name_required("b")?;

    // Train the model (e.g. for fine tuning).
    let mut train_step = SessionRunArgs::new();
    train_step.add_feed(&op_x, 0, &x);
    train_step.add_feed(&op_y, 0, &y);
    train_step.add_target(&op_train);
    for _ in 0..steps {
        session.run(&mut train_step)?;
    }

    // Grab the data out of the session.
    let mut output_step = SessionRunArgs::new();
    let w_ix = output_step.request_fetch(&op_w, 0);
    let b_ix = output_step.request_fetch(&op_b, 0);
    session.run(&mut output_step)?;

    // Check our results: the fitted parameters should be within 1e-3.
    let w_hat: f32 = output_step.fetch(w_ix)?[0];
    let b_hat: f32 = output_step.fetch(b_ix)?[0];
    println!("Checking w: expected {}, got {}. {}",
             w,
             w_hat,
             if (w - w_hat).abs() < 1e-3 {
                 "Success!"
             } else {
                 "FAIL"
             });
    println!("Checking b: expected {}, got {}. {}",
             b,
             b_hat,
             if (b - b_hat).abs() < 1e-3 {
                 "Success!"
             } else {
                 "FAIL"
             });
    Ok(())
}
| 32.461538 | 105 | 0.527844 |
56017beff8f30bcab0eec3ea8892a33ef013d07f | 1,978 |
/// Yew component rendering the Material "medication liquid" SVG icon.
pub struct IconMedicationLiquid {
    // Shared icon props: size, colors, stroke settings, CSS class.
    props: crate::Props,
}
impl yew::Component for IconMedicationLiquid {
    type Properties = crate::Props;
    type Message = ();

    fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
    {
        Self { props }
    }

    // Always re-render on update (the component has no internal state).
    fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
    {
        true
    }

    // NOTE(review): `change` ignores new props and never re-renders —
    // presumably intentional for a static icon, but prop updates are dropped.
    fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
    {
        false
    }

    // Wraps the raw Material SVG in an outer <svg> carrying the prop-driven
    // size/color/stroke attributes, each with a sensible default.
    fn view(&self) -> yew::prelude::Html
    {
        yew::prelude::html! {
            <svg
                class=self.props.class.unwrap_or("")
                width=self.props.size.unwrap_or(24).to_string()
                height=self.props.size.unwrap_or(24).to_string()
                viewBox="0 0 24 24"
                fill=self.props.fill.unwrap_or("none")
                stroke=self.props.color.unwrap_or("currentColor")
                stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
                stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
                stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
            >
            <svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24" viewBox="0 0 24 24" width="24"><g><rect fill="none" height="24" width="24"/><rect fill="none" height="24" width="24"/></g><g><g><path d="M4,5h10c0.55,0,1-0.45,1-1s-0.45-1-1-1H4C3.45,3,3,3.45,3,4S3.45,5,4,5z"/><path d="M14,6H4C2.9,6,2,6.9,2,8v11c0,1.1,0.9,2,2,2h10c1.1,0,2-0.9,2-2V8C16,6.9,15.1,6,14,6z M11.5,15h-1v1 c0,0.83-0.67,1.5-1.5,1.5S7.5,16.83,7.5,16v-1h-1C5.67,15,5,14.33,5,13.5C5,12.67,5.67,12,6.5,12h1v-1c0-0.83,0.67-1.5,1.5-1.5 s1.5,0.67,1.5,1.5v1h1c0.83,0,1.5,0.67,1.5,1.5C13,14.33,12.33,15,11.5,15z"/><path d="M20,6c-1.68,0-3,1.76-3,4c0,1.77,0.83,3.22,2,3.76V20c0,0.55,0.45,1,1,1s1-0.45,1-1v-6.24c1.17-0.54,2-1.99,2-3.76 C23,7.76,21.68,6,20,6z"/></g></g></svg>
            </svg>
        }
    }
}
| 43 | 771 | 0.58544 |
f4698698122fa01cb7fea37847d963c9b5e119e1 | 12,207 | use util::{checksum, IterExt, BitWriter};
use crypto::{gen_random_bytes, sha256_first_byte};
use error::ErrorKind;
use failure::Error;
use mnemonic_type::MnemonicType;
use language::Language;
use std::fmt;
/// The primary type in this crate, most tasks require creating or using one.
///
/// To create a *new* [`Mnemonic`][Mnemonic] from a randomly generated key, call [`Mnemonic::new()`][Mnemonic::new()].
///
/// To get a [`Mnemonic`][Mnemonic] instance for an existing mnemonic phrase, including
/// those generated by other software or hardware wallets, use [`Mnemonic::from_phrase()`][Mnemonic::from_phrase()].
///
/// You can get the HD wallet [`Seed`][Seed] from a [`Mnemonic`][Mnemonic] by calling [`Seed::new()`][Seed::new()].
/// From there you can either get the raw byte value with [`Seed::as_bytes()`][Seed::as_bytes()], or the hex
/// representation using Rust formatting: `format!("{:X}", seed)`.
///
/// You can also get the original entropy value back from a [`Mnemonic`][Mnemonic] with [`Mnemonic::entropy()`][Mnemonic::entropy()],
/// but beware that the entropy value is **not the same thing** as an HD wallet seed, and should
/// *never* be used that way.
///
/// [Mnemonic]: ./mnemonic/struct.Mnemonic.html
/// [Mnemonic::new()]: ./mnemonic/struct.Mnemonic.html#method.new
/// [Mnemonic::from_phrase()]: ./mnemonic/struct.Mnemonic.html#method.from_phrase
/// [Mnemonic::entropy()]: ./mnemonic/struct.Mnemonic.html#method.entropy
/// [Seed]: ./seed/struct.Seed.html
/// [Seed::new()]: ./seed/struct.Seed.html#method.new
/// [Seed::as_bytes()]: ./seed/struct.Seed.html#method.as_bytes
///
#[derive(Clone)]
pub struct Mnemonic {
    // The space-joined mnemonic words.
    phrase: String,
    // Wordlist language used to encode/decode the phrase.
    lang: Language,
    // Raw entropy the phrase encodes (checksum byte NOT included).
    entropy: Vec<u8>,
}
impl Mnemonic {
    /// Generates a new [`Mnemonic`][Mnemonic]
    ///
    /// Use [`Mnemonic::phrase()`][Mnemonic::phrase()] to get an `str` slice of the generated phrase.
    ///
    /// # Example
    ///
    /// ```
    /// use bip39::{Mnemonic, MnemonicType, Language};
    ///
    /// let mnemonic = Mnemonic::new(MnemonicType::Words12, Language::English);
    /// let phrase = mnemonic.phrase();
    ///
    /// println!("phrase: {}", phrase);
    ///
    /// assert_eq!(phrase.split(" ").count(), 12);
    /// ```
    ///
    /// [Mnemonic]: ./mnemonic/struct.Mnemonic.html
    /// [Mnemonic::phrase()]: ./mnemonic/struct.Mnemonic.html#method.phrase
    pub fn new(mtype: MnemonicType, lang: Language) -> Mnemonic {
        // Entropy length (in bytes) is dictated by the requested word count.
        let entropy = gen_random_bytes(mtype.entropy_bits() / 8);

        Mnemonic::from_entropy_unchecked(entropy, lang)
    }

    /// Create a [`Mnemonic`][Mnemonic] from pre-generated entropy
    ///
    /// # Example
    ///
    /// ```
    /// use bip39::{Mnemonic, MnemonicType, Language};
    ///
    /// let entropy = &[0x33, 0xE4, 0x6B, 0xB1, 0x3A, 0x74, 0x6E, 0xA4, 0x1C, 0xDD, 0xE4, 0x5C, 0x90, 0x84, 0x6A, 0x79];
    /// let mnemonic = Mnemonic::from_entropy(entropy, Language::English).unwrap();
    ///
    /// assert_eq!("crop cash unable insane eight faith inflict route frame loud box vibrant", mnemonic.phrase());
    /// assert_eq!("33E46BB13A746EA41CDDE45C90846A79", format!("{:X}", mnemonic));
    /// ```
    ///
    /// [Mnemonic]: ../mnemonic/struct.Mnemonic.html
    pub fn from_entropy(entropy: &[u8], lang: Language) -> Result<Mnemonic, Error> {
        // Validate entropy size
        MnemonicType::for_key_size(entropy.len() * 8)?;

        Ok(Self::from_entropy_unchecked(entropy, lang))
    }

    // Builds the phrase from entropy without re-validating the entropy length;
    // callers must have checked it (or generated it with a valid size).
    fn from_entropy_unchecked<E>(entropy: E, lang: Language) -> Mnemonic
    where
        E: Into<Vec<u8>>
    {
        let entropy = entropy.into();
        let wordlist = lang.wordlist();

        let checksum_byte = sha256_first_byte(&entropy);

        // First, create a byte iterator for the given entropy and the first byte of the
        // hash of the entropy that will serve as the checksum (up to 8 bits for biggest
        // entropy source).
        //
        // Then we transform that into a bits iterator that returns 11 bits at a
        // time (as u16), which we can map to the words on the `wordlist`.
        //
        // Given the entropy is of correct size, this ought to give us the correct word
        // count.
        let phrase = entropy.iter()
            .chain(Some(&checksum_byte))
            .bits()
            .map(|bits| wordlist.get_word(bits))
            .join(" ");

        Mnemonic {
            phrase,
            lang,
            entropy
        }
    }

    /// Create a [`Mnemonic`][Mnemonic] from an existing mnemonic phrase
    ///
    /// The phrase supplied will be checked for word length and validated according to the checksum
    /// specified in BIP0039
    ///
    /// # Example
    ///
    /// ```
    /// use bip39::{Mnemonic, Language};
    ///
    /// let phrase = "park remain person kitchen mule spell knee armed position rail grid ankle";
    /// let mnemonic = Mnemonic::from_phrase(phrase, Language::English).unwrap();
    ///
    /// assert_eq!(phrase, mnemonic.phrase());
    /// ```
    ///
    /// [Mnemonic]: ../mnemonic/struct.Mnemonic.html
    pub fn from_phrase<S>(phrase: S, lang: Language) -> Result<Mnemonic, Error>
    where
        S: Into<String>,
    {
        let phrase = phrase.into();

        // this also validates the checksum and phrase length before returning the entropy so we
        // can store it. We don't use the validate function here to avoid having a public API that
        // takes a phrase string and returns the entropy directly.
        let entropy = Mnemonic::phrase_to_entropy(&phrase, lang)?;

        let mnemonic = Mnemonic {
            phrase,
            lang,
            entropy,
        };

        Ok(mnemonic)
    }

    /// Validate a mnemonic phrase
    ///
    /// The phrase supplied will be checked for word length and validated according to the checksum
    /// specified in BIP0039.
    ///
    /// # Example
    ///
    /// ```
    /// use bip39::{Mnemonic, Language};
    ///
    /// let test_mnemonic = "park remain person kitchen mule spell knee armed position rail grid ankle";
    ///
    /// assert!(Mnemonic::validate(test_mnemonic, Language::English).is_ok());
    /// ```
    pub fn validate(phrase: &str, lang: Language) -> Result<(), Error> {
        // Decode and discard the entropy; only the success/failure matters.
        Mnemonic::phrase_to_entropy(phrase, lang)?;

        Ok(())
    }

    /// Calculate the checksum, verify it and return the entropy
    ///
    /// Only intended for internal use, as returning a `Vec<u8>` that looks a bit like it could be
    /// used as the seed is likely to cause problems for someone eventually. All the other functions
    /// that return something like that are explicit about what it is and what to use it for.
    fn phrase_to_entropy(phrase: &str, lang: Language) -> Result<Vec<u8>, Error> {
        let wordmap = lang.wordmap();

        // Preallocate enough space for the longest possible word list
        let mut bits = BitWriter::with_capacity(264);

        for word in phrase.split(" ") {
            // Unknown words surface here as an error from the wordmap.
            bits.push(wordmap.get_bits(&word)?);
        }

        // 11 bits per word; an invalid word count is rejected here.
        let mtype = MnemonicType::for_word_count(bits.len() / 11)?;

        debug_assert!(bits.len() == mtype.total_bits(), "Insufficient amount of bits to validate");

        let mut entropy = bits.into_bytes();
        let entropy_bytes = mtype.entropy_bits() / 8;

        // The byte immediately after the raw entropy carries the checksum bits.
        let actual_checksum = checksum(entropy[entropy_bytes], mtype.checksum_bits());

        // Truncate to get rid of the byte containing the checksum
        entropy.truncate(entropy_bytes);

        let checksum_byte = sha256_first_byte(&entropy);

        let expected_checksum = checksum(checksum_byte, mtype.checksum_bits());

        if actual_checksum != expected_checksum {
            Err(ErrorKind::InvalidChecksum)?;
        }

        Ok(entropy)
    }

    /// Get the mnemonic phrase as a string reference.
    pub fn phrase(&self) -> &str {
        &self.phrase
    }

    /// Consume the `Mnemonic` and return the phrase as a `String`.
    ///
    /// This operation doesn't perform any allocations.
    pub fn into_phrase(self) -> String {
        self.phrase
    }

    /// Get the original entropy value of the mnemonic phrase as a slice.
    ///
    /// # Example
    ///
    /// ```
    /// use bip39::{Mnemonic, Language};
    ///
    /// let phrase = "park remain person kitchen mule spell knee armed position rail grid ankle";
    ///
    /// let mnemonic = Mnemonic::from_phrase(phrase, Language::English).unwrap();
    ///
    /// let entropy: &[u8] = mnemonic.entropy();
    /// ```
    ///
    /// **Note:** You shouldn't use the generated entropy as secrets, for that generate a new
    /// `Seed` from the `Mnemonic`.
    pub fn entropy(&self) -> &[u8] {
        &self.entropy
    }

    /// Get the [`Language`][Language]
    ///
    /// [Language]: ../language/struct.Language.html
    pub fn language(&self) -> Language {
        self.lang
    }
}
impl AsRef<str> for Mnemonic {
    /// Borrow the mnemonic as its phrase text.
    fn as_ref(&self) -> &str {
        &self.phrase
    }
}
impl fmt::Display for Mnemonic {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // `Formatter::pad` is exactly what `str`'s `Display` impl delegates
        // to, so width/fill/alignment flags behave identically.
        f.pad(self.phrase())
    }
}
impl fmt::Debug for Mnemonic {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // A mnemonic debug-prints as its quoted phrase.
        <str as fmt::Debug>::fmt(self.phrase(), f)
    }
}
impl fmt::LowerHex for Mnemonic {
    /// Formats the entropy as lowercase hex, two digits per byte.
    ///
    /// Bugfix: the previous `{:x}` per byte dropped the leading zero of any
    /// byte below 0x10 (e.g. 0x0A rendered as "a"), producing lossy and
    /// ambiguous hex. `{:02x}` zero-pads each byte to exactly two digits.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // `{:#x}`-style alternate formatting gets a `0x` prefix.
        if f.alternate() {
            f.write_str("0x")?;
        }

        for byte in self.entropy() {
            write!(f, "{:02x}", byte)?;
        }

        Ok(())
    }
}
impl fmt::UpperHex for Mnemonic {
    /// Formats the entropy as uppercase hex, two digits per byte.
    ///
    /// Bugfix: the previous `{:X}` per byte dropped the leading zero of any
    /// byte below 0x10, producing lossy and ambiguous hex. `{:02X}` zero-pads
    /// each byte to exactly two digits.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // `{:#X}`-style alternate formatting gets a `0x` prefix.
        if f.alternate() {
            f.write_str("0x")?;
        }

        for byte in self.entropy() {
            write!(f, "{:02X}", byte)?;
        }

        Ok(())
    }
}
impl From<Mnemonic> for String {
    /// Consume the mnemonic, yielding its phrase without allocating.
    fn from(val: Mnemonic) -> String {
        // Same-module field access; equivalent to `val.into_phrase()`.
        val.phrase
    }
}
#[cfg(test)]
mod test {
    use super::*;

    // Round-trip: phrase -> Mnemonic and entropy -> Mnemonic must reproduce
    // the original phrase and entropy exactly.
    #[test]
    fn back_to_back() {
        let m1 = Mnemonic::new(MnemonicType::Words12, Language::English);
        let m2 = Mnemonic::from_phrase(m1.phrase(), Language::English).unwrap();
        let m3 = Mnemonic::from_entropy(m1.entropy(), Language::English).unwrap();

        assert_eq!(m1.entropy(), m2.entropy(), "Entropy must be the same");
        assert_eq!(m1.entropy(), m3.entropy(), "Entropy must be the same");
        assert_eq!(m1.phrase(), m2.phrase(), "Phrase must be the same");
        assert_eq!(m1.phrase(), m3.phrase(), "Phrase must be the same");
    }

    // Known-answer test: fixed 128-bit entropy encodes to a fixed phrase.
    #[test]
    fn mnemonic_from_entropy() {
        let entropy = &[0x33, 0xE4, 0x6B, 0xB1, 0x3A, 0x74, 0x6E, 0xA4, 0x1C, 0xDD, 0xE4, 0x5C, 0x90, 0x84, 0x6A, 0x79];
        let phrase = "crop cash unable insane eight faith inflict route frame loud box vibrant";

        let mnemonic = Mnemonic::from_entropy(entropy, Language::English).unwrap();

        assert_eq!(phrase, mnemonic.phrase());
    }

    // Known-answer test in the opposite direction: phrase decodes to entropy.
    #[test]
    fn mnemonic_from_phrase() {
        let entropy = &[0x33, 0xE4, 0x6B, 0xB1, 0x3A, 0x74, 0x6E, 0xA4, 0x1C, 0xDD, 0xE4, 0x5C, 0x90, 0x84, 0x6A, 0x79];
        let phrase = "crop cash unable insane eight faith inflict route frame loud box vibrant";

        let mnemonic = Mnemonic::from_phrase(phrase, Language::English).unwrap();

        assert_eq!(entropy, mnemonic.entropy());
    }

    // Display must print exactly the phrase.
    #[test]
    fn mnemonic_format() {
        let mnemonic = Mnemonic::new(MnemonicType::Words15, Language::English);
        assert_eq!(mnemonic.phrase(), format!("{}", mnemonic));
    }

    // Hex formatting in all four flavors: lower/upper, plain/`#`-prefixed.
    // NOTE(review): every fixture byte is >= 0x10, so this does not exercise
    // leading-zero padding of bytes below 0x10.
    #[test]
    fn mnemonic_hex_format() {
        let entropy = &[0x33, 0xE4, 0x6B, 0xB1, 0x3A, 0x74, 0x6E, 0xA4, 0x1C, 0xDD, 0xE4, 0x5C, 0x90, 0x84, 0x6A, 0x79];
        let mnemonic = Mnemonic::from_entropy(entropy, Language::English).unwrap();

        assert_eq!(format!("{:x}", mnemonic), "33e46bb13a746ea41cdde45c90846a79");
        assert_eq!(format!("{:X}", mnemonic), "33E46BB13A746EA41CDDE45C90846A79");
        assert_eq!(format!("{:#x}", mnemonic), "0x33e46bb13a746ea41cdde45c90846a79");
        assert_eq!(format!("{:#X}", mnemonic), "0x33E46BB13A746EA41CDDE45C90846A79");
    }
}
9b76ada8b4c0e753982fd5df61c194cd91057bae | 4,911 | use ergotree_ir::mir::coll_map::Map;
use ergotree_ir::mir::value::CollKind;
use ergotree_ir::mir::value::Value;
use crate::eval::env::Env;
use crate::eval::EvalContext;
use crate::eval::EvalError;
use crate::eval::Evaluable;
impl Evaluable for Map {
fn eval(&self, env: &Env, ctx: &mut EvalContext) -> Result<Value, EvalError> {
let input_v = self.input.eval(env, ctx)?;
let mapper_v = self.mapper.eval(env, ctx)?;
let input_v_clone = input_v.clone();
let mut mapper_call = |arg: Value| match &mapper_v {
Value::Lambda(func_value) => {
let func_arg = func_value.args.first().ok_or_else(|| {
EvalError::NotFound(
"Map: evaluated mapper has empty arguments list".to_string(),
)
})?;
let env1 = env.clone().extend(func_arg.idx, arg);
func_value.body.eval(&env1, ctx)
}
_ => Err(EvalError::UnexpectedValue(format!(
"expected mapper to be Value::FuncValue got: {0:?}",
input_v_clone
))),
};
let mapper_input_tpe = self
.mapper_sfunc
.t_dom
.first()
.ok_or_else(|| {
EvalError::NotFound(
"Map: mapper SFunc.t_dom is empty (does not have arguments)".to_string(),
)
})?
.clone();
let normalized_input_vals: Vec<Value> = match input_v {
Value::Coll(coll) => {
if *coll.elem_tpe() != mapper_input_tpe {
return Err(EvalError::UnexpectedValue(format!(
"expected Map input element type to be {0:?}, got: {1:?}",
mapper_input_tpe,
coll.elem_tpe()
)));
};
Ok(coll.as_vec())
}
_ => Err(EvalError::UnexpectedValue(format!(
"expected Map input to be Value::Coll, got: {0:?}",
input_v
))),
}?;
normalized_input_vals
.iter()
.map(|item| mapper_call(item.clone()))
.collect::<Result<Vec<Value>, EvalError>>()
.map(|values| {
CollKind::from_vec(self.out_elem_tpe(), values).map_err(EvalError::TryExtractFrom)
})
.and_then(|v| v) // flatten <Result<Result<Value, _>, _>
.map(Value::Coll)
}
}
#[allow(clippy::panic)]
#[allow(clippy::unwrap_used)]
#[cfg(test)]
mod tests {
use std::rc::Rc;
use crate::eval::context::Context;
use crate::eval::context::TxIoVec;
use crate::eval::tests::eval_out;
use ergotree_ir::mir::bin_op::ArithOp;
use ergotree_ir::mir::bin_op::BinOp;
use ergotree_ir::mir::expr::Expr;
use ergotree_ir::mir::extract_amount::ExtractAmount;
use ergotree_ir::mir::func_value::FuncArg;
use ergotree_ir::mir::func_value::FuncValue;
use ergotree_ir::mir::property_call::PropertyCall;
use ergotree_ir::mir::unary_op::OneArgOpTryBuild;
use ergotree_ir::mir::val_use::ValUse;
use ergotree_ir::types::scontext;
use ergotree_ir::types::stype::SType;
use super::*;
use proptest::prelude::*;
proptest! {
#![proptest_config(ProptestConfig::with_cases(16))]
#[test]
fn eval_box_value(ctx in any::<Context>()) {
let data_inputs: Expr = PropertyCall::new(Expr::Context, scontext::DATA_INPUTS_PROPERTY.clone()).unwrap()
.into();
let val_use: Expr = ValUse {
val_id: 1.into(),
tpe: SType::SBox,
}
.into();
let mapper_body: Expr = BinOp {
kind: ArithOp::Plus.into(),
left: Box::new(Expr::Const(1i64.into())),
right: Box::new(Expr::ExtractAmount(
ExtractAmount::try_build(val_use)
.unwrap(),
)),
}
.into();
let expr: Expr = Map::new(
data_inputs,
FuncValue::new(
vec![FuncArg {
idx: 1.into(),
tpe: SType::SBox,
}],
mapper_body,
)
.into(),
)
.unwrap()
.into();
let ctx = Rc::new(ctx);
let output = {
let e = eval_out::<Vec<i64>>(&expr, ctx.clone());
if e.is_empty() {
None
} else {
Some(TxIoVec::from_vec(e).unwrap())
}
};
assert_eq!(
output,
ctx.data_inputs.clone().map(|d| d.mapped(| b| b.value.as_i64() + 1))
);
}
}
}
| 33.636986 | 117 | 0.486255 |
7540389461cb9ff04308f8a916d0f7342cbf8e32 | 4,105 | use crate::prelude::*;
use std::marker::PhantomData;
use std::sync::Arc;
/// Factory namespace for batch event processors; carries no state itself.
pub struct BatchEventProcessor;
impl BatchEventProcessor {
pub fn create<'a, F, T>(handler: F) -> impl EventProcessor<'a, T>
where
T: Send + 'a,
F: Fn(&T, Sequence, bool) + Send + 'static,
{
Processor {
handler,
cursor: Default::default(),
_marker: Default::default(),
}
}
pub fn create_mut<'a, F, T>(handler: F) -> impl EventProcessorMut<'a, T>
where
T: Send + 'a,
F: Fn(&mut T, Sequence, bool) + Send + 'static,
{
ProcessorMut {
handler,
cursor: Default::default(),
_marker: Default::default(),
}
}
}
// Shared-reference processor: pairs the user handler with its own progress
// cursor. `_marker` pins the event type `T` without storing one.
struct Processor<F, T> {
    handler: F,
    cursor: Arc<AtomicSequence>,
    _marker: PhantomData<T>,
}
// Mutable-reference counterpart of `Processor`; identical layout, the handler
// just takes `&mut T` instead of `&T`.
struct ProcessorMut<F, T> {
    handler: F,
    cursor: Arc<AtomicSequence>,
    _marker: PhantomData<T>,
}
// A `Processor` bundled with its data source and barrier, ready to run.
struct RunnableProcessor<F, T, D: DataProvider<T>, B: SequenceBarrier> {
    processor: Processor<F, T>,
    data_provider: Arc<D>,
    barrier: B,
}
// Mutable-handler counterpart of `RunnableProcessor`.
struct RunnableProcessorMut<F, T, D: DataProvider<T>, B: SequenceBarrier> {
    processor: ProcessorMut<F, T>,
    data_provider: Arc<D>,
    barrier: B,
}
impl<'a, F, T> EventProcessorMut<'a, T> for Processor<F, T>
where
    F: Fn(&T, Sequence, bool) + Send + 'static,
    T: Send + 'a,
{
    fn prepare<B: SequenceBarrier + 'a, D: DataProvider<T> + 'a>(
        self,
        barrier: B,
        data_provider: Arc<D>,
    ) -> Box<dyn Runnable + 'a> {
        // Bundle the processor with its data source and barrier into a
        // self-contained runnable unit.
        let runnable = RunnableProcessor {
            processor: self,
            data_provider,
            barrier,
        };
        Box::new(runnable)
    }

    fn get_cursor(&self) -> Arc<AtomicSequence> {
        // Cheap refcount bump; the underlying sequence stays shared.
        Arc::clone(&self.cursor)
    }
}
impl<'a, F, T> EventProcessorMut<'a, T> for ProcessorMut<F, T>
where
    F: Fn(&mut T, Sequence, bool) + Send + 'static,
    T: Send + 'a,
{
    fn prepare<B: SequenceBarrier + 'a, D: DataProvider<T> + 'a>(
        self,
        barrier: B,
        data_provider: Arc<D>,
    ) -> Box<dyn Runnable + 'a> {
        // Bundle the processor with its data source and barrier into a
        // self-contained runnable unit.
        let runnable = RunnableProcessorMut {
            processor: self,
            data_provider,
            barrier,
        };
        Box::new(runnable)
    }

    fn get_cursor(&self) -> Arc<AtomicSequence> {
        // Cheap refcount bump; the underlying sequence stays shared.
        Arc::clone(&self.cursor)
    }
}
// Marker impl: a shared-reference `Processor` also satisfies the immutable
// `EventProcessor` trait (no extra methods required).
impl<'a, F, T> EventProcessor<'a, T> for Processor<F, T>
where
    F: Fn(&T, Sequence, bool) + Send + 'static,
    T: Send + 'a,
{
}
impl<F, T, D, B> Runnable for RunnableProcessor<F, T, D, B>
where
    F: Fn(&T, Sequence, bool) + Send + 'static,
    D: DataProvider<T>,
    B: SequenceBarrier,
    T: Send,
{
    /// Event loop: wait on the barrier for newly published sequences, hand
    /// each event to the handler, then publish progress and signal.
    fn run(self: Box<Self>) {
        let f = &self.processor.handler;
        let cursor = &self.processor.cursor;
        let data_provider = &self.data_provider;
        let barrier = &self.barrier;

        loop {
            let next = cursor.get() + 1;
            // `None` from the barrier signals shutdown.
            let available = match barrier.wait_for(next) {
                Some(seq) => seq,
                None => return,
            };

            for i in next..=available {
                // SAFETY assumption (review): `i` is within the range the
                // barrier reported as published — confirm against the
                // `DataProvider::get` contract.
                let value = unsafe { data_provider.get(i) };
                // The final iteration flags end-of-batch to the handler.
                f(value, i, i == available);
            }

            // Publish our progress only after the whole batch is handled.
            cursor.set(available);
            barrier.signal();
        }
    }
}
impl<F, T, D, B> Runnable for RunnableProcessorMut<F, T, D, B>
where
    F: Fn(&mut T, Sequence, bool) + Send + 'static,
    D: DataProvider<T>,
    B: SequenceBarrier,
    T: Send,
{
    /// Same loop as the immutable variant, but events are handed to the
    /// handler via `get_mut` for in-place mutation.
    fn run(self: Box<Self>) {
        let f = &self.processor.handler;
        let cursor = &self.processor.cursor;
        let data_provider = &self.data_provider;
        let barrier = &self.barrier;

        loop {
            let next = cursor.get() + 1;
            // `None` from the barrier signals shutdown.
            let available = match barrier.wait_for(next) {
                Some(seq) => seq,
                None => return,
            };

            for i in next..=available {
                // SAFETY assumption (review): exclusive access to slot `i` is
                // guaranteed by the barrier protocol — confirm against the
                // `DataProvider::get_mut` contract.
                let value = unsafe { data_provider.get_mut(i) };
                f(value, i, i == available);
            }

            // Publish our progress only after the whole batch is handled.
            cursor.set(available);
            barrier.signal();
        }
    }
}
| 24.289941 | 76 | 0.526431 |
ed2a765a1d3a0e7935bf521f173166981ae9cd4c | 5,498 | // Copyright 2020 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
use wasmlib::*;
use crate::*;
use crate::contract::*;
// Process-local flag toggled by the local-state test funcs below.
// NOTE(review): accessed via `static mut` + `unsafe` — presumably relies on
// single-threaded contract execution; confirm for the target runtime.
static mut LOCAL_STATE_MUST_INCREMENT: bool = false;
pub fn func_init(_ctx: &ScFuncContext, f: &InitContext) {
    // Seed the persisted counter from the optional init parameter, if given.
    let counter_param = f.params.counter();
    if counter_param.exists() {
        f.state.counter().set_value(counter_param.value());
    }
}
pub fn func_call_increment(ctx: &ScFuncContext, f: &CallIncrementContext) {
    // Increment, and on the very first call (previous value 0) recurse once
    // through a sandbox call, leaving the counter at 2.
    let counter = f.state.counter();
    let previous = counter.value();
    counter.set_value(previous + 1);
    if previous == 0 {
        ScFuncs::call_increment(ctx).func.call();
    }
}
pub fn func_call_increment_recurse5x(ctx: &ScFuncContext, f: &CallIncrementRecurse5xContext) {
    // Increment, recursing through the sandbox while the previous value was
    // still below 5.
    let counter = f.state.counter();
    let previous = counter.value();
    counter.set_value(previous + 1);
    if previous < 5 {
        ScFuncs::call_increment_recurse5x(ctx).func.call();
    }
}
pub fn func_endless_loop(_ctx: &ScFuncContext, _f: &EndlessLoopContext) {
    // Spin forever on purpose. NOTE(review): presumably used to test the
    // runtime's gas/termination handling — confirm with the test suite.
    loop {}
}
pub fn func_increment(_ctx: &ScFuncContext, f: &IncrementContext) {
    // Bump the persisted counter by one.
    let counter = f.state.counter();
    let next = counter.value() + 1;
    counter.set_value(next);
}
// Demonstrates that direct internal calls share this instance's local static:
// only the two calls made after the flag is raised increment the counter.
pub fn func_local_state_internal_call(ctx: &ScFuncContext, f: &LocalStateInternalCallContext) {
    unsafe {
        LOCAL_STATE_MUST_INCREMENT = false;
    }
    when_must_increment_state(ctx, &f.state);
    unsafe {
        LOCAL_STATE_MUST_INCREMENT = true;
    }
    when_must_increment_state(ctx, &f.state);
    when_must_increment_state(ctx, &f.state);
    // counter ends up as 2
}
// Demonstrates that posted requests run in fresh instances: the local static
// raised here is NOT visible to the posted executions, so nothing increments.
pub fn func_local_state_post(ctx: &ScFuncContext, _f: &LocalStatePostContext) {
    unsafe {
        LOCAL_STATE_MUST_INCREMENT = false;
    }
    // prevent multiple identical posts, need a dummy param to differentiate them
    local_state_post(ctx, 1);
    unsafe {
        LOCAL_STATE_MUST_INCREMENT = true;
    }
    local_state_post(ctx, 2);
    local_state_post(ctx, 3);
    // counter ends up as 0
}
// Demonstrates that sandbox calls do not see this instance's local static
// either: the callee's own flag stays false, so nothing increments.
pub fn func_local_state_sandbox_call(ctx: &ScFuncContext, _f: &LocalStateSandboxCallContext) {
    unsafe {
        LOCAL_STATE_MUST_INCREMENT = false;
    }
    ScFuncs::when_must_increment(ctx).func.call();
    unsafe {
        LOCAL_STATE_MUST_INCREMENT = true;
    }
    ScFuncs::when_must_increment(ctx).func.call();
    ScFuncs::when_must_increment(ctx).func.call();
    // counter ends up as 0
}
pub fn func_post_increment(ctx: &ScFuncContext, f: &PostIncrementContext) {
    // Increment, and only on the first call (previous value 0) post a
    // follow-up increment request funded with 1 iota.
    let counter = f.state.counter();
    let previous = counter.value();
    counter.set_value(previous + 1);
    if previous == 0 {
        ScFuncs::increment(ctx).func.transfer_iotas(1).post();
    }
}
// Bumps the counter, then keeps re-posting itself until the repeat budget
// (explicit parameter, or the previously stored state value) is exhausted.
pub fn func_repeat_many(ctx: &ScFuncContext, f: &RepeatManyContext) {
    let counter = f.state.counter();
    let value = counter.value();
    counter.set_value(value + 1);
    let state_repeats = f.state.num_repeats();
    let mut repeats = f.params.num_repeats().value();
    if repeats == 0 {
        // No explicit parameter: fall back to the stored budget; 0 means done.
        repeats = state_repeats.value();
        if repeats == 0 {
            return;
        }
    }
    // Decrement the budget, then schedule the next round (1 iota attached).
    state_repeats.set_value(repeats - 1);
    ScFuncs::repeat_many(ctx).func.transfer_iotas(1).post();
}
pub fn func_test_leb128(ctx: &ScFuncContext, _f: &TestLeb128Context) {
    // Round-trip LEB128 values across the sign boundary and the one-byte /
    // two-byte encoding edges around ±127/±128.
    let cases: [(&str, i64); 13] = [
        ("v-1", -1),
        ("v-2", -2),
        ("v-126", -126),
        ("v-127", -127),
        ("v-128", -128),
        ("v-129", -129),
        ("v0", 0),
        ("v+1", 1),
        ("v+2", 2),
        ("v+126", 126),
        ("v+127", 127),
        ("v+128", 128),
        ("v+129", 129),
    ];
    for &(name, value) in cases.iter() {
        leb128_save(ctx, name, value);
    }
}
// Entry point that conditionally increments based on this instance's local
// static; all logic lives in `when_must_increment_state`.
pub fn func_when_must_increment(ctx: &ScFuncContext, f: &WhenMustIncrementContext) {
    when_must_increment_state(ctx, &f.state);
}
// note that get_counter mirrors the state of the 'counter' state variable
// which means that if the state variable was not present it also will not be present in the result
pub fn view_get_counter(_ctx: &ScViewContext, f: &GetCounterContext) {
    let counter = f.state.counter();
    if counter.exists() {
        let value = counter.value();
        f.results.counter().set_value(value);
    }
}
// Encodes `value` as LEB128 into state under `name`, reads it back, decodes
// it, and logs both values if the round-trip did not reproduce the input.
fn leb128_save(ctx: &ScFuncContext, name: &str, value: i64) {
    let mut encoder = BytesEncoder::new();
    encoder.int64(value);
    let spot = ctx.state().get_bytes(name);
    spot.set_value(&encoder.data());

    let bytes = spot.value();
    let mut decoder = BytesDecoder::new(&bytes);
    let retrieved = decoder.int64();
    if retrieved != value {
        // Mismatch: log input and output for diagnosis (no panic).
        ctx.log(&(name.to_string() + " in : " + &value.to_string()));
        ctx.log(&(name.to_string() + " out: " + &retrieved.to_string()));
    }
}
// Posts a when_must_increment request funded with 1 iota.
fn local_state_post(ctx: &ScFuncContext, nr: i64) {
    //note: we add a dummy parameter here to prevent "duplicate outputs not allowed" error
    let f = ScFuncs::when_must_increment(ctx);
    f.params.dummy().set_value(nr);
    f.func.transfer_iotas(1).post();
}
// Increments the counter only when this instance's LOCAL_STATE_MUST_INCREMENT
// flag is raised; always logs the call so tests can observe invocation.
fn when_must_increment_state(ctx: &ScFuncContext, state: &MutableIncCounterState) {
    ctx.log("when_must_increment called");
    unsafe {
        // Read of the process-local flag; see the static's declaration note.
        if !LOCAL_STATE_MUST_INCREMENT {
            return;
        }
    }
    let counter = state.counter();
    counter.set_value(counter.value() + 1);
}
pub fn func_increment_with_delay(ctx: &ScFuncContext, f: &IncrementWithDelayContext) {
    // Schedule a call_increment request to run after the requested delay,
    // funded with 1 iota.
    let inc = inccounter::ScFuncs::call_increment(ctx);
    inc.func.delay(f.params.delay().value()).transfer_iotas(1).post();
}
| 31.062147 | 99 | 0.662241 |
5b0106bbfe8c645039910d852eaf51aa07d32dcd | 1,828 | use super::*;
use log::LevelFilter;
use log4rs::append::file::FileAppender;
use log4rs::config::{Appender, Config, Logger, Root};
use log4rs::encode::pattern::PatternEncoder;
use log4rs::Handle;
/// Builds a log4rs `Config` for the `languageclient` logger at `level`.
/// When `path` is given, logging goes to that file (truncated on open,
/// preceded by a version banner); otherwise no appender is attached.
fn create_config(path: &Option<String>, level: LevelFilter) -> Result<Config> {
    let encoder =
        PatternEncoder::new("{date(%H:%M:%S)} {level} {thread} {file}:{line} {message}{n}");
    let mut config_builder =
        Config::builder().logger(Logger::builder().build("languageclient", level));
    let mut root_builder = Root::builder();
    if let Some(path) = path {
        // Ensure log file writable.
        {
            let mut f = std::fs::OpenOptions::new()
                .create(true)
                .write(true)
                .truncate(true)
                .open(path)
                .with_context(|err| format!("Failed to open file ({}): {}", path, err))?;
            #[allow(write_literal)]
            writeln!(
                f,
                "#######\nLanguageClient {} {}\n#######",
                env!("CARGO_PKG_VERSION"),
                env!("GIT_HASH")
            )?;
        }
        // File checked out OK: wire the appender into both the config and root.
        let appender = FileAppender::builder()
            .encoder(Box::new(encoder))
            .build(path)?;
        config_builder =
            config_builder.appender(Appender::builder().build("logfile", Box::new(appender)));
        root_builder = root_builder.appender("logfile");
    }
    let config = config_builder.build(root_builder.build(level))?;
    Ok(config)
}
pub fn init() -> Result<Handle> {
    // Bootstrap with warnings only and no log file; callers reconfigure later
    // via `update_settings`.
    Ok(log4rs::init_config(create_config(&None, LevelFilter::Warn)?)?)
}
pub fn update_settings(handle: &Handle, path: &Option<String>, level: LevelFilter) -> Result<()> {
    // Rebuild the config and swap it into the live logging handle.
    handle.set_config(create_config(path, level)?);
    Ok(())
}
| 32.642857 | 98 | 0.56674 |
4bd395fc6358f21d836a206d3b08d427baa8684d | 20,207 | use query_engine_tests::*;
// TODO(dom): Not working on mongo
#[test_suite(schema(schema), exclude(MongoDb))]
mod order_by_aggr {
use indoc::indoc;
use query_engine_tests::{assert_query_many, run_query};
    // Prisma schema shared by every test in this suite: users with posts
    // (one-to-many) plus many-to-many category links on users and posts.
    fn schema() -> String {
        let schema = indoc! {
            r#"model User {
              #id(id, Int, @id)
              name   String
              posts  Post[]
              #m2m(categories, Category[], Int)
            }

            model Post {
              #id(id, Int, @id)
              title  String
              user   User   @relation(fields: [userId], references: [id])
              userId Int
              #m2m(categories, Category[], Int)
            }

            model Category {
              #id(id, Int, @id)
              name   String
              #m2m(posts, Post[], Int)
              #m2m(users, User[], Int)
            }"#
        };

        schema.to_owned()
    }
    // Order users by ascending count of their related posts (one-to-many).
    #[connector_test]
    async fn one2m_count_asc(runner: &Runner) -> TestResult<()> {
        create_test_data(runner).await?;

        insta::assert_snapshot!(
          run_query!(runner, r#"{
            findManyUser(orderBy: { posts: { _count: asc } }) {
              id
              posts {
                title
              }
            }
          }"#),
          @r###"{"data":{"findManyUser":[{"id":3,"posts":[]},{"id":1,"posts":[{"title":"alice_post_1"}]},{"id":2,"posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]}]}}"###
        );

        Ok(())
    }
    // Order users by descending count of their related posts (one-to-many).
    #[connector_test]
    async fn one2m_count_desc(runner: &Runner) -> TestResult<()> {
        create_test_data(runner).await?;

        insta::assert_snapshot!(
          run_query!(runner, r#"{
            findManyUser(orderBy: { posts: { _count: desc } }) {
              id
              posts {
                title
              }
            }
          }"#),
          @r###"{"data":{"findManyUser":[{"id":2,"posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]},{"id":1,"posts":[{"title":"alice_post_1"}]},{"id":3,"posts":[]}]}}"###
        );

        Ok(())
    }
    // Order posts by ascending count of linked categories (many-to-many).
    #[connector_test]
    async fn m2m_count_asc(runner: &Runner) -> TestResult<()> {
        create_test_data(runner).await?;

        insta::assert_snapshot!(
          run_query!(runner, r#"{
            findManyPost(orderBy: { categories: { _count: asc } }) {
              title
              categories {
                name
              }
            }
          }"#),
          @r###"{"data":{"findManyPost":[{"title":"bob_post_1","categories":[{"name":"Finance"}]},{"title":"alice_post_1","categories":[{"name":"News"},{"name":"Society"}]},{"title":"bob_post_2","categories":[{"name":"History"},{"name":"Gaming"},{"name":"Hacking"}]}]}}"###
        );

        Ok(())
    }
    // Order posts by descending count of linked categories (many-to-many).
    #[connector_test]
    async fn m2m_count_desc(runner: &Runner) -> TestResult<()> {
        create_test_data(runner).await?;

        insta::assert_snapshot!(
          run_query!(runner, r#"{
            findManyPost(orderBy: { categories: { _count: desc } }) {
              title
              categories {
                name
              }
            }
          }"#),
          @r###"{"data":{"findManyPost":[{"title":"bob_post_2","categories":[{"name":"History"},{"name":"Gaming"},{"name":"Hacking"}]},{"title":"alice_post_1","categories":[{"name":"News"},{"name":"Society"}]},{"title":"bob_post_1","categories":[{"name":"Finance"}]}]}}"###
        );

        Ok(())
    }
    // Combined ordering: relation count ascending, then scalar field ascending.
    #[connector_test]
    async fn one2m_count_asc_field_asc(runner: &Runner) -> TestResult<()> {
        create_test_data(runner).await?;

        insta::assert_snapshot!(
          run_query!(runner, r#"{
            findManyUser(orderBy: [{ posts: { _count: asc } }, { name: asc }]) {
              id
              name
              posts {
                title
              }
            }
          }"#),
          @r###"{"data":{"findManyUser":[{"id":3,"name":"Motongo","posts":[]},{"id":1,"name":"Alice","posts":[{"title":"alice_post_1"}]},{"id":2,"name":"Bob","posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]}]}}"###
        );

        Ok(())
    }
// "[Combo] Ordering by one2m count asc + field desc" should "work"
    // Combined ordering: scalar field descending first, then relation count.
    #[connector_test]
    async fn one2m_count_asc_field_desc(runner: &Runner) -> TestResult<()> {
        create_test_data(runner).await?;

        insta::assert_snapshot!(
          run_query!(runner, r#"{
            findManyUser(orderBy: [{ name: desc }, { posts: { _count: asc } }]) {
              id
              name
              posts {
                title
              }
            }
          }"#),
          @r###"{"data":{"findManyUser":[{"id":3,"name":"Motongo","posts":[]},{"id":2,"name":"Bob","posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]},{"id":1,"name":"Alice","posts":[{"title":"alice_post_1"}]}]}}"###
        );

        Ok(())
    }
// "[Combo] Ordering by m2m count asc + field desc" should "work"
    // Combined ordering: m2m relation count ascending, then title ascending.
    #[connector_test]
    async fn m2m_count_asc_field_desc(runner: &Runner) -> TestResult<()> {
        create_test_data(runner).await?;

        insta::assert_snapshot!(
          run_query!(runner, r#"{
            findManyPost(orderBy: [{ categories: { _count: asc } }, { title: asc }]) {
              title
              categories {
                name
              }
            }
          }"#),
          @r###"{"data":{"findManyPost":[{"title":"bob_post_1","categories":[{"name":"Finance"}]},{"title":"alice_post_1","categories":[{"name":"News"},{"name":"Society"}]},{"title":"bob_post_2","categories":[{"name":"History"},{"name":"Gaming"},{"name":"Hacking"}]}]}}"###
        );

        Ok(())
    }
// "[Combo] Ordering by one2m field asc + m2m count desc" should "work"
    // Combined ordering across hops: related user's name ascending, then the
    // post's own m2m category count descending.
    #[connector_test]
    async fn one2m_field_asc_m2m_count_desc(runner: &Runner) -> TestResult<()> {
        create_test_data(runner).await?;

        insta::assert_snapshot!(
          run_query!(runner, r#"{
            findManyPost(orderBy: [{ user: { name: asc }}, { categories: { _count: desc }}]) {
              user {
                name
              }
              categories {
                name
              }
            }
          }"#),
          @r###"{"data":{"findManyPost":[{"user":{"name":"Alice"},"categories":[{"name":"News"},{"name":"Society"}]},{"user":{"name":"Bob"},"categories":[{"name":"History"},{"name":"Gaming"},{"name":"Hacking"}]},{"user":{"name":"Bob"},"categories":[{"name":"Finance"}]}]}}"###
        );

        Ok(())
    }
// "[2+ Hops] Ordering by m2one2m count asc" should "work"
    // Two-hop ordering: count of the related user's categories, ascending,
    // with id as a deterministic tiebreaker.
    #[connector_test]
    async fn m2one2m_count_asc(runner: &Runner) -> TestResult<()> {
        create_test_data(runner).await?;

        insta::assert_snapshot!(
          run_query!(runner, r#"{
            findManyPost(orderBy: [{ user: { categories: { _count: asc } } }, { id: asc }]) {
              id
              user { categories { name } }
            }
          }"#),
          @r###"{"data":{"findManyPost":[{"id":1,"user":{"categories":[{"name":"Startup"}]}},{"id":2,"user":{"categories":[{"name":"Computer Science"},{"name":"Music"}]}},{"id":3,"user":{"categories":[{"name":"Computer Science"},{"name":"Music"}]}}]}}"###
        );

        Ok(())
    }
// "[2+ Hops] Ordering by m2one2m count desc" should "work"
#[connector_test]
async fn m2one2m_count_desc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
assert_query_many!(
runner,
r#"{
findManyPost(orderBy: { user: { categories: { _count: desc } } }) {
id
user { categories { name } }
}
}"#,
vec![
r#"{"data":{"findManyPost":[{"id":2,"user":{"categories":[{"name":"Computer Science"},{"name":"Music"}]}},{"id":3,"user":{"categories":[{"name":"Computer Science"},{"name":"Music"}]}},{"id":1,"user":{"categories":[{"name":"Startup"}]}}]}}"#,
r#"{"data":{"findManyPost":[{"id":3,"user":{"categories":[{"name":"Computer Science"},{"name":"Music"}]}},{"id":2,"user":{"categories":[{"name":"Computer Science"},{"name":"Music"}]}},{"id":1,"user":{"categories":[{"name":"Startup"}]}}]}}"#,
]
);
Ok(())
}
// "[Combo][2+ Hops] Ordering by m2m count asc + m2one2m count desc" should "work"
#[connector_test]
async fn m2m_count_asc_m2one2m_count_desc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyPost(orderBy: [{ categories: { _count: asc }}, { user: { categories: { _count: desc }} }]) {
id
categories(orderBy: { name: asc }) {
name
}
}
}"#),
@r###"{"data":{"findManyPost":[{"id":2,"categories":[{"name":"Finance"}]},{"id":1,"categories":[{"name":"News"},{"name":"Society"}]},{"id":3,"categories":[{"name":"Gaming"},{"name":"Hacking"},{"name":"History"}]}]}}"###
);
Ok(())
}
// "[Combo][2+ Hops] Ordering by m2one field asc + m2one2m count desc" should "work"
#[connector_test]
async fn m2one_field_asc_m2one2m_count_desc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
assert_query_many!(
runner,
r#"{
findManyPost(orderBy: [{ user: { name: asc }}, { user: { categories: { _count: desc }} }]) {
id
user {
name
categories { name }
}
}
}"#,
vec![
r#"{"data":{"findManyPost":[{"id":1,"user":{"name":"Alice","categories":[{"name":"Startup"}]}},{"id":2,"user":{"name":"Bob","categories":[{"name":"Computer Science"},{"name":"Music"}]}},{"id":3,"user":{"name":"Bob","categories":[{"name":"Computer Science"},{"name":"Music"}]}}]}}"#,
r#"{"data":{"findManyPost":[{"id":1,"user":{"name":"Alice","categories":[{"name":"Startup"}]}},{"id":3,"user":{"name":"Bob","categories":[{"name":"Computer Science"},{"name":"Music"}]}},{"id":2,"user":{"name":"Bob","categories":[{"name":"Computer Science"},{"name":"Music"}]}}]}}"#,
]
);
Ok(())
}
// With pagination tests
// "[Cursor] Ordering by one2m count asc" should "work"
#[connector_test]
async fn cursor_one2m_count_asc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyUser(orderBy: { posts: { _count: asc } }, cursor: { id: 2 }) {
id
posts {
title
}
}
}"#),
@r###"{"data":{"findManyUser":[{"id":2,"posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]}]}}"###
);
Ok(())
}
// "[Cursor] Ordering by one2m count desc" should "work"
#[connector_test]
async fn cursor_one2m_count_desc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyUser(orderBy: { posts: { _count: desc } }, cursor: { id: 1 }) {
id
posts {
title
}
}
}"#),
@r###"{"data":{"findManyUser":[{"id":1,"posts":[{"title":"alice_post_1"}]}]}}"###
);
Ok(())
}
// "[Cursor] Ordering by m2m count asc" should "work"
#[connector_test]
async fn cursor_m2m_count_asc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyPost(orderBy: { categories: { _count: asc } }, cursor: { id: 2 }, take: 2) {
id
title
categories {
name
}
}
}"#),
@r###"{"data":{"findManyPost":[{"id":2,"title":"bob_post_1","categories":[{"name":"Finance"}]},{"id":1,"title":"alice_post_1","categories":[{"name":"News"},{"name":"Society"}]}]}}"###
);
Ok(())
}
// "[Cursor] Ordering by m2m count desc" should "work"
#[connector_test]
async fn cursor_m2m_count_desc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyPost(orderBy: { categories: { _count: desc } }, cursor: { id: 1 }, take: 2) {
id
title
categories {
name
}
}
}"#),
@r###"{"data":{"findManyPost":[{"id":1,"title":"alice_post_1","categories":[{"name":"News"},{"name":"Society"}]},{"id":2,"title":"bob_post_1","categories":[{"name":"Finance"}]}]}}"###
);
Ok(())
}
// "[Cursor][Combo] Ordering by one2m count asc + field asc"
#[connector_test]
async fn cursor_one2m_count_asc_field_asc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyUser(orderBy: [{ posts: { _count: asc } }, { name: asc }], cursor: { id: 2 }) {
id
name
posts {
title
}
}
}"#),
@r###"{"data":{"findManyUser":[{"id":2,"name":"Bob","posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]}]}}"###
);
Ok(())
}
// "[Cursor][Combo] Ordering by one2m count asc + field desc" should "work"
#[connector_test]
async fn cursor_one2m_count_asc_field_desc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyUser(orderBy: [{ name: desc }, { posts: { _count: asc } }], cursor: { id: 2 }, take: 1) {
id
name
posts {
title
}
}
}"#),
@r###"{"data":{"findManyUser":[{"id":2,"name":"Bob","posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]}]}}"###
);
Ok(())
}
// "[Cursor][Combo] Ordering by m2m count asc + field desc" should "work"
#[connector_test]
async fn cursor_m2m_count_asc_field_desc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyPost(orderBy: [{ categories: { _count: asc } }, { title: asc }], cursor: { id: 2 }, take: 2) {
id
title
categories {
name
}
}
}"#),
@r###"{"data":{"findManyPost":[{"id":2,"title":"bob_post_1","categories":[{"name":"Finance"}]},{"id":1,"title":"alice_post_1","categories":[{"name":"News"},{"name":"Society"}]}]}}"###
);
Ok(())
}
// "[Cursor][Combo] Ordering by one2m field asc + m2m count desc" should "work"
#[connector_test]
async fn cursor_one2m_field_asc_m2m_count_desc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyPost(orderBy: [{ user: { name: asc }}, { categories: { _count: desc }}], cursor: { id: 1 }, take: 2) {
id
title
user {
name
}
categories {
name
}
}
}"#),
@r###"{"data":{"findManyPost":[{"id":1,"title":"alice_post_1","user":{"name":"Alice"},"categories":[{"name":"News"},{"name":"Society"}]},{"id":3,"title":"bob_post_2","user":{"name":"Bob"},"categories":[{"name":"History"},{"name":"Gaming"},{"name":"Hacking"}]}]}}"###
);
Ok(())
}
// "[Cursor][2+ Hops] Ordering by m2one2m count asc" should "work"
#[connector_test]
async fn cursor_m2one2m_count_asc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyPost(orderBy: [{ user: { categories: { _count: asc } } }, { id: asc }], cursor: { id: 2 }, take: 1) {
id
user { categories { name } }
}
}"#),
@r###"{"data":{"findManyPost":[{"id":2,"user":{"categories":[{"name":"Computer Science"},{"name":"Music"}]}}]}}"###
);
Ok(())
}
// "[Cursor][2+ Hops] Ordering by m2one2m count desc" should "work"
#[connector_test]
async fn cursor_m2one2m_count_desc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyPost(orderBy: [{ user: { categories: { _count: desc } } }, { id: asc }], cursor: { id: 2 }, take: 2) {
id
user { categories { name } }
}
}"#),
@r###"{"data":{"findManyPost":[{"id":2,"user":{"categories":[{"name":"Computer Science"},{"name":"Music"}]}},{"id":3,"user":{"categories":[{"name":"Computer Science"},{"name":"Music"}]}}]}}"###
);
Ok(())
}
// "[Cursor][Combo][2+ Hops] Ordering by m2m count asc + m2one2m count desc" should "work"
#[connector_test]
async fn cursor_m2m_count_asc_m2one2m_count_desc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyPost(orderBy: [{ categories: { _count: asc }}, { user: { categories: { _count: desc }} }], cursor: { id: 2 }, take: 2) {
id
categories { name }
user { categories { name } }
}
}"#),
@r###"{"data":{"findManyPost":[{"id":2,"categories":[{"name":"Finance"}],"user":{"categories":[{"name":"Computer Science"},{"name":"Music"}]}},{"id":1,"categories":[{"name":"News"},{"name":"Society"}],"user":{"categories":[{"name":"Startup"}]}}]}}"###
);
Ok(())
}
// "[Cursor][Combo][2+ Hops] Ordering by m2one field asc + m2one2m count desc" should "work"
#[connector_test]
async fn cursor_m2one_field_asc_m2one2m_count_desc(runner: &Runner) -> TestResult<()> {
create_test_data(runner).await?;
insta::assert_snapshot!(
run_query!(runner, r#"{
findManyPost(orderBy: [{ user: { name: asc }}, { user: { categories: { _count: desc }} }, { id: asc }], cursor: { id: 2 }, take: 2) {
id
user {
name
categories { name }
}
}
}"#),
@r###"{"data":{"findManyPost":[{"id":2,"user":{"name":"Bob","categories":[{"name":"Computer Science"},{"name":"Music"}]}},{"id":3,"user":{"name":"Bob","categories":[{"name":"Computer Science"},{"name":"Music"}]}}]}}"###
);
Ok(())
}
/// Seeds the fixture shared by every test above:
/// - Alice: 1 user category (Startup); 1 post with 2 categories (News, Society).
/// - Bob: 2 user categories (Computer Science, Music); 2 posts with 1 and 3 categories.
/// - Motongo: no categories and no posts.
async fn create_test_data(runner: &Runner) -> TestResult<()> {
create_row(runner, r#"{ id: 1, name: "Alice", categories: { create: [{ id: 1, name: "Startup" }] }, posts: { create: { id: 1, title: "alice_post_1", categories: { create: [{ id: 2, name: "News" }, { id: 3, name: "Society" }] }} } }"#).await?;
create_row(runner, r#"{ id: 2, name: "Bob", categories: { create: [{ id: 4, name: "Computer Science" }, { id: 5, name: "Music" }] }, posts: { create: [{ id: 2, title: "bob_post_1", categories: { create: [{ id: 6, name: "Finance" }] } }, { id: 3, title: "bob_post_2", categories: { create: [{ id: 7, name: "History" }, { id: 8, name: "Gaming" }, { id: 9, name: "Hacking" }] } }] } }"#).await?;
create_row(runner, r#"{ id: 3, name: "Motongo" }"#).await?;
Ok(())
}
/// Runs a `createOneUser` mutation with the given `data` block and asserts
/// that the engine reported success.
async fn create_row(runner: &Runner, data: &str) -> TestResult<()> {
    let mutation = format!("mutation {{ createOneUser(data: {}) {{ id }} }}", data);
    let response = runner.query(mutation).await?;
    response.assert_success();
    Ok(())
}
}
| 37.145221 | 400 | 0.48305 |
72b12771674344a2d3d5918a84db6b7ea3dae7bf | 3,546 | #![deny(warnings)]
#![no_main]
#![no_std]
use core::cell::{Cell, RefCell};
use cortex_m::interrupt::{free, Mutex};
use cortex_m::peripheral::NVIC;
use cortex_m_rt::entry;
use stm32h7xx_hal::gpio::{Edge, ExtiPin, Input, Output, PushPull};
use stm32h7xx_hal::{interrupt, pac, prelude::*};
// LED pin
use stm32h7xx_hal::gpio::gpioa::PA1;
// Button pins
use stm32h7xx_hal::gpio::gpioc::PC5;
use stm32h7xx_hal::gpio::gpioe::PE3;
#[macro_use]
mod utilities;
use log::info;
// Semaphore for synchronization
// NOTE(review): only ever written (`set(false)`) by the two interrupt handlers below;
// nothing in this example reads it back — presumably a hook for extending the demo.
static SEMAPHORE: Mutex<Cell<bool>> = Mutex::new(Cell::new(true));
// Setup the sharing of pins between the main loop and the interrupts.
// The `cortex_m` Mutex is claimed inside `interrupt::free` critical sections,
// which is how `main` hands the initialized pins over to the handlers.
static BUTTON1_PIN: Mutex<RefCell<Option<PE3<Input>>>> =
Mutex::new(RefCell::new(None));
static BUTTON2_PIN: Mutex<RefCell<Option<PC5<Input>>>> =
Mutex::new(RefCell::new(None));
static LED: Mutex<RefCell<Option<PA1<Output<PushPull>>>>> =
Mutex::new(RefCell::new(None));
/// Board bring-up: configure clocks, two rising-edge EXTI buttons (PE3, PC5)
/// and the PA1 LED, publish the pins to the shared statics, then unmask the
/// button interrupts and idle forever.
#[entry]
fn main() -> ! {
utilities::logger::init();
info!("stm32h7xx-hal example - EXTI Interrupt");
let mut cp = cortex_m::Peripherals::take().unwrap();
let dp = pac::Peripherals::take().unwrap();
info!("Setup PWR...");
let pwr = dp.PWR.constrain();
let pwrcfg = example_power!(pwr).freeze();
info!("Setup RCC...");
let rcc = dp.RCC.constrain();
let ccdr = rcc.sys_ck(100.MHz()).freeze(pwrcfg, &dp.SYSCFG);
// Push button configuration
let mut syscfg = dp.SYSCFG;
let mut exti = dp.EXTI;
let gpioe = dp.GPIOE.split(ccdr.peripheral.GPIOE);
let mut button1 = gpioe.pe3.into_pull_up_input();
button1.make_interrupt_source(&mut syscfg);
button1.trigger_on_edge(&mut exti, Edge::Rising);
button1.enable_interrupt(&mut exti);
let gpioc = dp.GPIOC.split(ccdr.peripheral.GPIOC);
let mut button2 = gpioc.pc5.into_pull_up_input();
button2.make_interrupt_source(&mut syscfg);
button2.trigger_on_edge(&mut exti, Edge::Rising);
button2.enable_interrupt(&mut exti);
let gpioa = dp.GPIOA.split(ccdr.peripheral.GPIOA);
let led = gpioa.pa1.into_push_pull_output();
// Save information needed by the interrupt handlers to the global variable.
// This must happen before the NVIC lines are unmasked below, otherwise a
// handler could fire and find the Option slots still empty.
free(|cs| {
BUTTON1_PIN.borrow(cs).replace(Some(button1));
BUTTON2_PIN.borrow(cs).replace(Some(button2));
LED.borrow(cs).replace(Some(led));
});
// Enable the button interrupts.
// Priorities are written before the lines are unmasked.
unsafe {
cp.NVIC.set_priority(interrupt::EXTI3, 1);
cp.NVIC.set_priority(interrupt::EXTI9_5, 1);
NVIC::unmask::<interrupt>(interrupt::EXTI3);
NVIC::unmask::<interrupt>(interrupt::EXTI9_5);
}
loop {
cortex_m::asm::nop();
}
}
/// Drives the shared LED pin: `true` sets it high, `false` sets it low.
/// Silently does nothing until `main` has stored the pin in the `LED` slot.
fn toggle_led(on_or_off: bool) {
    free(|cs| {
        let mut slot = LED.borrow(cs).borrow_mut();
        if let Some(led) = slot.as_mut() {
            if on_or_off {
                led.set_high();
            } else {
                led.set_low();
            }
        }
    });
}
/// Handler for EXTI lines 5–9 (button 2 on PC5): drives the LED high,
/// acknowledges the pending EXTI bit and flips the shared semaphore.
#[interrupt]
fn EXTI9_5() {
    info!("EXTI9_5 fired!");
    toggle_led(true);
    free(|cs| {
        let mut slot = BUTTON2_PIN.borrow(cs).borrow_mut();
        if let Some(button) = slot.as_mut() {
            button.clear_interrupt_pending_bit();
        }
        // Signal that the interrupt fired
        SEMAPHORE.borrow(cs).set(false);
    });
}
/// Handler for EXTI line 3 (button 1 on PE3): drives the LED low,
/// acknowledges the pending EXTI bit and flips the shared semaphore.
#[interrupt]
fn EXTI3() {
    info!("EXTI3 fired!");
    toggle_led(false);
    free(|cs| {
        let mut slot = BUTTON1_PIN.borrow(cs).borrow_mut();
        if let Some(button) = slot.as_mut() {
            button.clear_interrupt_pending_bit();
        }
        // Signal that the interrupt fired
        SEMAPHORE.borrow(cs).set(false);
    });
}
| 27.488372 | 79 | 0.631134 |
e5ac01927f6e8e4ba3f3226d89db01b37dce984e | 8,159 | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
// Encodes VPMULHUW xmm1, xmm2, xmm2 — all-register form, no masking, 32-bit mode.
#[test]
fn vpmulhuw_1() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM2)), operand3: Some(Direct(XMM2)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 233, 228, 202], OperandSize::Dword)
}
// Encodes VPMULHUW xmm5, xmm1, [eax+disp32] — memory source, 32-bit mode.
#[test]
fn vpmulhuw_2() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM1)), operand3: Some(IndirectDisplaced(EAX, 1732950149, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 241, 228, 168, 133, 184, 74, 103], OperandSize::Dword)
}
// Encodes VPMULHUW xmm2, xmm3, xmm2 — all-register form, 64-bit mode.
#[test]
fn vpmulhuw_3() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM3)), operand3: Some(Direct(XMM2)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 225, 228, 210], OperandSize::Qword)
}
// Encodes VPMULHUW xmm6, xmm3, [rax+rsi*4+disp32] — scaled-index memory source, 64-bit mode.
#[test]
fn vpmulhuw_4() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM3)), operand3: Some(IndirectScaledIndexedDisplaced(RAX, RSI, Four, 819374641, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 225, 228, 180, 176, 49, 170, 214, 48], OperandSize::Qword)
}
// Encodes VPMULHUW ymm2, ymm7, ymm3 — 256-bit register form, 32-bit mode.
#[test]
fn vpmulhuw_5() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(YMM2)), operand2: Some(Direct(YMM7)), operand3: Some(Direct(YMM3)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 197, 228, 211], OperandSize::Dword)
}
// Encodes VPMULHUW ymm2, ymm4, [ecx+ebx*8+disp32] — 256-bit memory source, 32-bit mode.
#[test]
fn vpmulhuw_6() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(YMM2)), operand2: Some(Direct(YMM4)), operand3: Some(IndirectScaledIndexedDisplaced(ECX, EBX, Eight, 1645259245, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 221, 228, 148, 217, 237, 169, 16, 98], OperandSize::Dword)
}
// Encodes VPMULHUW ymm4, ymm6, ymm5 — 256-bit register form, 64-bit mode.
#[test]
fn vpmulhuw_7() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM6)), operand3: Some(Direct(YMM5)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 205, 228, 229], OperandSize::Qword)
}
// Encodes VPMULHUW ymm5, ymm3, [rcx+rcx*4] — 256-bit memory source without displacement, 64-bit mode.
#[test]
fn vpmulhuw_8() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM3)), operand3: Some(IndirectScaledIndexed(RCX, RCX, Four, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 229, 228, 44, 137], OperandSize::Qword)
}
// Encodes VPMULHUW xmm0{k3}{z}, xmm0, xmm1 — masked with zeroing merge, 32-bit mode.
#[test]
fn vpmulhuw_9() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM0)), operand3: Some(Direct(XMM1)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 125, 139, 228, 193], OperandSize::Dword)
}
// Encodes VPMULHUW xmm3{k2}{z}, xmm5, [eax*2+disp32] — masked memory form, 32-bit mode.
#[test]
fn vpmulhuw_10() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(XMM3)), operand2: Some(Direct(XMM5)), operand3: Some(IndirectScaledDisplaced(EAX, Two, 1563835845, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 85, 138, 228, 28, 69, 197, 61, 54, 93], OperandSize::Dword)
}
// Encodes VPMULHUW xmm27{k2}{z}, xmm25, xmm26 — extended registers (xmm16+), 64-bit mode only.
#[test]
fn vpmulhuw_11() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(XMM27)), operand2: Some(Direct(XMM25)), operand3: Some(Direct(XMM26)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 1, 53, 130, 228, 218], OperandSize::Qword)
}
// Encodes VPMULHUW xmm25{k4}{z}, xmm13, [rdi*4+disp32] — masked memory form, 64-bit mode.
#[test]
fn vpmulhuw_12() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(XMM25)), operand2: Some(Direct(XMM13)), operand3: Some(IndirectScaledDisplaced(RDI, Four, 382502070, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 97, 21, 140, 228, 12, 189, 182, 132, 204, 22], OperandSize::Qword)
}
// Encodes VPMULHUW ymm6{k3}{z}, ymm0, ymm0 — 256-bit masked register form, 32-bit mode.
#[test]
fn vpmulhuw_13() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(YMM6)), operand2: Some(Direct(YMM0)), operand3: Some(Direct(YMM0)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 125, 171, 228, 240], OperandSize::Dword)
}
// Encodes VPMULHUW ymm5{k4}{z}, ymm2, [eax+disp32] — 256-bit masked memory form, 32-bit mode.
#[test]
fn vpmulhuw_14() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM2)), operand3: Some(IndirectDisplaced(EAX, 327899942, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 241, 109, 172, 228, 168, 38, 91, 139, 19], OperandSize::Dword)
}
// Encodes VPMULHUW ymm18{k5}{z}, ymm3, ymm18 — extended 256-bit registers, 64-bit mode.
#[test]
fn vpmulhuw_15() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(YMM18)), operand2: Some(Direct(YMM3)), operand3: Some(Direct(YMM18)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 161, 101, 173, 228, 210], OperandSize::Qword)
}
// Encodes VPMULHUW ymm10{k4}{z}, ymm20, [rbx+disp32] — 256-bit masked memory form, 64-bit mode.
#[test]
fn vpmulhuw_16() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(YMM10)), operand2: Some(Direct(YMM20)), operand3: Some(IndirectDisplaced(RBX, 580310561, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 113, 93, 164, 228, 147, 33, 214, 150, 34], OperandSize::Qword)
}
// Encodes VPMULHUW zmm6{k3}{z}, zmm0, zmm3 — 512-bit masked register form, 32-bit mode.
#[test]
fn vpmulhuw_17() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(ZMM6)), operand2: Some(Direct(ZMM0)), operand3: Some(Direct(ZMM3)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 125, 203, 228, 243], OperandSize::Dword)
}
// Encodes VPMULHUW zmm0{k6}{z}, zmm0, [ecx+edi*4+disp32] — 512-bit masked memory form, 32-bit mode.
#[test]
fn vpmulhuw_18() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(ZMM0)), operand2: Some(Direct(ZMM0)), operand3: Some(IndirectScaledIndexedDisplaced(ECX, EDI, Four, 350387922, Some(OperandSize::Zmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 241, 125, 206, 228, 132, 185, 210, 126, 226, 20], OperandSize::Dword)
}
// Encodes VPMULHUW zmm20{k6}{z}, zmm27, zmm7 — extended 512-bit registers, 64-bit mode.
#[test]
fn vpmulhuw_19() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(ZMM20)), operand2: Some(Direct(ZMM27)), operand3: Some(Direct(ZMM7)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 225, 37, 198, 228, 231], OperandSize::Qword)
}
// Encodes VPMULHUW zmm15{k1}{z}, zmm23, [rax+disp32] — 512-bit masked memory form, 64-bit mode.
#[test]
fn vpmulhuw_20() {
run_test(&Instruction { mnemonic: Mnemonic::VPMULHUW, operand1: Some(Direct(ZMM15)), operand2: Some(Direct(ZMM23)), operand3: Some(IndirectDisplaced(RAX, 956714644, Some(OperandSize::Zmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 113, 69, 193, 228, 184, 148, 78, 6, 57], OperandSize::Qword)
}
| 74.853211 | 443 | 0.711362 |
6a81ee6641079cf8e98fe626875bee742f38ae5d | 2,323 | // generated by lelwel
mod imp;
pub use imp::*;
use super::diag::*;
use super::parser::*;
use super::token::*;
use bumpalo::Bump;
use std::cell::Cell;
/// Abstract syntax tree
#[derive(Debug)]
pub struct Ast<'a, Input: TokenStream> {
arena: Bump,
root: Option<&'a <Parser as Parsing<'a, Input>>::Output>,
}
impl<'a, Input: TokenStream> Ast<'a, Input> {
/// Creates a new abstract syntax tree.
///
/// Parses `input`, allocating all nodes into the freshly created arena.
/// A parse error is reported through `diag` and leaves `root` as `None`.
pub fn new(input: &mut Input, diag: &mut Diag) -> Ast<'a, Input> {
let mut ast = Ast {
arena: Bump::new(),
root: None,
};
// borrow arena for the lifetime of the returned Ast
match Parser::parse(input, Self::extend(&ast.arena), diag) {
Ok(root) => ast.root = Some(Self::extend(ast.arena.alloc(root))),
Err(e) => diag.error(e, input.current().range),
}
ast
}
/// Gets the root node of the `Ast`.
#[allow(dead_code)]
pub fn root(&self) -> Option<&'a <Parser as Parsing<'a, Input>>::Output> {
self.root
}
/// Extends lifetime of reference to lifetime of `Ast`.
///
/// NOTE(review): this launders `'b` into `'a` through a raw pointer, which
/// the borrow checker cannot verify. It relies on `Bump` allocations being
/// heap-backed (stable addresses even if the `Ast` moves) and on callers
/// never letting the returned reference outlive the `Ast` — confirm both
/// invariants hold at every call site before touching this.
#[allow(dead_code)]
fn extend<'b, T>(reference: &'b T) -> &'a T {
unsafe { &*(reference as *const T) }
}
}
/// Reference to another node in the `Ast`.
///
/// A late-bound link: it can be created unset and redirected at any time
/// through a shared reference, thanks to the interior-mutable `Cell`.
pub struct Ref<'a, T> {
    target: Cell<Option<&'a T>>,
}

#[allow(dead_code)]
impl<'a, T> Ref<'a, T> {
    /// Creates a new `Ref`, optionally pointing at `init`.
    pub fn new(init: Option<&'a T>) -> Ref<'a, T> {
        Self { target: Cell::new(init) }
    }

    /// Redirects the `Ref` to `value`.
    pub fn set(&self, value: &'a T) {
        self.target.set(Some(value));
    }

    /// Returns the current target, or `None` when the link is unset.
    pub fn get(&self) -> Option<&'a T> {
        self.target.get()
    }
}

impl<'a, T> std::fmt::Debug for Ref<'a, T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Debug output shows the target's address (null when unset).
        let ptr: *const T = self.get().map_or(std::ptr::null(), |t| t as *const T);
        std::fmt::Pointer::fmt(&ptr, f)
    }
}

impl<'a, T> std::fmt::Pointer for Ref<'a, T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Identical to the Debug rendering: the raw target address.
        let ptr: *const T = self.get().map_or(std::ptr::null(), |t| t as *const T);
        std::fmt::Pointer::fmt(&ptr, f)
    }
}
1d01e9b5fb7dc9533cf8eefbdf7699538ee84e09 | 4,911 | use super::Peekable;
/// An iterator adapter that places a separator between all elements.
///
/// This `struct` is created by [`Iterator::intersperse`]. See its documentation
/// for more information.
#[unstable(feature = "iter_intersperse", reason = "recently added", issue = "79524")]
#[derive(Debug, Clone)]
pub struct Intersperse<I: Iterator>
where
I::Item: Clone,
{
separator: I::Item,
iter: Peekable<I>,
needs_sep: bool,
}
impl<I: Iterator> Intersperse<I>
where
I::Item: Clone,
{
// Crate-internal constructor used by `Iterator::intersperse`; starts with no
// separator pending so the first yielded item is an element.
pub(in crate::iter) fn new(iter: I, separator: I::Item) -> Self {
Self { iter: iter.peekable(), separator, needs_sep: false }
}
}
#[unstable(feature = "iter_intersperse", reason = "recently added", issue = "79524")]
impl<I> Iterator for Intersperse<I>
where
I: Iterator,
I::Item: Clone,
{
type Item = I::Item;
#[inline]
fn next(&mut self) -> Option<I::Item> {
// A separator is due AND more elements follow: emit a clone of the
// separator. The peek guarantees no trailing separator after the last item.
if self.needs_sep && self.iter.peek().is_some() {
self.needs_sep = false;
Some(self.separator.clone())
} else {
// Yield the next element (or `None` at the end) and mark a separator
// as pending for the following call.
self.needs_sep = true;
self.iter.next()
}
}
// Delegates to the free function so `Intersperse` and `IntersperseWith`
// share a single fold implementation; the separator value becomes a
// cloning closure here.
fn fold<B, F>(self, init: B, f: F) -> B
where
Self: Sized,
F: FnMut(B, Self::Item) -> B,
{
let separator = self.separator;
intersperse_fold(self.iter, init, f, move || separator.clone(), self.needs_sep)
}
fn size_hint(&self) -> (usize, Option<usize>) {
intersperse_size_hint(&self.iter, self.needs_sep)
}
}
/// An iterator adapter that places a separator between all elements.
///
/// This `struct` is created by [`Iterator::intersperse_with`]. See its
/// documentation for more information.
#[unstable(feature = "iter_intersperse", reason = "recently added", issue = "79524")]
pub struct IntersperseWith<I, G>
where
I: Iterator,
{
// Closure invoked lazily to produce each separator value.
separator: G,
// Peekable for the same reason as `Intersperse`: no trailing separator.
iter: Peekable<I>,
// `true` ⇒ the next `next()` call must yield a separator (if any items remain).
needs_sep: bool,
}
#[unstable(feature = "iter_intersperse", reason = "recently added", issue = "79524")]
impl<I, G> crate::fmt::Debug for IntersperseWith<I, G>
where
I: Iterator + crate::fmt::Debug,
I::Item: crate::fmt::Debug,
G: crate::fmt::Debug,
{
// Hand-written field-by-field Debug via the `debug_struct` builder,
// mirroring what a derive would produce for these bounds.
fn fmt(&self, f: &mut crate::fmt::Formatter<'_>) -> crate::fmt::Result {
f.debug_struct("IntersperseWith")
.field("separator", &self.separator)
.field("iter", &self.iter)
.field("needs_sep", &self.needs_sep)
.finish()
}
}
#[unstable(feature = "iter_intersperse", reason = "recently added", issue = "79524")]
impl<I, G> crate::clone::Clone for IntersperseWith<I, G>
where
    I: Iterator + crate::clone::Clone,
    I::Item: crate::clone::Clone,
    G: Clone,
{
    // Hand-written field-by-field Clone, mirroring what a derive would produce.
    fn clone(&self) -> Self {
        IntersperseWith {
            separator: self.separator.clone(),
            iter: self.iter.clone(),
            // `bool` is `Copy`: a plain copy replaces the needless `.clone()`
            // call the previous version made (clippy: clone_on_copy).
            needs_sep: self.needs_sep,
        }
    }
}
impl<I, G> IntersperseWith<I, G>
where
I: Iterator,
G: FnMut() -> I::Item,
{
// Crate-internal constructor used by `Iterator::intersperse_with`; starts
// with no separator pending so the first yielded item is an element.
pub(in crate::iter) fn new(iter: I, separator: G) -> Self {
Self { iter: iter.peekable(), separator, needs_sep: false }
}
}
#[unstable(feature = "iter_intersperse", reason = "recently added", issue = "79524")]
impl<I, G> Iterator for IntersperseWith<I, G>
where
I: Iterator,
G: FnMut() -> I::Item,
{
type Item = I::Item;
#[inline]
fn next(&mut self) -> Option<I::Item> {
// Same state machine as `Intersperse::next`, except the separator is
// produced by calling the stored closure instead of cloning a value.
if self.needs_sep && self.iter.peek().is_some() {
self.needs_sep = false;
Some((self.separator)())
} else {
self.needs_sep = true;
self.iter.next()
}
}
// Shares the fold implementation with `Intersperse` via the free function.
fn fold<B, F>(self, init: B, f: F) -> B
where
Self: Sized,
F: FnMut(B, Self::Item) -> B,
{
intersperse_fold(self.iter, init, f, self.separator, self.needs_sep)
}
fn size_hint(&self) -> (usize, Option<usize>) {
intersperse_size_hint(&self.iter, self.needs_sep)
}
}
/// Computes the `size_hint` of an intersperse adapter over `iter`.
///
/// With `n` underlying items remaining, the adapter yields `2n - 1` items
/// when the next item is an element (no separator pending) and `2n` when a
/// separator is pending. Saturating/checked arithmetic keeps both bounds
/// from overflowing `usize`.
fn intersperse_size_hint<I>(iter: &I, needs_sep: bool) -> (usize, Option<usize>)
where
    I: Iterator,
{
    let (lo, hi) = iter.size_hint();
    // 1 exactly when the next yielded item will be an element, not a separator.
    let next_is_elem = usize::from(!needs_sep);
    (
        lo.saturating_sub(next_is_elem).saturating_add(lo),
        hi.and_then(|hi| hi.saturating_sub(next_is_elem).checked_add(hi)),
    )
}
/// Folds an intersperse adapter: feeds every element to `f`, inserting
/// `separator()` between consecutive items.
///
/// `needs_sep` is the adapter's current state. When it is `true`, a separator
/// is still pending and must be emitted *before* the next element, exactly as
/// `next()` would do. The previous implementation guarded the first element
/// with `!needs_sep || iter.peek().is_some()`, which consumed that element
/// WITHOUT the pending separator and made `fold` disagree with repeated
/// `next()` calls (rust-lang/rust#88080). With the corrected guard the
/// `peek()` workaround is unnecessary: `next()` on an empty iterator is fine.
fn intersperse_fold<I, B, F, G>(
    mut iter: Peekable<I>,
    init: B,
    mut f: F,
    mut separator: G,
    needs_sep: bool,
) -> B
where
    I: Iterator,
    F: FnMut(B, I::Item) -> B,
    G: FnMut() -> I::Item,
{
    let mut accum = init;

    // Only the very first element may be emitted without a leading separator,
    // and only when no separator is pending.
    if !needs_sep {
        if let Some(x) = iter.next() {
            accum = f(accum, x);
        }
    }

    // Every remaining element is preceded by one separator.
    iter.fold(accum, |mut accum, x| {
        accum = f(accum, separator());
        accum = f(accum, x);
        accum
    })
}
| 25.984127 | 87 | 0.580737 |
e653131e363d51bc362c9b5b4e7f20387216bf94 | 491 | use super::*;
// Lexes a rule definition containing escapes: `\ ` yields a literal space,
// `\a` yields `a`, `\}` yields `}`. Token positions (the first tuple field)
// are dropped by the map so only the token kinds/payloads are compared.
#[test]
fn lex_a() {
use Token::*;
assert_eq!(
lex("<peter\\ > <= mueller[asdf]\\a\\}")
.into_iter()
.map(|(_, t)| t)
.collect::<Vec<_>>(),
vec![
RuleOpen,
String("peter ".into()),
RuleClose,
Assign,
String("mueller".into()),
OptOpen,
String("asdf".into()),
OptClose,
String("a}".into())
]
)
}
| 20.458333 | 50 | 0.366599 |
3a2eb362e4d2b61e69a9e475006fa4f4fad9b846 | 1,954 | use crate::core::ribosome::CallContext;
use crate::core::ribosome::RibosomeT;
use holochain_types::prelude::*;
use holochain_wasmer_host::prelude::WasmError;
use std::sync::Arc;
#[allow(clippy::extra_unused_lifetimes)]
pub fn agent_info<'a>(
_ribosome: Arc<impl RibosomeT>,
call_context: Arc<CallContext>,
_input: (),
) -> Result<AgentInfo, WasmError> {
let agent_pubkey = tokio_helper::block_forever_on(async move {
let lock = call_context.host_access.workspace().read().await;
lock.source_chain.agent_pubkey()
})
.map_err(|source_chain_error| WasmError::Host(source_chain_error.to_string()))?;
Ok(AgentInfo {
agent_initial_pubkey: agent_pubkey.clone(),
agent_latest_pubkey: agent_pubkey,
})
}
#[cfg(test)]
#[cfg(feature = "slow_tests")]
pub mod test {
use crate::fixt::ZomeCallHostAccessFixturator;
use ::fixt::prelude::*;
use holochain_types::prelude::*;
use holochain_types::test_utils::fake_agent_pubkey_1;
use holochain_wasm_test_utils::TestWasm;
// End-to-end check: invoking the `agent_info` host function through the test
// ribosome returns the genesis agent key as both initial and latest pubkey.
#[tokio::test(flavor = "multi_thread")]
async fn invoke_import_agent_info_test() {
let test_env = holochain_lmdb::test_utils::test_cell_env();
let env = test_env.env();
// Build a workspace with a fake genesis so the source chain holds an agent key.
let mut workspace =
crate::core::workflow::CallZomeWorkspace::new(env.clone().into()).unwrap();
crate::core::workflow::fake_genesis(&mut workspace.source_chain)
.await
.unwrap();
let workspace_lock = crate::core::workflow::CallZomeWorkspaceLock::new(workspace);
let mut host_access = fixt!(ZomeCallHostAccess);
host_access.workspace = workspace_lock;
let agent_info: AgentInfo =
crate::call_test_ribosome!(host_access, TestWasm::AgentInfo, "agent_info", ());
assert_eq!(agent_info.agent_initial_pubkey, fake_agent_pubkey_1(),);
assert_eq!(agent_info.agent_latest_pubkey, fake_agent_pubkey_1(),);
}
}
| 34.892857 | 91 | 0.685261 |
1e1709787351f5531c2fbf4e7bfe3d50e6c73e83 | 1,163 | //! Ergo blockchain types
// Coding conventions
#![forbid(unsafe_code)]
#![deny(non_upper_case_globals)]
#![deny(non_camel_case_types)]
#![deny(non_snake_case)]
#![deny(unused_mut)]
#![deny(dead_code)]
#![deny(unused_imports)]
#![deny(missing_docs)]
#![deny(rustdoc::broken_intra_doc_links)]
#![deny(clippy::wildcard_enum_match_arm)]
#![deny(clippy::unwrap_used)]
#![deny(clippy::expect_used)]
#![deny(clippy::todo)]
#![deny(clippy::unimplemented)]
#![deny(clippy::panic)]
mod base16_bytes;
mod block_id;
mod digest32;
pub mod ec_point;
mod extensioncandidate;
mod header;
mod json;
mod peer_addr;
mod peer_connection_dir;
mod preheader;
mod votes;
pub use base16_bytes::Base16DecodedBytes;
pub use base16_bytes::Base16EncodedBytes;
pub use block_id::BlockId;
pub use digest32::blake2b256_hash;
pub use digest32::ADDigest;
pub use digest32::Digest;
pub use digest32::Digest32;
pub use digest32::DigestNError;
pub use ec_point::EcPoint;
pub use extensioncandidate::ExtensionCandidate;
pub use header::{AutolykosSolution, Header};
pub use peer_addr::PeerAddr;
pub use peer_connection_dir::ConnectionDirection;
pub use preheader::PreHeader;
pub use votes::Votes;
| 24.744681 | 49 | 0.774721 |
1ed8882145a0b93a667956eb0e64c77ec2cbf95b | 738 | // Write some data into a file
// Usage: cargo run <file path> string
use std::io::Write;
use std::fs::File;
use std::env;
/// Writes the second CLI argument into the file named by the first.
/// Usage: cargo run <file path> string
fn main() {
    let args: Vec<String> = env::args().collect();
    // args[0] is the program name, so two user arguments means len() == 3.
    // The previous `< 2` check let a single-argument invocation panic with an
    // index-out-of-bounds on `args[2]` instead of printing the usage message.
    if args.len() < 3 {
        panic!("Usage: cargo run <file path> string");
    }
    let file_name = args[1].clone();
    let contents = args[2].clone();
    match File::create(file_name) {
        Ok(mut f) => {
            // `write_all` instead of `write`: `write` may report success after
            // writing only part of the buffer, silently truncating the output.
            match f.write_all(contents.as_bytes()) {
                Ok(_) => {}
                Err(error) => {
                    println!("Error writing to file: {}", error);
                }
            }
        }
        Err(error) => {
            println!("Error when creating file for writing: {}", error);
        }
    }
}
| 24.6 | 72 | 0.47832 |
e81aba48fb1982b3a666ec1c65aec255f5efef45 | 762 | //! Demo app for egui
#[cfg(target_arch = "wasm32")]
use eframe::wasm_bindgen::{self, prelude::*};
/// This is the entry-point for all the web-assembly.
/// This is called once from the HTML.
/// It loads the app, installs some callbacks, then returns.
/// You can add more callbacks like this if you want to call in to your code.
// Only compiled for the wasm32 target; native builds use a different entry point.
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen]
pub fn start(canvas_id: &str) -> Result<(), wasm_bindgen::JsValue> {
    // Make sure panics are logged using `console.error`.
    console_error_panic_hook::set_once();
    // Redirect tracing to console.log and friends:
    tracing_wasm::set_as_global_default();
    // `canvas_id` names the HTML <canvas> element that eframe renders into;
    // any error from setting up the web runner is propagated to the JS caller.
    eframe::start_web(
        canvas_id,
        Box::new(|cc| Box::new(egui_demo_lib::WrapApp::new(cc))),
    )
}
| 31.75 | 77 | 0.67979 |
231c1c1017afb7713c4576dc3a569ede16d5f00f | 638 | /// 256 bytes total for DMG
/// DMG boot ROMs are 256 bytes long.
pub const BOOTROM_SIZE_DMG: usize = 0x100;
/// CGB boot ROMs are 2304 bytes long.
pub const BOOTROM_SIZE_CGB: usize = 0x900;

/// Raw boot-ROM contents.
type BootRomData = Vec<u8>;

/// Boot ROM mapped at the bottom of the address space until execution leaves it.
pub struct BootRom {
    /// `true` once the boot ROM is done (or was never provided).
    pub is_finished: bool,
    data: BootRomData,
}

impl BootRom {
    /// Builds a boot ROM from optional raw contents. With `None`, the ROM
    /// holds no data and is treated as already finished.
    pub fn new(data: Option<BootRomData>) -> Self {
        let (is_finished, data) = match data {
            Some(rom) => (false, rom),
            None => (true, Vec::new()),
        };
        Self { is_finished, data }
    }

    /// Reads one byte at `address`. Panics if `address` is outside the ROM.
    pub fn read_byte(&self, address: u16) -> u8 {
        self.data[usize::from(address)]
    }
}
| 21.266667 | 51 | 0.514107 |
f536b566881b054eb6e64946c1574f3695ddaaa3 | 581 | // Copyright 2020 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
// (Re-)generated by schema tool
// >>>> DO NOT CHANGE THIS FILE! <<<<
// Change the json schema instead
// @formatter:off
#![allow(dead_code)]
use wasmlib::*;
use crate::*;
pub(crate) const IDX_RESULT_HELLO_WORLD: usize = 0;
pub const KEY_MAP_LEN: usize = 1;
pub const KEY_MAP: [&str; KEY_MAP_LEN] = [
RESULT_HELLO_WORLD,
];
pub static mut IDX_MAP: [Key32; KEY_MAP_LEN] = [Key32(0); KEY_MAP_LEN];
/// Returns the schema key stored at `idx` in the generated key map.
///
/// # Panics
/// Panics if `idx >= KEY_MAP_LEN`.
pub fn idx_map(idx: usize) -> Key32 {
    // SAFETY: reads the `static mut IDX_MAP`; assumes the wasm contract runs
    // single-threaded so no data race is possible — TODO confirm for the host.
    // NOTE(review): this file is schema-tool generated ("DO NOT CHANGE"),
    // so fixes belong in the json schema + regeneration, not hand edits.
    unsafe {
        IDX_MAP[idx]
    }
}
| 17.606061 | 71 | 0.659208 |
623cdefb9630196d691ead480a10958ef9ea2f65 | 3,661 | #[doc = "Register `EVENTS_RXFRAMEEND` reader"]
pub struct R(crate::R<EVENTS_RXFRAMEEND_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<EVENTS_RXFRAMEEND_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<EVENTS_RXFRAMEEND_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<EVENTS_RXFRAMEEND_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `EVENTS_RXFRAMEEND` writer"]
pub struct W(crate::W<EVENTS_RXFRAMEEND_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<EVENTS_RXFRAMEEND_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<EVENTS_RXFRAMEEND_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<EVENTS_RXFRAMEEND_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `EVENTS_RXFRAMEEND` reader - "]
pub struct EVENTS_RXFRAMEEND_R(crate::FieldReader<bool, bool>);
impl EVENTS_RXFRAMEEND_R {
pub(crate) fn new(bits: bool) -> Self {
EVENTS_RXFRAMEEND_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for EVENTS_RXFRAMEEND_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `EVENTS_RXFRAMEEND` writer - "]
pub struct EVENTS_RXFRAMEEND_W<'a> {
w: &'a mut W,
}
impl<'a> EVENTS_RXFRAMEEND_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01);
self.w
}
}
// svd2rust-generated accessors: `R` exposes a typed reader for the single
// field of this register, `W` a typed writer plus a raw-bits escape hatch.
impl R {
    #[doc = "Bit 0"]
    #[inline(always)]
    // Extracts bit 0 of the cached register value as a typed field reader.
    pub fn events_rxframeend(&self) -> EVENTS_RXFRAMEEND_R {
        EVENTS_RXFRAMEEND_R::new((self.bits & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 0"]
    #[inline(always)]
    // Returns a field-writer proxy that will update bit 0 of this register.
    pub fn events_rxframeend(&mut self) -> EVENTS_RXFRAMEEND_W {
        EVENTS_RXFRAMEEND_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe because arbitrary bit patterns bypass the typed field writers;
    // the caller must ensure the value is valid for this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "Received data has been checked (CRC, parity) and transferred to RAM, and EasyDMA has ended accessing the RX buffer\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [events_rxframeend](index.html) module"]
pub struct EVENTS_RXFRAMEEND_SPEC;
impl crate::RegisterSpec for EVENTS_RXFRAMEEND_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [events_rxframeend::R](R) reader structure"]
impl crate::Readable for EVENTS_RXFRAMEEND_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [events_rxframeend::W](W) writer structure"]
impl crate::Writable for EVENTS_RXFRAMEEND_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets EVENTS_RXFRAMEEND to value 0"]
impl crate::Resettable for EVENTS_RXFRAMEEND_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| 32.39823 | 512 | 0.632068 |
fcd6cd7c9e416eae7901f7f1e447b564af9cebc3 | 373 | /*input
100
200
300
400
500
600
700
800
900
900
*/
/// Reads ten integers from stdin (one per line) and prints the three
/// largest, in descending order.
fn main() {
    let stdin = std::io::stdin();
    // Reuse one buffer across iterations instead of allocating a fresh
    // String per line (the original used `format!("")` each time).
    let mut line = String::new();
    let mut values: Vec<u16> = Vec::with_capacity(10);
    for _ in 0..10 {
        line.clear();
        stdin
            .read_line(&mut line)
            .expect("failed to read a line from stdin");
        let value = line
            .trim()
            .parse()
            .expect("each input line must be an integer in 0..=65535");
        values.push(value);
    }
    // Unstable sort is fine for plain integers; iterating in reverse
    // replaces the original sort + reverse + index-loop dance and cannot
    // go out of bounds if fewer than three values were parsed.
    values.sort_unstable();
    for value in values.iter().rev().take(3) {
        println!("{}", value);
    }
}
| 13.814815 | 58 | 0.479893 |
5d2bddc54138fb8f144a5d1f99b3991bc854751f | 1,408 | // MIT License
// Copyright (c) 2018 Tyler Laing (ZerothLaw)
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
mod conditionalattribute;
mod debugger;
mod debuggerattributes;
mod stackframe;
mod stacktrace;
pub mod symbolstore;
pub use self::conditionalattribute::*;
pub use self::debugger::*;
pub use self::debuggerattributes::*;
pub use self::stackframe::*;
pub use self::stacktrace::*; | 44 | 81 | 0.769176 |
09c27222428795543085365346d40adbea22485f | 2,029 | #[derive(Default)]
pub struct Day {}
/// Syntax-error score for an unexpected closing character (part 1 rules).
/// Any character other than `)`, `]`, `}` scores as `>`.
fn score(c: char) -> usize {
    if c == ')' {
        3
    } else if c == ']' {
        57
    } else if c == '}' {
        1197
    } else {
        25137
    }
}
/// Autocomplete score for one completion character (part 2 rules):
/// `)` -> 1, `]` -> 2, `}` -> 3, anything else -> 4.
fn score2(c: char) -> usize {
    [')', ']', '}']
        .iter()
        .position(|&close| close == c)
        .map_or(4, |idx| idx + 1)
}
// Bracket-matching puzzle (appears to be Advent of Code 2021 day 10 —
// "Syntax Scoring"): part 1 scores corrupted lines, part 2 scores the
// completion of the remaining (incomplete) lines.
impl crate::Day for Day {
    type Input = Vec<String>;
    // Splits the raw puzzle text into one owned String per line.
    fn gen(&self, data: &str) -> Self::Input {
        data.trim_end().lines().map(|s| s.into()).collect()
    }
    fn part1(&self, input: &Self::Input) -> String {
        let mut res = 0;
        for line in input {
            // For each opener we push the closer we *expect* to see, so a
            // closing char can be checked with a single pop + compare.
            let mut stack: Vec<char> = vec![];
            for c in line.chars() {
                match c {
                    '(' => stack.push(')'),
                    '[' => stack.push(']'),
                    '{' => stack.push('}'),
                    '<' => stack.push('>'),
                    c => {
                        // NOTE(review): `unwrap` panics if a closer arrives
                        // with an empty stack — assumes inputs never close
                        // more than they open. Also, scoring continues past
                        // the first mismatch in a line rather than stopping;
                        // confirm this matches the intended puzzle rules.
                        let expected = stack.pop().unwrap();
                        if c != expected {
                            res += score(c);
                        }
                    }
                }
            }
        }
        format!("{}", res)
    }
    fn part2(&self, input: &Self::Input) -> String {
        let mut scores: Vec<usize> = vec![];
        // Corrupted lines are skipped entirely via the labeled continue.
        'lines: for line in input {
            let mut stack: Vec<char> = vec![];
            for c in line.chars() {
                match c {
                    '(' => stack.push(')'),
                    '[' => stack.push(']'),
                    '{' => stack.push('}'),
                    '<' => stack.push('>'),
                    c => {
                        let expected = stack.pop().unwrap();
                        if c != expected {
                            continue 'lines;
                        }
                    }
                }
            }
            // Remaining stack (read top-down) is the completion string;
            // fold it base-5 with the per-character scores.
            scores.push(stack.iter().rev().fold(0, |t, x| t * 5 + score2(*x)));
        }
        scores.sort_unstable();
        // The puzzle guarantees an odd number of incomplete lines, so the
        // middle element is the median (panics on empty input).
        let res = scores[scores.len() / 2];
        format!("{}", res)
    }
}
| 26.697368 | 79 | 0.325776 |
23782c3ccc9bb62661850220a5b3829d0bba903d | 7,835 | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use errors::{Error, ErrorKind};
use rustc_serialize::json;
use std::str::FromStr;
use std::path::Path;
use runtest::ProcRes;
// These structs are a subset of the ones found in
// `syntax::json`.
#[derive(RustcEncodable, RustcDecodable)]
struct Diagnostic {
    // Primary human-readable message of the diagnostic.
    message: String,
    // Error code (e.g. E0308) plus optional long explanation, if any.
    code: Option<DiagnosticCode>,
    // Severity string as emitted by the compiler (e.g. "error", "warning").
    level: String,
    // Source spans the diagnostic points at.
    spans: Vec<DiagnosticSpan>,
    // Sub-diagnostics (notes, helps) attached to this one.
    children: Vec<Diagnostic>,
    // Pre-rendered suggestion text, when the compiler provides one.
    rendered: Option<String>,
}
#[derive(RustcEncodable, RustcDecodable, Clone)]
struct DiagnosticSpan {
    file_name: String,
    // 1-based line/column range of the span.
    line_start: usize,
    line_end: usize,
    column_start: usize,
    column_end: usize,
    // Whether this span is the primary location of the diagnostic.
    is_primary: bool,
    // Optional label rendered next to the span.
    label: Option<String>,
    // If the span came from a macro expansion, the expansion frame; boxed
    // because the type is recursive through DiagnosticSpanMacroExpansion.
    expansion: Option<Box<DiagnosticSpanMacroExpansion>>,
}
#[derive(RustcEncodable, RustcDecodable, Clone)]
struct DiagnosticSpanMacroExpansion {
    /// span where macro was applied to generate this code
    span: DiagnosticSpan,
    /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]")
    macro_decl_name: String,
}
#[derive(RustcEncodable, RustcDecodable, Clone)]
struct DiagnosticCode {
    /// The code itself.
    code: String,
    /// An explanation for the code.
    explanation: Option<String>,
}
/// Parses every line of compiler `output`, collecting the expected errors
/// found in JSON diagnostic lines that refer to `file_name`.
pub fn parse_output(file_name: &str, output: &str, proc_res: &ProcRes) -> Vec<Error> {
    let mut errors = Vec::new();
    for line in output.lines() {
        errors.extend(parse_line(file_name, line, output, proc_res));
    }
    errors
}
/// Parses a single line of compiler output. Lines starting with `{` are
/// decoded as a JSON `Diagnostic` and converted to expected errors; all
/// other lines yield no errors.
fn parse_line(file_name: &str, line: &str, output: &str, proc_res: &ProcRes) -> Vec<Error> {
    // The compiler sometimes intermingles non-JSON stuff into the
    // output. This hack just skips over such lines. Yuck.
    if line.starts_with('{') {
        match json::decode::<Diagnostic>(line) {
            Ok(diagnostic) => {
                let mut expected_errors = vec![];
                push_expected_errors(&mut expected_errors, &diagnostic, &[], file_name);
                expected_errors
            }
            Err(error) => {
                // `fatal` aborts the test run, so this arm never produces a
                // value (NOTE(review): presumably it returns `!` — confirm).
                proc_res.fatal(Some(&format!("failed to decode compiler output as json: \
                                              `{}`\noutput: {}\nline: {}",
                                             error,
                                             line,
                                             output)));
            }
        }
    } else {
        vec![]
    }
}
fn push_expected_errors(expected_errors: &mut Vec<Error>,
diagnostic: &Diagnostic,
default_spans: &[&DiagnosticSpan],
file_name: &str) {
let spans_in_this_file: Vec<_> = diagnostic.spans
.iter()
.filter(|span| Path::new(&span.file_name) == Path::new(&file_name))
.collect();
let primary_spans: Vec<_> = spans_in_this_file.iter()
.cloned()
.filter(|span| span.is_primary)
.take(1) // sometimes we have more than one showing up in the json; pick first
.collect();
let primary_spans = if primary_spans.is_empty() {
// subdiagnostics often don't have a span of their own;
// inherit the span from the parent in that case
default_spans
} else {
&primary_spans
};
// We break the output into multiple lines, and then append the
// [E123] to every line in the output. This may be overkill. The
// intention was to match existing tests that do things like "//|
// found `i32` [E123]" and expect to match that somewhere, and yet
// also ensure that `//~ ERROR E123` *always* works. The
// assumption is that these multi-line error messages are on their
// way out anyhow.
let with_code = |span: &DiagnosticSpan, text: &str| {
match diagnostic.code {
Some(ref code) =>
// FIXME(#33000) -- it'd be better to use a dedicated
// UI harness than to include the line/col number like
// this, but some current tests rely on it.
//
// Note: Do NOT include the filename. These can easily
// cause false matches where the expected message
// appears in the filename, and hence the message
// changes but the test still passes.
format!("{}:{}: {}:{}: {} [{}]",
span.line_start, span.column_start,
span.line_end, span.column_end,
text, code.code.clone()),
None =>
// FIXME(#33000) -- it'd be better to use a dedicated UI harness
format!("{}:{}: {}:{}: {}",
span.line_start, span.column_start,
span.line_end, span.column_end,
text),
}
};
// Convert multi-line messages into multiple expected
// errors. We expect to replace these with something
// more structured shortly anyhow.
let mut message_lines = diagnostic.message.lines();
if let Some(first_line) = message_lines.next() {
for span in primary_spans {
let msg = with_code(span, first_line);
let kind = ErrorKind::from_str(&diagnostic.level).ok();
expected_errors.push(Error {
line_num: span.line_start,
kind: kind,
msg: msg,
});
}
}
for next_line in message_lines {
for span in primary_spans {
expected_errors.push(Error {
line_num: span.line_start,
kind: None,
msg: with_code(span, next_line),
});
}
}
// If the message has a suggestion, register that.
if let Some(ref rendered) = diagnostic.rendered {
let start_line = primary_spans.iter().map(|s| s.line_start).min().expect("\
every suggestion should have at least one span");
for (index, line) in rendered.lines().enumerate() {
expected_errors.push(Error {
line_num: start_line + index,
kind: Some(ErrorKind::Suggestion),
msg: line.to_string(),
});
}
}
// Add notes for the backtrace
for span in primary_spans {
for frame in &span.expansion {
push_backtrace(expected_errors, frame, file_name);
}
}
// Add notes for any labels that appear in the message.
for span in spans_in_this_file.iter()
.filter(|span| span.label.is_some()) {
expected_errors.push(Error {
line_num: span.line_start,
kind: Some(ErrorKind::Note),
msg: span.label.clone().unwrap(),
});
}
// Flatten out the children.
for child in &diagnostic.children {
push_expected_errors(expected_errors, child, primary_spans, file_name);
}
}
fn push_backtrace(expected_errors: &mut Vec<Error>,
expansion: &DiagnosticSpanMacroExpansion,
file_name: &str) {
if Path::new(&expansion.span.file_name) == Path::new(&file_name) {
expected_errors.push(Error {
line_num: expansion.span.line_start,
kind: Some(ErrorKind::Note),
msg: format!("in this expansion of {}", expansion.macro_decl_name),
});
}
for previous_expansion in &expansion.span.expansion {
push_backtrace(expected_errors, previous_expansion, file_name);
}
}
| 36.105991 | 92 | 0.581876 |
4a27eb2ee2da62932f65e691c03431363293210e | 317 | //! Api requests via yew FetchService
mod articles;
mod auth;
mod comments;
mod profiles;
mod requests;
mod tags;
pub use articles::Articles;
pub use auth::Auth;
pub use comments::Comments;
pub use profiles::Profiles;
pub use requests::{get_token, is_authenticated, limit, set_token, Requests};
pub use tags::Tags;
| 19.8125 | 76 | 0.763407 |
14b1bbc61fff81be81d8c5e08b1b35eda0f5e2a2 | 896 | #[macro_export]
macro_rules! read_string {
($stream:ident) => {{
let mut buf = [0; 1];
$stream.read(&mut buf);
let len = buf.first().unwrap();
let mut vec_str = Vec::new();
for _ in 0..*len {
let mut buf2 = [0; 1];
$stream.read(&mut buf2);
vec_str.push(*buf2.first().unwrap());
}
String::from_utf8(vec_str).unwrap()
}};
}
/// Checks the result and returns error if result is error.
#[macro_export]
macro_rules! check_error_f {
($func:expr) => {
match $func {
Ok(n) => n,
Err(n) => return Err(n)
}
};
}
#[macro_export]
macro_rules! check_error_e {
($func:expr) => {
match $func {
Ok(0) => return Err("Unexpected EOF".to_string()),
Ok(n) => (),
Err(n) => return Err(n.to_string())
}
};
}
| 23.578947 | 62 | 0.483259 |
f7139991940e4bef0af923ea2732319fa3ca3cbf | 27,588 | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Blockchain explorer allows to get information about blocks and transactions in the blockchain.
//! It allows to request transactions from a block together with the execution statuses,
//! iterate over blocks, etc.
//!
//! This crate is distinct from the [explorer *service*][explorer-service] crate. While this crate
//! provides Rust language APIs for retrieving info from the blockchain, the explorer service
//! translates these APIs into REST and WebSocket endpoints. Correspondingly, this crate is
//! primarily useful for Rust-language client apps. Another use case is testing; the [testkit]
//! returns [`BlockWithTransactions`] from its `create_block*` methods and re-exports the entire
//! crate as `explorer`.
//!
//! See the examples in the crate for examples of usage.
//!
//! [explorer-service]: https://docs.rs/exonum-explorer-service/
//! [`BlockWithTransactions`]: struct.BlockWithTransactions.html
//! [testkit]: https://docs.rs/exonum-testkit/latest/exonum_testkit/struct.TestKit.html
use chrono::{DateTime, Utc};
use exonum::{
blockchain::{Block, CallInBlock, Schema, TxLocation},
crypto::Hash,
helpers::Height,
merkledb::{ListProof, MapProof, ObjectHash, Snapshot},
messages::{AnyTx, Precommit, Verified},
runtime::{ExecutionError, ExecutionErrorSerde, ExecutionStatus},
};
use serde::{Serialize, Serializer};
use serde_derive::*;
use std::{
cell::{Ref, RefCell},
collections::{BTreeMap, Bound},
fmt,
ops::{Index, RangeBounds},
slice,
time::UNIX_EPOCH,
};
pub mod api;
/// Ending height of the range (exclusive), clamped to one past the a priori
/// maximum height `max`.
fn end_height(bound: Bound<&Height>, max: Height) -> Height {
    // One past the last valid height; also the cap for any supplied bound.
    let cap = max.next();
    let exclusive_end = match bound {
        Bound::Included(height) => height.next(),
        Bound::Excluded(height) => *height,
        Bound::Unbounded => cap,
    };
    std::cmp::min(exclusive_end, cap)
}
/// Information about a block in the blockchain.
///
/// # JSON presentation
///
/// JSON object with the following fields:
///
/// | Name | Equivalent type | Description |
/// |------|-------|--------|
/// | `block` | [`Block`] | Block header as recorded in the blockchain |
/// | `precommits` | `Vec<`[`Precommit`]`>` | Precommits authorizing the block |
/// | `txs` | `Vec<`[`Hash`]`>` | Hashes of transactions in the block |
///
/// [`Block`]: https://docs.rs/exonum/latest/exonum/blockchain/struct.Block.html
/// [`Precommit`]: https://docs.rs/exonum/latest/exonum/messages/struct.Precommit.html
/// [`Hash`]: https://docs.rs/exonum-crypto/latest/exonum_crypto/struct.Hash.html
#[derive(Debug)]
pub struct BlockInfo<'a> {
header: Block,
explorer: &'a BlockchainExplorer<'a>,
precommits: RefCell<Option<Vec<Verified<Precommit>>>>,
txs: RefCell<Option<Vec<Hash>>>,
}
impl<'a> BlockInfo<'a> {
fn new(explorer: &'a BlockchainExplorer<'_>, height: Height) -> Self {
let schema = explorer.schema;
let hashes = schema.block_hashes_by_height();
let blocks = schema.blocks();
let block_hash = hashes
.get(height.0)
.unwrap_or_else(|| panic!("Block not found, height: {:?}", height));
let header = blocks
.get(&block_hash)
.unwrap_or_else(|| panic!("Block not found, hash: {:?}", block_hash));
BlockInfo {
explorer,
header,
precommits: RefCell::new(None),
txs: RefCell::new(None),
}
}
/// Returns block header as recorded in the blockchain.
pub fn header(&self) -> &Block {
&self.header
}
/// Extracts the header discarding all other information.
pub fn into_header(self) -> Block {
self.header
}
/// Returns the height of this block.
///
/// This method is equivalent to calling `block.header().height()`.
pub fn height(&self) -> Height {
self.header.height
}
/// Returns the number of transactions in this block.
pub fn len(&self) -> usize {
self.header.tx_count as usize
}
/// Is this block empty (i.e., contains no transactions)?
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns a list of precommits for this block.
pub fn precommits(&self) -> Ref<'_, [Verified<Precommit>]> {
if self.precommits.borrow().is_none() {
let precommits = self.explorer.precommits(&self.header);
*self.precommits.borrow_mut() = Some(precommits);
}
Ref::map(self.precommits.borrow(), |cache| {
cache.as_ref().unwrap().as_ref()
})
}
/// Lists hashes of transactions included in this block.
pub fn transaction_hashes(&self) -> Ref<'_, [Hash]> {
if self.txs.borrow().is_none() {
let txs = self.explorer.transaction_hashes(&self.header);
*self.txs.borrow_mut() = Some(txs);
}
Ref::map(self.txs.borrow(), |cache| cache.as_ref().unwrap().as_ref())
}
/// Returns a transaction with the specified index in the block.
pub fn transaction(&self, index: usize) -> Option<CommittedTransaction> {
self.transaction_hashes()
.get(index)
.map(|hash| self.explorer.committed_transaction(hash, None))
}
/// Returns the proof for the execution status of a call within this block.
///
/// Note that if the call did not result in an error or did not happen at all, the returned
/// proof will not contain entries. To distinguish between two cases, one can inspect
/// the number of transactions in the block or IDs of the active services when the block
/// was executed.
pub fn error_proof(&self, call_location: CallInBlock) -> MapProof<CallInBlock, ExecutionError> {
self.explorer
.schema
.call_errors(self.header.height)
.get_proof(call_location)
}
/// Iterates over transactions in the block.
pub fn iter(&self) -> Transactions<'_, '_> {
Transactions {
block: self,
ptr: 0,
len: self.len(),
}
}
/// Loads transactions, errors and precommits for the block.
pub fn with_transactions(self) -> BlockWithTransactions {
let (explorer, header, precommits, transactions) =
(self.explorer, self.header, self.precommits, self.txs);
let precommits = precommits
.into_inner()
.unwrap_or_else(|| explorer.precommits(&header));
let transactions = transactions
.into_inner()
.unwrap_or_else(|| explorer.transaction_hashes(&header))
.iter()
.map(|tx_hash| explorer.committed_transaction(tx_hash, None))
.collect();
let errors: Vec<_> = self
.explorer
.schema
.call_errors(header.height)
.iter()
.map(|(location, error)| ErrorWithLocation { location, error })
.collect();
BlockWithTransactions {
header,
precommits,
transactions,
errors,
}
}
}
impl<'a> Serialize for BlockInfo<'a> {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
use serde::ser::SerializeStruct;
let mut s = serializer.serialize_struct("BlockInfo", 3)?;
s.serialize_field("block", &self.header)?;
s.serialize_field("precommits", &*self.precommits())?;
s.serialize_field("txs", &*self.transaction_hashes())?;
s.end()
}
}
/// Iterator over transactions in a block.
#[derive(Debug)]
pub struct Transactions<'r, 'a> {
block: &'r BlockInfo<'a>,
ptr: usize,
len: usize,
}
/// Yields each transaction of the block in order, loading them lazily via
/// `BlockInfo::transaction`.
impl<'a, 'r> Iterator for Transactions<'a, 'r> {
    type Item = CommittedTransaction;

    fn next(&mut self) -> Option<CommittedTransaction> {
        if self.ptr >= self.len {
            return None;
        }
        let index = self.ptr;
        self.ptr += 1;
        self.block.transaction(index)
    }
}
impl<'a, 'r: 'a> IntoIterator for &'r BlockInfo<'a> {
type Item = CommittedTransaction;
type IntoIter = Transactions<'a, 'r>;
fn into_iter(self) -> Transactions<'a, 'r> {
self.iter()
}
}
/// Information about a block in the blockchain with info on transactions eagerly loaded.
#[derive(Debug, Serialize, Deserialize)]
pub struct BlockWithTransactions {
/// Block header as recorded in the blockchain.
#[serde(rename = "block")]
pub header: Block,
/// Precommits.
pub precommits: Vec<Verified<Precommit>>,
/// Transactions in the order they appear in the block.
pub transactions: Vec<CommittedTransaction>,
/// Errors that have occurred within the block.
pub errors: Vec<ErrorWithLocation>,
}
/// Execution error together with its location within the block.
#[derive(Debug, Serialize, Deserialize)]
pub struct ErrorWithLocation {
/// Location of the error.
pub location: CallInBlock,
/// Error data.
#[serde(with = "ExecutionErrorSerde")]
pub error: ExecutionError,
}
impl fmt::Display for ErrorWithLocation {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(formatter, "In {}: {}", self.location, self.error)
}
}
impl BlockWithTransactions {
/// Returns the height of this block.
///
/// This method is equivalent to calling `block.header.height()`.
pub fn height(&self) -> Height {
self.header.height
}
/// Returns the number of transactions in this block.
pub fn len(&self) -> usize {
self.transactions.len()
}
/// Is this block empty (i.e., contains no transactions)?
pub fn is_empty(&self) -> bool {
self.transactions.is_empty()
}
/// Iterates over transactions in the block.
pub fn iter(&self) -> EagerTransactions<'_> {
self.transactions.iter()
}
/// Returns errors converted into a map. Note that this is potentially a costly operation.
pub fn error_map(&self) -> BTreeMap<CallInBlock, &ExecutionError> {
self.errors.iter().map(|e| (e.location, &e.error)).collect()
}
}
/// Iterator over transactions in [`BlockWithTransactions`].
///
/// [`BlockWithTransactions`]: struct.BlockWithTransactions.html
pub type EagerTransactions<'a> = slice::Iter<'a, CommittedTransaction>;
impl Index<usize> for BlockWithTransactions {
type Output = CommittedTransaction;
fn index(&self, index: usize) -> &CommittedTransaction {
self.transactions.get(index).unwrap_or_else(|| {
panic!(
"Index exceeds number of transactions in block {}",
self.len()
);
})
}
}
/// Returns a transaction in the block by its hash. Beware that this is a slow operation
/// (linear w.r.t. the number of transactions in a block).
impl Index<Hash> for BlockWithTransactions {
type Output = CommittedTransaction;
fn index(&self, index: Hash) -> &CommittedTransaction {
self.transactions
.iter()
.find(|&tx| tx.message.object_hash() == index)
.unwrap_or_else(|| {
panic!("No transaction with hash {} in the block", index);
})
}
}
impl<'a> IntoIterator for &'a BlockWithTransactions {
type Item = &'a CommittedTransaction;
type IntoIter = EagerTransactions<'a>;
fn into_iter(self) -> EagerTransactions<'a> {
self.iter()
}
}
/// Information about a particular transaction in the blockchain.
///
/// # JSON presentation
///
/// | Name | Equivalent type | Description |
/// |------|-------|--------|
/// | `message` | `Verified<AnyTx>` | Transaction as recorded in the blockchain |
/// | `location` | [`TxLocation`] | Location of the transaction in the block |
/// | `location_proof` | [`ListProof`]`<`[`Hash`]`>` | Proof of transaction inclusion into a block |
/// | `status` | (custom; see below) | Execution status |
/// | `time` | [`DateTime`]`<`[`Utc`]`>` | Commitment time* |
///
/// \* By commitment time we mean an approximate commitment time of the block
/// which includes the transaction. This time is a median time of the precommit local times
/// of each validator.
///
/// ## `status` field
///
/// The `status` field is a more readable representation of the [`ExecutionStatus`] type.
///
/// For successfully executed transactions, `status` is equal to
///
/// ```json
/// { "type": "success" }
/// ```
///
/// For transactions that cause an [`ExecutionError`], `status` contains the error code
/// and an optional description, i.e., has the following type in the [TypeScript] notation:
///
/// ```typescript
/// type Error = {
/// type: 'service_error' | 'core_error' | 'common_error' | 'runtime_error' | 'unexpected_error',
/// code?: number,
/// description?: string,
/// runtime_id: number,
/// call_site?: CallSite,
/// };
///
/// type CallSite = MethodCallSite | HookCallSite;
///
/// type MethodCallSite = {
/// call_type: 'method',
/// instance_id: number,
/// interface?: string,
/// method_id: number,
/// };
///
/// type HookCallSite = {
/// call_type: 'constructor' | 'before_transactions' | 'after_transactions',
/// instance_id: number,
/// };
/// ```
///
/// Explanations:
///
/// - `Error.type` determines the component responsible for the error. Usually, errors
/// are generated by the service code, but they can also be caused by the dispatch logic,
/// runtime associated with the service, or come from another source (`unexpected_error`s).
/// - `Error.code` is the error code. For service errors, this code is specific
/// to the service instance (which can be obtained from `call_site`), and for runtime errors -
/// to the runtime. For core errors, the codes are fixed; their meaning can be found
/// in the [`CoreError`] docs. The code is present for all error types except
/// `unexpected_error`s, in which the code is always absent.
/// Besides types listed above, there is also a set of errors that can occur within any context,
/// which are organized in the [`CommonError`].
/// - `Error.description` is an optional human-readable description of the error.
/// - `Error.runtime_id` is the numeric ID of the runtime in which the error has occurred. Note
/// that the runtime is defined for all error types, not just `runtime_error`s, since
/// for any request it's possible to say which runtime is responsible for its processing.
/// - `Error.call_site` provides most precise known location of the call in which the error
/// has occurred.
///
/// [`TxLocation`]: https://docs.rs/exonum/latest/exonum/blockchain/struct.TxLocation.html
/// [`ListProof`]: https://docs.rs/exonum-merkledb/latest/exonum_merkledb/indexes/proof_list/struct.ListProof.html
/// [`Hash`]: https://docs.rs/exonum-crypto/latest/exonum_crypto/struct.Hash.html
/// [`ExecutionStatus`]: https://docs.rs/exonum/latest/exonum/runtime/struct.ExecutionStatus.html
/// [`ExecutionError`]: https://docs.rs/exonum/latest/exonum/runtime/struct.ExecutionError.html
/// [`CoreError`]: https://docs.rs/exonum/latest/exonum/runtime/enum.CoreError.html
/// [`CommonError`]: https://docs.rs/exonum/latest/exonum/runtime/enum.CommonError.html
/// [TypeScript]: https://www.typescriptlang.org/
/// [`DateTime`]: https://docs.rs/chrono/0.4.10/chrono/struct.DateTime.html
/// [`Utc`]: https://docs.rs/chrono/0.4.10/chrono/offset/struct.Utc.html
#[derive(Debug, Serialize, Deserialize)]
pub struct CommittedTransaction {
message: Verified<AnyTx>,
location: TxLocation,
location_proof: ListProof<Hash>,
status: ExecutionStatus,
time: DateTime<Utc>,
}
impl CommittedTransaction {
/// Returns the content of the transaction.
pub fn message(&self) -> &Verified<AnyTx> {
&self.message
}
/// Returns the transaction location in block.
pub fn location(&self) -> &TxLocation {
&self.location
}
/// Returns a proof that transaction is recorded in the blockchain.
pub fn location_proof(&self) -> &ListProof<Hash> {
&self.location_proof
}
/// Returns the status of the transaction execution.
pub fn status(&self) -> Result<(), &ExecutionError> {
self.status.0.as_ref().map(drop)
}
/// Returns an approximate commit time of the block which includes this transaction.
pub fn time(&self) -> &DateTime<Utc> {
&self.time
}
}
/// Information about the transaction.
///
/// Values of this type are returned by the `transaction()` method of the `BlockchainExplorer`.
///
/// # JSON presentation
///
/// ## Committed transactions
///
/// Committed transactions are represented just like a `CommittedTransaction`,
/// with the additional `type` field equal to `"committed"`.
///
/// ## Transaction in pool
///
/// Transactions in pool are represented with a 2-field object:
///
/// - `type` field contains transaction type (`"in_pool"`).
/// - `message` is the full transaction message serialized to the hexadecimal form.
///
/// # Examples
///
/// ```
/// use exonum_explorer::TransactionInfo;
/// use exonum::{crypto::gen_keypair, runtime::InstanceId};
/// # use exonum_derive::*;
/// # use serde_derive::*;
/// # use serde_json::json;
///
/// /// Service interface.
/// #[exonum_interface]
/// trait ServiceInterface<Ctx> {
///     type Output;
///     #[interface_method(id = 0)]
///     fn create_wallet(&self, ctx: Ctx, username: String) -> Self::Output;
/// }
///
/// # fn main() {
/// // Create a signed transaction.
/// let keypair = gen_keypair();
/// const SERVICE_ID: InstanceId = 100;
/// let tx = keypair.create_wallet(SERVICE_ID, "Alice".to_owned());
/// // This transaction in pool will be represented as follows:
/// let json = json!({
///     "type": "in_pool",
///     "message": tx,
/// });
/// let parsed: TransactionInfo = serde_json::from_value(json).unwrap();
/// assert!(parsed.is_in_pool());
/// # }
/// ```
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum TransactionInfo {
    /// Transaction is in the memory pool, but not yet committed to the blockchain.
    InPool {
        /// A content of the uncommitted transaction.
        message: Verified<AnyTx>,
    },
    /// Transaction is already committed to the blockchain.
    Committed(CommittedTransaction),
}
impl TransactionInfo {
    /// Returns the content of this transaction.
    pub fn message(&self) -> &Verified<AnyTx> {
        match self {
            TransactionInfo::InPool { message } => message,
            TransactionInfo::Committed(tx) => tx.message(),
        }
    }

    /// Is this in-pool transaction?
    pub fn is_in_pool(&self) -> bool {
        // `matches!` replaces the boilerplate `match … { true, _ => false }` form.
        matches!(self, TransactionInfo::InPool { .. })
    }

    /// Is this a committed transaction?
    pub fn is_committed(&self) -> bool {
        matches!(self, TransactionInfo::Committed(_))
    }

    /// Returns a reference to the inner committed transaction if this transaction is committed.
    /// For transactions in pool, returns `None`.
    pub fn as_committed(&self) -> Option<&CommittedTransaction> {
        match self {
            TransactionInfo::Committed(tx) => Some(tx),
            _ => None,
        }
    }
}
/// Blockchain explorer.
///
/// # Notes
///
/// The explorer wraps a specific [`Snapshot`] of the blockchain state; that is,
/// all calls to the methods of an explorer instance are guaranteed to be consistent.
///
/// [`Snapshot`]: https://docs.rs/exonum-merkledb/latest/exonum_merkledb/trait.Snapshot.html
#[derive(Debug, Copy, Clone)]
pub struct BlockchainExplorer<'a> {
    /// Core schema view over the wrapped snapshot.
    schema: Schema<&'a dyn Snapshot>,
}
impl<'a> BlockchainExplorer<'a> {
    /// Creates a new `BlockchainExplorer` instance from the provided snapshot.
    pub fn new(snapshot: &'a dyn Snapshot) -> Self {
        BlockchainExplorer {
            schema: Schema::new(snapshot),
        }
    }
    /// Creates a new `BlockchainExplorer` instance from the core schema.
    pub fn from_schema(schema: Schema<&'a dyn Snapshot>) -> Self {
        BlockchainExplorer { schema }
    }
    /// Returns information about the transaction identified by the hash.
    pub fn transaction(&self, tx_hash: &Hash) -> Option<TransactionInfo> {
        let message = self.transaction_without_proof(tx_hash)?;
        // A transaction still in the pool has no location / proof / status yet.
        if self.schema.transactions_pool().contains(tx_hash) {
            return Some(TransactionInfo::InPool { message });
        }
        // Known to the schema but not in the pool => it must be committed.
        let tx = self.committed_transaction(tx_hash, Some(message));
        Some(TransactionInfo::Committed(tx))
    }
    /// Returns the status of a call in a block.
    ///
    /// # Return value
    ///
    /// This method will return `Ok(())` both if the call completed successfully, or if
    /// was not performed at all. The caller is responsible to distinguish these two outcomes.
    pub fn call_status(
        &self,
        block_height: Height,
        call_location: CallInBlock,
    ) -> Result<(), ExecutionError> {
        match self.schema.call_errors(block_height).get(&call_location) {
            None => Ok(()),
            Some(e) => Err(e),
        }
    }
    /// Return transaction message without proof.
    pub fn transaction_without_proof(&self, tx_hash: &Hash) -> Option<Verified<AnyTx>> {
        self.schema.transactions().get(tx_hash)
    }
    /// Returns the precommits stored for the given block.
    fn precommits(&self, block: &Block) -> Vec<Verified<Precommit>> {
        self.schema
            .precommits(&block.object_hash())
            .iter()
            .collect()
    }
    /// Returns hashes of all transactions included in the given block.
    fn transaction_hashes(&self, block: &Block) -> Vec<Hash> {
        let tx_hashes_table = self.schema.block_transactions(block.height);
        tx_hashes_table.iter().collect()
    }
    /// Retrieves a transaction that is known to be committed.
    fn committed_transaction(
        &self,
        tx_hash: &Hash,
        maybe_content: Option<Verified<AnyTx>>,
    ) -> CommittedTransaction {
        let location = self
            .schema
            .transactions_locations()
            .get(tx_hash)
            .unwrap_or_else(|| panic!("Location not found for transaction hash {:?}", tx_hash));
        let location_proof = self
            .schema
            .block_transactions(location.block_height())
            .get_proof(u64::from(location.position_in_block()));
        let block_precommits = self
            .schema
            .block_and_precommits(location.block_height())
            .unwrap();
        let time = median_precommits_time(&block_precommits.precommits);
        // Unwrap is OK here, because we already know that transaction is committed.
        let status = self.schema.transaction_result(location).unwrap();
        CommittedTransaction {
            // `maybe_content` lets callers that already fetched the message avoid
            // a second schema lookup.
            message: maybe_content.unwrap_or_else(|| {
                self.schema
                    .transactions()
                    .get(tx_hash)
                    .expect("BUG: Cannot find transaction in database")
            }),
            location,
            location_proof,
            status: ExecutionStatus(status),
            time,
        }
    }
    /// Return the height of the blockchain.
    pub fn height(&self) -> Height {
        self.schema.height()
    }
    /// Returns block information for the specified height or `None` if there is no such block.
    pub fn block(&self, height: Height) -> Option<BlockInfo<'_>> {
        if self.height() >= height {
            Some(BlockInfo::new(self, height))
        } else {
            None
        }
    }
    /// Return a block together with its transactions at the specified height, or `None`
    /// if there is no such block.
    pub fn block_with_txs(&self, height: Height) -> Option<BlockWithTransactions> {
        let txs_table = self.schema.block_transactions(height);
        let block_proof = self.schema.block_and_precommits(height);
        let errors = self.schema.call_errors(height);
        block_proof.map(|proof| BlockWithTransactions {
            header: proof.block,
            precommits: proof.precommits,
            transactions: txs_table
                .iter()
                .map(|tx_hash| self.committed_transaction(&tx_hash, None))
                .collect(),
            errors: errors
                .iter()
                .map(|(location, error)| ErrorWithLocation { location, error })
                .collect(),
        })
    }
    /// Iterates over blocks in the blockchain.
    pub fn blocks<R: RangeBounds<Height>>(&self, heights: R) -> Blocks<'_> {
        use std::cmp::max;
        let max_height = self.schema.height();
        let ptr = match heights.start_bound() {
            Bound::Included(height) => *height,
            Bound::Excluded(height) => height.next(),
            Bound::Unbounded => Height(0),
        };
        Blocks {
            explorer: self,
            ptr,
            // Clamp with `max` so that `back >= ptr` even for empty / inverted ranges.
            back: max(ptr, end_height(heights.end_bound(), max_height)),
        }
    }
}
/// Iterator over blocks in the blockchain.
pub struct Blocks<'a> {
    /// Explorer used to materialize each yielded `BlockInfo`.
    explorer: &'a BlockchainExplorer<'a>,
    /// Height of the next block to yield from the front.
    ptr: Height,
    /// Exclusive upper bound: one past the last height to yield.
    back: Height,
}
impl<'a> fmt::Debug for Blocks<'a> {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
formatter
.debug_struct("Blocks")
.field("ptr", &self.ptr)
.field("back", &self.back)
.finish()
}
}
impl<'a> Iterator for Blocks<'a> {
    type Item = BlockInfo<'a>;

    fn next(&mut self) -> Option<BlockInfo<'a>> {
        if self.ptr == self.back {
            return None;
        }
        let block = BlockInfo::new(self.explorer, self.ptr);
        self.ptr = self.ptr.next();
        Some(block)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // `ptr <= back` is an invariant (enforced in `blocks()` and maintained
        // by `next` / `nth`), so the remaining length is exact.
        let exact = (self.back.0 - self.ptr.0) as usize;
        (exact, Some(exact))
    }

    fn count(self) -> usize {
        (self.back.0 - self.ptr.0) as usize
    }

    fn nth(&mut self, n: usize) -> Option<BlockInfo<'a>> {
        // Compare via the remaining length instead of `self.ptr.0 + n as u64`,
        // which could overflow `u64` for a huge `n` (panic in debug builds,
        // wrap-around in release builds).
        if self.back.0 - self.ptr.0 <= n as u64 {
            self.ptr = self.back;
            None
        } else {
            // `ptr + n < back` here, so this addition cannot overflow.
            self.ptr = Height(self.ptr.0 + n as u64);
            let block = BlockInfo::new(self.explorer, self.ptr);
            self.ptr = self.ptr.next();
            Some(block)
        }
    }
}
impl<'a> DoubleEndedIterator for Blocks<'a> {
    fn next_back(&mut self) -> Option<BlockInfo<'a>> {
        if self.back == self.ptr {
            None
        } else {
            // Step the exclusive upper bound down, then yield that height.
            self.back = self.back.previous();
            Some(BlockInfo::new(self.explorer, self.back))
        }
    }
}
/// Calculates a median time from precommits.
///
/// Returns the Unix epoch when the slice is empty; otherwise the upper median
/// of the precommit payload times.
pub fn median_precommits_time(precommits: &[Verified<Precommit>]) -> DateTime<Utc> {
    match precommits.len() {
        0 => UNIX_EPOCH.into(),
        len => {
            let mut times: Vec<_> = precommits.iter().map(|p| p.payload().time()).collect();
            times.sort();
            times[len / 2]
        }
    }
}
| 33.767442 | 114 | 0.621683 |
235ee66c9a94ba1440b7b47cca885334c8bcb3a9 | 1,132 | pub mod audio;
pub mod data;
pub mod logging;
pub mod sockets;
#[cfg(not(target_os = "android"))]
pub mod commands;
#[cfg(not(target_os = "android"))]
pub mod ffmpeg;
#[cfg(not(target_os = "android"))]
pub mod graphics;
pub mod prelude {
    // Re-exports of this crate's error-handling and event-logging helpers.
    pub use crate::{
        fmt_e,
        logging::{log_event, Event, StrResult},
        trace_err, trace_none, trace_str,
    };
    // Common `log` macros, re-exported for convenience.
    pub use log::{debug, error, info, warn};
}
////////////////////////////////////////////////////////
use prelude::*;
use std::future::Future;
use tokio::{sync::oneshot, task};
// Tokio tasks are not cancelable. This function awaits a cancelable task.
pub async fn spawn_cancelable(
    future: impl Future<Output = StrResult> + Send + 'static,
) -> StrResult {
    // this channel is actually never used. cancel_receiver will be notified when _cancel_sender is
    // dropped
    let (_cancel_sender, cancel_receiver) = oneshot::channel::<()>();
    // `_cancel_sender` lives in this wrapper future; when the caller drops the
    // wrapper, the sender is dropped, `cancel_receiver` resolves, and the
    // `select!` ends the spawned task with `Ok(())`.
    // The trailing `?` unwraps the join result (wrapped by `trace_err!`),
    // leaving the task's own `StrResult` as this function's return value.
    trace_err!(
        task::spawn(async {
            tokio::select! {
                res = future => res,
                _ = cancel_receiver => Ok(()),
            }
        })
        .await
    )?
}
| 24.608696 | 99 | 0.577739 |
5698439a79b16e1a834f531dc2e999d1f3a78aa1 | 11,757 | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
use aws_types::os_shim_internal;
use std::borrow::Cow;
use std::io::ErrorKind;
use std::path::{Component, Path, PathBuf};
use tracing::Instrument;
/// In-memory source of profile data
pub struct Source {
    /// Contents and path of ~/.aws/config
    pub config_file: File,
    /// Contents and path of ~/.aws/credentials
    pub credentials_file: File,
    /// Profile to use
    ///
    /// Overridden via `$AWS_PROFILE`, defaults to `default`
    /// (see [`load`] for the resolution logic).
    pub profile: Cow<'static, str>,
}
/// In-memory configuration file
pub struct File {
    /// Path the file was loaded from (after `~` expansion); used for diagnostics only.
    pub path: String,
    /// UTF-8 contents of the file (empty if the file was missing or not valid UTF-8).
    pub contents: String,
}
#[derive(Clone, Copy)]
pub enum FileKind {
    Config,
    Credentials,
}

impl FileKind {
    /// Default on-disk location for this kind of file (`~` is expanded later).
    fn default_path(&self) -> &'static str {
        match self {
            Self::Config => "~/.aws/config",
            Self::Credentials => "~/.aws/credentials",
        }
    }

    /// Environment variable that overrides the default location.
    fn override_environment_variable(&self) -> &'static str {
        match self {
            Self::Config => "AWS_CONFIG_FILE",
            Self::Credentials => "AWS_SHARED_CREDENTIALS_FILE",
        }
    }
}
/// Load a [Source](Source) from a given environment and filesystem.
pub async fn load(proc_env: &os_shim_internal::Env, fs: &os_shim_internal::Fs) -> Source {
    let home = home_dir(proc_env, Os::real());
    let config = load_config_file(FileKind::Config, &home, fs, proc_env)
        .instrument(tracing::info_span!("load_config_file"))
        .await;
    let credentials = load_config_file(FileKind::Credentials, &home, fs, proc_env)
        .instrument(tracing::info_span!("load_credentials_file"))
        .await;
    Source {
        config_file: config,
        credentials_file: credentials,
        // Profile selection: `$AWS_PROFILE` wins, otherwise `default`.
        profile: proc_env
            .get("AWS_PROFILE")
            .map(Cow::Owned)
            .unwrap_or(Cow::Borrowed("default")),
    }
}
/// Loads an AWS Config file
///
/// Both the default & the overriding patterns may contain `~/` which MUST be expanded to the users
/// home directory in a platform-aware way (see [`expand_home`](expand_home))
///
/// Arguments:
/// * `kind`: The type of config file to load
/// * `home_directory`: Home directory to use during home directory expansion
/// * `fs`: Filesystem abstraction
/// * `environment`: Process environment abstraction
async fn load_config_file(
    kind: FileKind,
    home_directory: &Option<String>,
    fs: &os_shim_internal::Fs,
    environment: &os_shim_internal::Env,
) -> File {
    // The environment-variable override wins over the kind's default path.
    let path = environment
        .get(kind.override_environment_variable())
        .map(Cow::Owned)
        .ok()
        .unwrap_or_else(|| kind.default_path().into());
    let expanded = expand_home(path.as_ref(), home_directory);
    if path != expanded.to_string_lossy() {
        tracing::debug!(before = ?path, after = ?expanded, "home directory expanded");
    }
    // read the data at the specified path
    // if the path does not exist, log a warning but pretend it was actually an empty file
    let data = match fs.read_to_end(&expanded).await {
        Ok(data) => data,
        Err(e) => {
            match e.kind() {
                ErrorKind::NotFound if path == kind.default_path() => {
                    tracing::info!(path = %path, "config file not found")
                }
                ErrorKind::NotFound if path != kind.default_path() => {
                    // in the case where the user overrode the path with an environment variable,
                    // log more loudly than the case where the default path was missing
                    tracing::warn!(path = %path, env = %kind.override_environment_variable(), "config file overridden via environment variable not found")
                }
                _other => tracing::warn!(path = %path, error = %e, "failed to read config file"),
            };
            Default::default()
        }
    };
    // if the file is not valid utf-8, log a warning and use an empty file instead
    let data = match String::from_utf8(data) {
        Ok(data) => data,
        Err(e) => {
            tracing::warn!(path = %path, error = %e, "config file did not contain utf-8 encoded data");
            Default::default()
        }
    };
    tracing::info!(path = %path, size = ?data.len(), "config file loaded")
;
    File {
        // lossy is OK here, the name of this file is just for debugging purposes
        path: expanded.to_string_lossy().into(),
        contents: data,
    }
}
/// Expand a leading `~` component into the user's home directory.
fn expand_home(path: impl AsRef<Path>, home_dir: &Option<String>) -> PathBuf {
    let path = path.as_ref();
    let mut components = path.components();

    // Only a literal `~` as the *first* component triggers expansion; empty
    // paths and paths not starting with `~` are returned untouched. In the
    // non-tilde case no separator rewriting is needed either, because such a
    // path comes from an environment variable on the target platform and
    // already uses the correct separators.
    let starts_with_tilde = matches!(
        components.next(),
        Some(Component::Normal(first)) if first == "~"
    );
    if !starts_with_tilde {
        return path.into();
    }

    let mut expanded: PathBuf = match home_dir {
        Some(dir) => {
            tracing::debug!(home = ?dir, path = ?path, "performing home directory substitution");
            dir.clone().into()
        }
        None => {
            tracing::warn!(
                "could not determine home directory but home expansion was requested"
            );
            // If we can't determine the home directory, just leave the `~` as is.
            "~".into()
        }
    };
    // Re-append the remaining components using system-specific path separators.
    for component in components {
        expanded.push(component);
    }
    expanded
}
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
enum Os {
    Windows,
    NotWindows,
}

impl Os {
    /// Detect the operating system this binary was built for.
    pub fn real() -> Self {
        if std::env::consts::OS == "windows" {
            Os::Windows
        } else {
            Os::NotWindows
        }
    }
}
/// Resolve a home directory given a set of environment variables
///
/// Precedence: `HOME` on all platforms, then (Windows only) `USERPROFILE`,
/// then `HOMEDRIVE` + `HOMEPATH`. Returns `None` when nothing is set.
fn home_dir(env_var: &os_shim_internal::Env, os: Os) -> Option<String> {
    if let Ok(home) = env_var.get("HOME") {
        tracing::debug!(src = "HOME", "loaded home directory");
        return Some(home);
    }
    if os == Os::Windows {
        if let Ok(home) = env_var.get("USERPROFILE") {
            tracing::debug!(src = "USERPROFILE", "loaded home directory");
            return Some(home);
        }
        let home_drive = env_var.get("HOMEDRIVE");
        let home_path = env_var.get("HOMEPATH");
        if let (Ok(mut drive), Ok(path)) = (home_drive, home_path) {
            // Log success only once both halves are actually present;
            // previously this was logged even when the lookup failed and
            // the function went on to return `None`.
            tracing::debug!(src = "HOMEDRIVE/HOMEPATH", "loaded home directory");
            drive.push_str(&path);
            return Some(drive);
        }
    }
    None
}
#[cfg(test)]
mod tests {
    use crate::profile::parser::source::{expand_home, home_dir, load, Os};
    use aws_types::os_shim_internal::{Env, Fs};
    use serde::Deserialize;
    use std::collections::HashMap;
    use std::error::Error;
    use std::fs;
    #[test]
    fn only_expand_home_prefix() {
        // ~ is only expanded as a single component (currently)
        let path = "~aws/config";
        assert_eq!(expand_home(&path, &None).to_str().unwrap(), "~aws/config");
    }
    // Deserialized shape of `test-data/file-location-tests.json`.
    #[derive(Deserialize, Debug)]
    #[serde(rename_all = "camelCase")]
    struct SourceTests {
        tests: Vec<TestCase>,
    }
    // A single JSON-defined test case: environment + expected file locations.
    #[derive(Deserialize, Debug)]
    #[serde(rename_all = "camelCase")]
    struct TestCase {
        name: String,
        environment: HashMap<String, String>,
        platform: String,
        profile: Option<String>,
        config_location: String,
        credentials_location: String,
    }
    /// Run all tests from file-location-tests.json
    #[test]
    fn run_tests() -> Result<(), Box<dyn Error>> {
        let tests = fs::read_to_string("test-data/file-location-tests.json")?;
        let tests: SourceTests = serde_json::from_str(&tests)?;
        for (i, test) in tests.tests.into_iter().enumerate() {
            eprintln!("test: {}", i);
            // `check` never actually awaits anything, so `now_or_never` is safe.
            check(test)
                .now_or_never()
                .expect("these futures should never poll");
        }
        Ok(())
    }
    use futures_util::FutureExt;
    use tracing_test::traced_test;
    #[traced_test]
    #[test]
    fn logs_produced_default() {
        let env = Env::from_slice(&[("HOME", "/user/name")]);
        let mut fs = HashMap::new();
        fs.insert(
            "/user/name/.aws/config".to_string(),
            "[default]\nregion = us-east-1",
        );
        let fs = Fs::from_map(fs);
        let _src = load(&env, &fs).now_or_never();
        assert!(logs_contain("config file loaded"));
        assert!(logs_contain("performing home directory substitution"));
    }
    // Drives a single JSON-defined test case; cases for the other platform
    // are skipped with a note.
    async fn check(test_case: TestCase) {
        let fs = Fs::real();
        let env = Env::from(test_case.environment);
        let platform_matches = (cfg!(windows) && test_case.platform == "windows")
            || (!cfg!(windows) && test_case.platform != "windows");
        if platform_matches {
            let source = load(&env, &fs).await;
            if let Some(expected_profile) = test_case.profile {
                assert_eq!(source.profile, expected_profile, "{}", &test_case.name);
            }
            assert_eq!(
                source.config_file.path, test_case.config_location,
                "{}",
                &test_case.name
            );
            assert_eq!(
                source.credentials_file.path, test_case.credentials_location,
                "{}",
                &test_case.name
            )
        } else {
            println!(
                "NOTE: ignoring test case for {} which does not apply to our platform: \n {}",
                &test_case.platform, &test_case.name
            )
        }
    }
    #[test]
    #[cfg_attr(windows, ignore)]
    fn test_expand_home() {
        let path = "~/.aws/config";
        assert_eq!(
            expand_home(&path, &Some("/user/foo".to_string()))
                .to_str()
                .unwrap(),
            "/user/foo/.aws/config"
        );
    }
    #[test]
    fn homedir_profile_only_windows() {
        // windows specific variables should only be considered when the platform is windows
        let env = Env::from_slice(&[("USERPROFILE", "C:\\Users\\name")]);
        assert_eq!(
            home_dir(&env, Os::Windows),
            Some("C:\\Users\\name".to_string())
        );
        assert_eq!(home_dir(&env, Os::NotWindows), None);
    }
    #[test]
    fn expand_home_no_home() {
        // there is an edge case around expansion when no home directory exists
        // if no home directory can be determined, leave the path as is
        if !cfg!(windows) {
            assert_eq!(expand_home("~/config", &None).to_str().unwrap(), "~/config")
        } else {
            assert_eq!(
                expand_home("~/config", &None).to_str().unwrap(),
                "~\\config"
            )
        }
    }
    /// Test that a linux oriented path expands on windows
    #[test]
    #[cfg_attr(not(windows), ignore)]
    fn test_expand_home_windows() {
        let path = "~/.aws/config";
        assert_eq!(
            expand_home(&path, &Some("C:\\Users\\name".to_string()))
                .to_str()
                .unwrap(),
            "C:\\Users\\name\\.aws\\config"
        );
    }
}
| 33.211864 | 154 | 0.563919 |
bfccc2b3fba804d9e35996fb5cc4b129b236ab1c | 1,445 | // Copyright 2020 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT
use chain_sync::SyncConfig;
use forest_libp2p::Libp2pConfig;
use rpc_client::DEFAULT_PORT;
use serde::Deserialize;
use utils::get_home_dir;
#[derive(Debug, Deserialize)]
#[serde(default)]
pub struct Config {
    /// Libp2p network configuration.
    pub network: Libp2pConfig,
    /// Directory for node data (defaults to `<home>/.forest`).
    pub data_dir: String,
    /// Optional path to a genesis file.
    pub genesis_file: Option<String>,
    /// Whether the RPC server is enabled (defaults to `true`).
    pub enable_rpc: bool,
    /// Port the RPC server listens on (defaults to `DEFAULT_PORT`).
    pub rpc_port: String,
    /// Optional RPC token.
    pub rpc_token: Option<String>,
    /// If this is true, then we do not validate the imported snapshot.
    /// Otherwise, we validate and compute the states.
    pub snapshot: bool,
    /// Path of the snapshot to import, if any.
    pub snapshot_path: Option<String>,
    /// Skips loading import CAR file and assumes it's already been loaded.
    /// Will use the cids in the header of the file to index the chain.
    pub skip_load: bool,
    /// Chain synchronization configuration.
    pub sync: SyncConfig,
    /// Whether to encrypt the keystore (defaults to `true`).
    pub encrypt_keystore: bool,
    /// Metrics port (defaults to `6116`).
    pub metrics_port: u16,
}
impl Default for Config {
    /// Default configuration; also backs the struct-level `#[serde(default)]`,
    /// so any field absent from a config file falls back to these values.
    fn default() -> Self {
        Self {
            network: Libp2pConfig::default(),
            data_dir: get_home_dir() + "/.forest",
            genesis_file: None,
            enable_rpc: true,
            rpc_port: DEFAULT_PORT.to_owned(),
            rpc_token: None,
            snapshot: false,
            snapshot_path: None,
            skip_load: false,
            sync: SyncConfig::default(),
            encrypt_keystore: true,
            metrics_port: 6116,
        }
    }
}
| 29.489796 | 75 | 0.635986 |
21acd02d14b38cc70b5f59e7389ec1953bcb2cb5 | 23,904 | #[doc = "Register `TA0CCTL2` reader"]
// Read proxy for the `TA0CCTL2` register; `W` below is the matching write
// proxy (svd2rust-style generated accessors).
pub struct R(crate::R<TA0CCTL2_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<TA0CCTL2_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::convert::From<crate::R<TA0CCTL2_SPEC>> for R {
    fn from(reader: crate::R<TA0CCTL2_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Register `TA0CCTL2` writer"]
pub struct W(crate::W<TA0CCTL2_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<TA0CCTL2_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl core::convert::From<crate::W<TA0CCTL2_SPEC>> for W {
    fn from(writer: crate::W<TA0CCTL2_SPEC>) -> Self {
        W(writer)
    }
}
// `CCIFG` (bit 0): capture/compare interrupt flag — field read/write proxies.
#[doc = "Field `CCIFG` reader - Capture/compare interrupt flag"]
pub struct CCIFG_R(crate::FieldReader<bool, bool>);
impl CCIFG_R {
    pub(crate) fn new(bits: bool) -> Self {
        CCIFG_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for CCIFG_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `CCIFG` writer - Capture/compare interrupt flag"]
pub struct CCIFG_W<'a> {
    w: &'a mut W,
}
impl<'a> CCIFG_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x01) | (value as u16 & 0x01);
        self.w
    }
}
// `COV` (bit 1): capture/compare overflow flag — field read/write proxies.
#[doc = "Field `COV` reader - Capture/compare overflow flag"]
pub struct COV_R(crate::FieldReader<bool, bool>);
impl COV_R {
    pub(crate) fn new(bits: bool) -> Self {
        COV_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for COV_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `COV` writer - Capture/compare overflow flag"]
pub struct COV_W<'a> {
    w: &'a mut W,
}
impl<'a> COV_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u16 & 0x01) << 1);
        self.w
    }
}
// `OUT` (bit 2): PWM output signal when output mode 0 — field read/write proxies.
#[doc = "Field `OUT` reader - PWM Output signal if output mode 0"]
pub struct OUT_R(crate::FieldReader<bool, bool>);
impl OUT_R {
    pub(crate) fn new(bits: bool) -> Self {
        OUT_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for OUT_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `OUT` writer - PWM Output signal if output mode 0"]
pub struct OUT_W<'a> {
    w: &'a mut W,
}
impl<'a> OUT_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u16 & 0x01) << 2);
        self.w
    }
}
// `CCI` (bit 3): capture input signal (read) — field read/write proxies.
#[doc = "Field `CCI` reader - Capture input signal (read)"]
pub struct CCI_R(crate::FieldReader<bool, bool>);
impl CCI_R {
    pub(crate) fn new(bits: bool) -> Self {
        CCI_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for CCI_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `CCI` writer - Capture input signal (read)"]
pub struct CCI_W<'a> {
    w: &'a mut W,
}
impl<'a> CCI_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | ((value as u16 & 0x01) << 3);
        self.w
    }
}
// `CCIE` (bit 4): capture/compare interrupt enable — field read/write proxies.
#[doc = "Field `CCIE` reader - Capture/compare interrupt enable"]
pub struct CCIE_R(crate::FieldReader<bool, bool>);
impl CCIE_R {
    pub(crate) fn new(bits: bool) -> Self {
        CCIE_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for CCIE_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `CCIE` writer - Capture/compare interrupt enable"]
pub struct CCIE_W<'a> {
    w: &'a mut W,
}
impl<'a> CCIE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | ((value as u16 & 0x01) << 4);
        self.w
    }
}
// `OUTMOD` (bits 5..=7): output-mode selector — enum, read proxy and write proxy.
#[doc = "Output mode 2\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum OUTMOD_A {
    #[doc = "0: PWM output mode: 0 - output only"]
    OUTMOD_0 = 0,
    #[doc = "1: PWM output mode: 1 - set"]
    OUTMOD_1 = 1,
    #[doc = "2: PWM output mode: 2 - PWM toggle/reset"]
    OUTMOD_2 = 2,
    #[doc = "3: PWM output mode: 3 - PWM set/reset"]
    OUTMOD_3 = 3,
    #[doc = "4: PWM output mode: 4 - toggle"]
    OUTMOD_4 = 4,
    #[doc = "5: PWM output mode: 5 - Reset"]
    OUTMOD_5 = 5,
    #[doc = "6: PWM output mode: 6 - PWM toggle/set"]
    OUTMOD_6 = 6,
    #[doc = "7: PWM output mode: 7 - PWM reset/set"]
    OUTMOD_7 = 7,
}
impl From<OUTMOD_A> for u8 {
    #[inline(always)]
    fn from(variant: OUTMOD_A) -> Self {
        variant as _
    }
}
#[doc = "Field `OUTMOD` reader - Output mode 2"]
pub struct OUTMOD_R(crate::FieldReader<u8, OUTMOD_A>);
impl OUTMOD_R {
    pub(crate) fn new(bits: u8) -> Self {
        OUTMOD_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> OUTMOD_A {
        // The 3-bit field covers all 8 values, so the match is exhaustive.
        match self.bits {
            0 => OUTMOD_A::OUTMOD_0,
            1 => OUTMOD_A::OUTMOD_1,
            2 => OUTMOD_A::OUTMOD_2,
            3 => OUTMOD_A::OUTMOD_3,
            4 => OUTMOD_A::OUTMOD_4,
            5 => OUTMOD_A::OUTMOD_5,
            6 => OUTMOD_A::OUTMOD_6,
            7 => OUTMOD_A::OUTMOD_7,
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `OUTMOD_0`"]
    #[inline(always)]
    pub fn is_outmod_0(&self) -> bool {
        **self == OUTMOD_A::OUTMOD_0
    }
    #[doc = "Checks if the value of the field is `OUTMOD_1`"]
    #[inline(always)]
    pub fn is_outmod_1(&self) -> bool {
        **self == OUTMOD_A::OUTMOD_1
    }
    #[doc = "Checks if the value of the field is `OUTMOD_2`"]
    #[inline(always)]
    pub fn is_outmod_2(&self) -> bool {
        **self == OUTMOD_A::OUTMOD_2
    }
    #[doc = "Checks if the value of the field is `OUTMOD_3`"]
    #[inline(always)]
    pub fn is_outmod_3(&self) -> bool {
        **self == OUTMOD_A::OUTMOD_3
    }
    #[doc = "Checks if the value of the field is `OUTMOD_4`"]
    #[inline(always)]
    pub fn is_outmod_4(&self) -> bool {
        **self == OUTMOD_A::OUTMOD_4
    }
    #[doc = "Checks if the value of the field is `OUTMOD_5`"]
    #[inline(always)]
    pub fn is_outmod_5(&self) -> bool {
        **self == OUTMOD_A::OUTMOD_5
    }
    #[doc = "Checks if the value of the field is `OUTMOD_6`"]
    #[inline(always)]
    pub fn is_outmod_6(&self) -> bool {
        **self == OUTMOD_A::OUTMOD_6
    }
    #[doc = "Checks if the value of the field is `OUTMOD_7`"]
    #[inline(always)]
    pub fn is_outmod_7(&self) -> bool {
        **self == OUTMOD_A::OUTMOD_7
    }
}
impl core::ops::Deref for OUTMOD_R {
    type Target = crate::FieldReader<u8, OUTMOD_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `OUTMOD` writer - Output mode 2"]
pub struct OUTMOD_W<'a> {
    w: &'a mut W,
}
impl<'a> OUTMOD_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: OUTMOD_A) -> &'a mut W {
        self.bits(variant.into())
    }
    #[doc = "PWM output mode: 0 - output only"]
    #[inline(always)]
    pub fn outmod_0(self) -> &'a mut W {
        self.variant(OUTMOD_A::OUTMOD_0)
    }
    #[doc = "PWM output mode: 1 - set"]
    #[inline(always)]
    pub fn outmod_1(self) -> &'a mut W {
        self.variant(OUTMOD_A::OUTMOD_1)
    }
    #[doc = "PWM output mode: 2 - PWM toggle/reset"]
    #[inline(always)]
    pub fn outmod_2(self) -> &'a mut W {
        self.variant(OUTMOD_A::OUTMOD_2)
    }
    #[doc = "PWM output mode: 3 - PWM set/reset"]
    #[inline(always)]
    pub fn outmod_3(self) -> &'a mut W {
        self.variant(OUTMOD_A::OUTMOD_3)
    }
    #[doc = "PWM output mode: 4 - toggle"]
    #[inline(always)]
    pub fn outmod_4(self) -> &'a mut W {
        self.variant(OUTMOD_A::OUTMOD_4)
    }
    #[doc = "PWM output mode: 5 - Reset"]
    #[inline(always)]
    pub fn outmod_5(self) -> &'a mut W {
        self.variant(OUTMOD_A::OUTMOD_5)
    }
    #[doc = "PWM output mode: 6 - PWM toggle/set"]
    #[inline(always)]
    pub fn outmod_6(self) -> &'a mut W {
        self.variant(OUTMOD_A::OUTMOD_6)
    }
    #[doc = "PWM output mode: 7 - PWM reset/set"]
    #[inline(always)]
    pub fn outmod_7(self) -> &'a mut W {
        self.variant(OUTMOD_A::OUTMOD_7)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x07 << 5)) | ((value as u16 & 0x07) << 5);
        self.w
    }
}
// `CAP` (bit 8): 1 = capture mode, 0 = compare mode — field read/write proxies.
#[doc = "Field `CAP` reader - Capture mode: 1 /Compare mode : 0"]
pub struct CAP_R(crate::FieldReader<bool, bool>);
impl CAP_R {
    pub(crate) fn new(bits: bool) -> Self {
        CAP_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for CAP_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `CAP` writer - Capture mode: 1 /Compare mode : 0"]
pub struct CAP_W<'a> {
    w: &'a mut W,
}
impl<'a> CAP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 8)) | ((value as u16 & 0x01) << 8);
        self.w
    }
}
// `SCCI` (bit 10): latched capture signal (read) — field read/write proxies.
#[doc = "Field `SCCI` reader - Latched capture signal (read)"]
pub struct SCCI_R(crate::FieldReader<bool, bool>);
impl SCCI_R {
    pub(crate) fn new(bits: bool) -> Self {
        SCCI_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for SCCI_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `SCCI` writer - Latched capture signal (read)"]
pub struct SCCI_W<'a> {
    w: &'a mut W,
}
impl<'a> SCCI_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 10)) | ((value as u16 & 0x01) << 10);
        self.w
    }
}
// `SCS` (bit 11): capture synchronize — field read/write proxies.
#[doc = "Field `SCS` reader - Capture sychronize"]
pub struct SCS_R(crate::FieldReader<bool, bool>);
impl SCS_R {
    pub(crate) fn new(bits: bool) -> Self {
        SCS_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for SCS_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `SCS` writer - Capture sychronize"]
pub struct SCS_W<'a> {
    w: &'a mut W,
}
impl<'a> SCS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 11)) | ((value as u16 & 0x01) << 11);
        self.w
    }
}
// `CCIS` (2-bit field): capture input selector — enumerated values and read proxy.
#[doc = "Capture input select 1\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum CCIS_A {
    #[doc = "0: Capture input select: 0 - CCIxA"]
    CCIS_0 = 0,
    #[doc = "1: Capture input select: 1 - CCIxB"]
    CCIS_1 = 1,
    #[doc = "2: Capture input select: 2 - GND"]
    CCIS_2 = 2,
    #[doc = "3: Capture input select: 3 - Vcc"]
    CCIS_3 = 3,
}
impl From<CCIS_A> for u8 {
    #[inline(always)]
    fn from(variant: CCIS_A) -> Self {
        variant as _
    }
}
#[doc = "Field `CCIS` reader - Capture input select 1"]
pub struct CCIS_R(crate::FieldReader<u8, CCIS_A>);
impl CCIS_R {
    pub(crate) fn new(bits: u8) -> Self {
        CCIS_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CCIS_A {
        // The 2-bit field covers all 4 values, so the match is exhaustive.
        match self.bits {
            0 => CCIS_A::CCIS_0,
            1 => CCIS_A::CCIS_1,
            2 => CCIS_A::CCIS_2,
            3 => CCIS_A::CCIS_3,
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `CCIS_0`"]
    #[inline(always)]
    pub fn is_ccis_0(&self) -> bool {
        **self == CCIS_A::CCIS_0
    }
    #[doc = "Checks if the value of the field is `CCIS_1`"]
    #[inline(always)]
    pub fn is_ccis_1(&self) -> bool {
        **self == CCIS_A::CCIS_1
    }
    #[doc = "Checks if the value of the field is `CCIS_2`"]
    #[inline(always)]
    pub fn is_ccis_2(&self) -> bool {
        **self == CCIS_A::CCIS_2
    }
    #[doc = "Checks if the value of the field is `CCIS_3`"]
    #[inline(always)]
    pub fn is_ccis_3(&self) -> bool {
        **self == CCIS_A::CCIS_3
    }
}
impl core::ops::Deref for CCIS_R {
    type Target = crate::FieldReader<u8, CCIS_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `CCIS` writer - Capture input select 1"]
pub struct CCIS_W<'a> {
w: &'a mut W,
}
impl<'a> CCIS_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CCIS_A) -> &'a mut W {
self.bits(variant.into())
}
#[doc = "Capture input select: 0 - CCIxA"]
#[inline(always)]
pub fn ccis_0(self) -> &'a mut W {
self.variant(CCIS_A::CCIS_0)
}
#[doc = "Capture input select: 1 - CCIxB"]
#[inline(always)]
pub fn ccis_1(self) -> &'a mut W {
self.variant(CCIS_A::CCIS_1)
}
#[doc = "Capture input select: 2 - GND"]
#[inline(always)]
pub fn ccis_2(self) -> &'a mut W {
self.variant(CCIS_A::CCIS_2)
}
#[doc = "Capture input select: 3 - Vcc"]
#[inline(always)]
pub fn ccis_3(self) -> &'a mut W {
self.variant(CCIS_A::CCIS_3)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 12)) | ((value as u16 & 0x03) << 12);
self.w
}
}
#[doc = "Capture mode 1\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum CM_A {
    #[doc = "0: Capture mode: 0 - disabled"]
    CM_0 = 0,
    #[doc = "1: Capture mode: 1 - pos. edge"]
    CM_1 = 1,
    #[doc = "2: Capture mode: 1 - neg. edge"]
    CM_2 = 2,
    #[doc = "3: Capture mode: 1 - both edges"]
    CM_3 = 3,
}
impl From<CM_A> for u8 {
    #[inline(always)]
    fn from(variant: CM_A) -> Self {
        variant as _
    }
}
#[doc = "Field `CM` reader - Capture mode 1"]
pub struct CM_R(crate::FieldReader<u8, CM_A>);
impl CM_R {
    // Wraps the raw 2-bit field value into a typed field reader.
    pub(crate) fn new(bits: u8) -> Self {
        CM_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CM_A {
        match self.bits {
            0 => CM_A::CM_0,
            1 => CM_A::CM_1,
            2 => CM_A::CM_2,
            3 => CM_A::CM_3,
            // Safe: the reader is constructed from a value masked to 2 bits
            // (see `R::cm`), so only 0..=3 can occur here.
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `CM_0`"]
    #[inline(always)]
    pub fn is_cm_0(&self) -> bool {
        **self == CM_A::CM_0
    }
    #[doc = "Checks if the value of the field is `CM_1`"]
    #[inline(always)]
    pub fn is_cm_1(&self) -> bool {
        **self == CM_A::CM_1
    }
    #[doc = "Checks if the value of the field is `CM_2`"]
    #[inline(always)]
    pub fn is_cm_2(&self) -> bool {
        **self == CM_A::CM_2
    }
    #[doc = "Checks if the value of the field is `CM_3`"]
    #[inline(always)]
    pub fn is_cm_3(&self) -> bool {
        **self == CM_A::CM_3
    }
}
impl core::ops::Deref for CM_R {
    type Target = crate::FieldReader<u8, CM_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `CM` writer - Capture mode 1"]
pub struct CM_W<'a> {
    w: &'a mut W,
}
impl<'a> CM_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: CM_A) -> &'a mut W {
        self.bits(variant.into())
    }
    #[doc = "Capture mode: 0 - disabled"]
    #[inline(always)]
    pub fn cm_0(self) -> &'a mut W {
        self.variant(CM_A::CM_0)
    }
    #[doc = "Capture mode: 1 - pos. edge"]
    #[inline(always)]
    pub fn cm_1(self) -> &'a mut W {
        self.variant(CM_A::CM_1)
    }
    #[doc = "Capture mode: 1 - neg. edge"]
    #[inline(always)]
    pub fn cm_2(self) -> &'a mut W {
        self.variant(CM_A::CM_2)
    }
    #[doc = "Capture mode: 1 - both edges"]
    #[inline(always)]
    pub fn cm_3(self) -> &'a mut W {
        self.variant(CM_A::CM_3)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // Clear bits 14:15 (the CM position), then OR in the new value.
        self.w.bits = (self.w.bits & !(0x03 << 14)) | ((value as u16 & 0x03) << 14);
        self.w
    }
}
// Reader API: each accessor extracts one field from the register snapshot
// held in `self.bits` by shifting to bit 0 and masking to the field width.
impl R {
    #[doc = "Bit 0 - Capture/compare interrupt flag"]
    #[inline(always)]
    pub fn ccifg(&self) -> CCIFG_R {
        CCIFG_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Capture/compare overflow flag"]
    #[inline(always)]
    pub fn cov(&self) -> COV_R {
        COV_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - PWM Output signal if output mode 0"]
    #[inline(always)]
    pub fn out(&self) -> OUT_R {
        OUT_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Capture input signal (read)"]
    #[inline(always)]
    pub fn cci(&self) -> CCI_R {
        CCI_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - Capture/compare interrupt enable"]
    #[inline(always)]
    pub fn ccie(&self) -> CCIE_R {
        CCIE_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bits 5:7 - Output mode 2"]
    #[inline(always)]
    pub fn outmod(&self) -> OUTMOD_R {
        OUTMOD_R::new(((self.bits >> 5) & 0x07) as u8)
    }
    #[doc = "Bit 8 - Capture mode: 1 /Compare mode : 0"]
    #[inline(always)]
    pub fn cap(&self) -> CAP_R {
        CAP_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 10 - Latched capture signal (read)"]
    #[inline(always)]
    pub fn scci(&self) -> SCCI_R {
        SCCI_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 11 - Capture sychronize"]
    #[inline(always)]
    pub fn scs(&self) -> SCS_R {
        SCS_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bits 12:13 - Capture input select 1"]
    #[inline(always)]
    pub fn ccis(&self) -> CCIS_R {
        CCIS_R::new(((self.bits >> 12) & 0x03) as u8)
    }
    #[doc = "Bits 14:15 - Capture mode 1"]
    #[inline(always)]
    pub fn cm(&self) -> CM_R {
        CM_R::new(((self.bits >> 14) & 0x03) as u8)
    }
}
// Writer API: each accessor returns a field-writer proxy that mutates the
// pending register value in `W` and hands back `&mut W` for chaining.
impl W {
    #[doc = "Bit 0 - Capture/compare interrupt flag"]
    #[inline(always)]
    pub fn ccifg(&mut self) -> CCIFG_W {
        CCIFG_W { w: self }
    }
    #[doc = "Bit 1 - Capture/compare overflow flag"]
    #[inline(always)]
    pub fn cov(&mut self) -> COV_W {
        COV_W { w: self }
    }
    #[doc = "Bit 2 - PWM Output signal if output mode 0"]
    #[inline(always)]
    pub fn out(&mut self) -> OUT_W {
        OUT_W { w: self }
    }
    #[doc = "Bit 3 - Capture input signal (read)"]
    #[inline(always)]
    pub fn cci(&mut self) -> CCI_W {
        CCI_W { w: self }
    }
    #[doc = "Bit 4 - Capture/compare interrupt enable"]
    #[inline(always)]
    pub fn ccie(&mut self) -> CCIE_W {
        CCIE_W { w: self }
    }
    #[doc = "Bits 5:7 - Output mode 2"]
    #[inline(always)]
    pub fn outmod(&mut self) -> OUTMOD_W {
        OUTMOD_W { w: self }
    }
    #[doc = "Bit 8 - Capture mode: 1 /Compare mode : 0"]
    #[inline(always)]
    pub fn cap(&mut self) -> CAP_W {
        CAP_W { w: self }
    }
    #[doc = "Bit 10 - Latched capture signal (read)"]
    #[inline(always)]
    pub fn scci(&mut self) -> SCCI_W {
        SCCI_W { w: self }
    }
    #[doc = "Bit 11 - Capture sychronize"]
    #[inline(always)]
    pub fn scs(&mut self) -> SCS_W {
        SCS_W { w: self }
    }
    #[doc = "Bits 12:13 - Capture input select 1"]
    #[inline(always)]
    pub fn ccis(&mut self) -> CCIS_W {
        CCIS_W { w: self }
    }
    #[doc = "Bits 14:15 - Capture mode 1"]
    #[inline(always)]
    pub fn cm(&mut self) -> CM_W {
        CM_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    // Marked `unsafe` by the generator: the caller must ensure the raw value
    // is valid for every field of this register — presumably because not all
    // bit patterns are meaningful; confirm against the device SVD.
    pub unsafe fn bits(&mut self, bits: u16) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "Timer0_A5 Capture/Compare Control 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ta0cctl2](index.html) module"]
pub struct TA0CCTL2_SPEC;
impl crate::RegisterSpec for TA0CCTL2_SPEC {
    // The register is 16 bits wide.
    type Ux = u16;
}
#[doc = "`read()` method returns [ta0cctl2::R](R) reader structure"]
impl crate::Readable for TA0CCTL2_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [ta0cctl2::W](W) writer structure"]
impl crate::Writable for TA0CCTL2_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets TA0CCTL2 to value 0"]
impl crate::Resettable for TA0CCTL2_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| 29.258262 | 424 | 0.548778 |
d7bfadbbbfd351ca7bbc7d48d6154ad672b424e1 | 10,960 | // Copyright 2022 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::{Cursor, Read};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use zeroize::Zeroize;
use super::ratchet::Ratchet;
use crate::{
utilities::{base64_decode, base64_encode},
Ed25519PublicKey, Ed25519Signature, SignatureError,
};
/// Error type describing failure modes for the `SessionKey` and
/// `ExportedSessionKey` decoding.
#[derive(Debug, Error)]
pub enum SessionKeyDecodeError {
    /// The encoded session key had an unsupported version.
    #[error("The session key had a invalid version, expected {0}, got {1}")]
    Version(u8, u8),
    /// The encoded session key didn't contain enough data to be decoded.
    // `#[from]` lets `?` convert `std::io::Error` (from `Read::read_exact`)
    // into this variant automatically.
    #[error("The session key was too short {0}")]
    Read(#[from] std::io::Error),
    /// The encoded session key wasn't valid base64.
    #[error("The session key wasn't valid base64: {0}")]
    Base64(#[from] base64::DecodeError),
    /// The signature on the session key was invalid.
    #[error("The signature on the session key was invalid: {0}")]
    Signature(#[from] SignatureError),
    /// The encoded session key contains an invalid public key.
    #[error("The public key of session was invalid: {0}")]
    PublicKey(#[from] crate::KeyError),
}
/// The exported session key.
///
/// This uses the same format as the `SessionKey` minus the signature at the
/// end.
pub struct ExportedSessionKey {
    // Message index at which the ratchet was exported (big-endian u32 on
    // the wire).
    pub(crate) ratchet_index: u32,
    // The 128-byte Megolm ratchet state; boxed so it can be reliably wiped
    // via `Zeroize` on drop.
    pub(crate) ratchet: Box<[u8; 128]>,
    // Public half of the session's Ed25519 signing keypair.
    pub(crate) signing_key: Ed25519PublicKey,
}
/// The session key, can be used to create a [`InboundGroupSession`].
///
/// Uses the session-sharing format described in the [Olm spec].
///
/// +---+----+--------+--------+--------+--------+------+-----------+
/// | V | i  | R(i,0) | R(i,1) | R(i,2) | R(i,3) | Kpub | Signature |
/// +---+----+--------+--------+--------+--------+------+-----------+
/// 0   1    5        37       69       101      133    165         229   bytes
///
/// The version byte, V, is "\x02".
/// This is followed by the ratchet index, iii, which is encoded as a
/// big-endian 32-bit integer; the 128 bytes of the ratchet; and the public
/// part of the Ed25519 keypair.
///
/// The data is then signed using the Ed25519 key, and the 64-byte signature is
/// appended.
///
/// [`InboundGroupSession`]: #crate.megolm.InboundGroupSession
/// [Olm spec]: https://gitlab.matrix.org/matrix-org/olm/blob/master/docs/megolm.md#session-sharing-format
pub struct SessionKey {
    // The unsigned payload: version, ratchet index, ratchet, public key.
    pub(super) session_key: ExportedSessionKey,
    // Ed25519 signature over the serialized payload above.
    pub(super) signature: Ed25519Signature,
}
impl Zeroize for ExportedSessionKey {
    fn zeroize(&mut self) {
        // Wipe the secret ratchet material and its index. The Ed25519
        // signing key is the *public* half and is intentionally left alone.
        self.ratchet_index.zeroize();
        self.ratchet.zeroize();
    }
}
impl Drop for ExportedSessionKey {
    // Ensure the secret ratchet bytes are wiped even if the caller forgets
    // to call `zeroize()` explicitly.
    fn drop(&mut self) {
        self.zeroize()
    }
}
impl ExportedSessionKey {
    /// Version byte prepended to the exported-key wire format.
    const VERSION: u8 = 1;

    /// Creates an exported key from the given ratchet state and the session's
    /// public Ed25519 signing key.
    pub(super) fn new(ratchet: &Ratchet, signing_key: Ed25519PublicKey) -> Self {
        let ratchet_index = ratchet.index();
        let mut ratchet_bytes = Box::new([0u8; Ratchet::RATCHET_LENGTH]);
        ratchet_bytes.copy_from_slice(ratchet.as_bytes());

        Self { ratchet_index, ratchet: ratchet_bytes, signing_key }
    }

    /// Serializes as `version || index (big-endian u32) || ratchet || Kpub`.
    fn to_bytes_with_version(&self, version: u8) -> Vec<u8> {
        let index = self.ratchet_index.to_be_bytes();

        [[version].as_ref(), index.as_ref(), self.ratchet.as_ref(), self.signing_key.as_bytes()]
            .concat()
    }

    /// Serialize the `ExportedSessionKey` to a byte vector.
    pub fn to_bytes(&self) -> Vec<u8> {
        self.to_bytes_with_version(Self::VERSION)
    }

    /// Deserialize the `ExportedSessionKey` from a byte slice.
    pub fn from_bytes(bytes: &[u8]) -> Result<Self, SessionKeyDecodeError> {
        let mut cursor = Cursor::new(bytes);
        Self::decode_key(Self::VERSION, &mut cursor)
    }

    /// Serialize the `ExportedSessionKey` to a base64 encoded string.
    ///
    /// This method will first use the [`ExportedSessionKey::to_bytes()`] to
    /// convert the session key to a byte vector and then encode the byte vector
    /// to a string using unpadded base64 as the encoding.
    pub fn to_base64(&self) -> String {
        let mut bytes = self.to_bytes();
        let ret = base64_encode(&bytes);
        // Wipe the intermediate copy of the secret ratchet bytes.
        bytes.zeroize();

        ret
    }

    /// Deserialize the `ExportedSessionKey` from base64 encoded string.
    pub fn from_base64(key: &str) -> Result<Self, SessionKeyDecodeError> {
        let mut bytes = base64_decode(key)?;
        let ret = Self::from_bytes(&bytes);
        // Wipe the intermediate copy of the secret ratchet bytes.
        bytes.zeroize();

        ret
    }

    /// Decodes the common (unsigned) portion of the wire format, checking the
    /// leading version byte against `expected_version`.
    ///
    /// Shared between `ExportedSessionKey::from_bytes` (version 1) and
    /// `SessionKey::from_bytes` (version 2).
    fn decode_key(
        expected_version: u8,
        cursor: &mut Cursor<&[u8]>,
    ) -> Result<ExportedSessionKey, SessionKeyDecodeError> {
        let mut version = [0u8; 1];
        let mut index = [0u8; 4];
        // Use the named constant instead of a hard-coded 128 so this stays in
        // sync with `new()` and with the `ratchet` field's array length.
        let mut ratchet = Box::new([0u8; Ratchet::RATCHET_LENGTH]);
        let mut public_key = [0u8; Ed25519PublicKey::LENGTH];

        cursor.read_exact(&mut version)?;

        if version[0] != expected_version {
            Err(SessionKeyDecodeError::Version(expected_version, version[0]))
        } else {
            cursor.read_exact(&mut index)?;
            cursor.read_exact(ratchet.as_mut_slice())?;
            cursor.read_exact(&mut public_key)?;

            let signing_key = Ed25519PublicKey::from_slice(&public_key)?;
            let ratchet_index = u32::from_be_bytes(index);

            Ok(ExportedSessionKey { ratchet_index, ratchet, signing_key })
        }
    }
}
impl SessionKey {
    // Version byte for the *signed* session-sharing format (the exported
    // format uses version 1).
    const VERSION: u8 = 2;
    // Builds a session key with an all-zero placeholder signature.
    // NOTE(review): the placeholder is presumably filled in by the group
    // session before the key is shared — confirm against the caller.
    pub(super) fn new(ratchet: &Ratchet, signing_key: Ed25519PublicKey) -> Self {
        let session_key = ExportedSessionKey::new(ratchet, signing_key);

        Self {
            session_key,
            signature: Ed25519Signature::from_slice(&[0; Ed25519Signature::LENGTH])
                .expect("Can't create an empty signature"),
        }
    }
    // The bytes that get signed: the version-2 payload without the signature.
    pub(crate) fn to_signature_bytes(&self) -> Vec<u8> {
        self.session_key.to_bytes_with_version(Self::VERSION)
    }
    /// Serialize the `SessionKey` to a byte vector.
    pub fn to_bytes(&self) -> Vec<u8> {
        // Payload followed by the 64-byte Ed25519 signature.
        let mut bytes = self.to_signature_bytes();
        bytes.extend(self.signature.to_bytes());

        bytes
    }
    /// Deserialize the `SessionKey` from a byte slice.
    ///
    /// Verifies the trailing Ed25519 signature against the embedded public
    /// key; fails with `SessionKeyDecodeError::Signature` on mismatch.
    pub fn from_bytes(bytes: &[u8]) -> Result<Self, SessionKeyDecodeError> {
        let mut cursor = Cursor::new(bytes);

        let session_key = ExportedSessionKey::decode_key(Self::VERSION, &mut cursor)?;

        let mut signature = [0u8; Ed25519Signature::LENGTH];

        cursor.read_exact(&mut signature)?;
        let signature = Ed25519Signature::from_slice(&signature)?;

        let decoded = cursor.into_inner();

        // The signature covers everything except the signature itself.
        session_key
            .signing_key
            .verify(&decoded[..decoded.len() - Ed25519Signature::LENGTH], &signature)?;

        Ok(Self { session_key, signature })
    }
    /// Serialize the `SessionKey` to a base64 encoded string.
    ///
    /// This method will first use the [`SessionKey::to_bytes()`] to
    /// convert the session key to a byte vector and then encode the byte vector
    /// to a string using unpadded base64 as the encoding.
    pub fn to_base64(&self) -> String {
        let mut bytes = self.to_bytes();
        let ret = base64_encode(&bytes);
        // Wipe the intermediate copy of the secret ratchet bytes.
        bytes.zeroize();

        ret
    }
    /// Deserialize the `SessionKey` from base64 encoded string.
    pub fn from_base64(key: &str) -> Result<Self, SessionKeyDecodeError> {
        let mut bytes = base64_decode(key)?;
        let ret = Self::from_bytes(&bytes);
        // Wipe the intermediate copy of the secret ratchet bytes.
        bytes.zeroize();

        ret
    }
}
impl Serialize for SessionKey {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let mut encoded = self.to_base64();
let ret = encoded.serialize(serializer);
encoded.zeroize();
ret
}
}
impl<'de> Deserialize<'de> for SessionKey {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let mut session_key = String::deserialize(deserializer)?;
let ret = Self::from_base64(&session_key).map_err(serde::de::Error::custom);
session_key.zeroize();
ret
}
}
impl Serialize for ExportedSessionKey {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let mut encoded = self.to_base64();
let ret = encoded.serialize(serializer);
encoded.zeroize();
ret
}
}
impl<'de> Deserialize<'de> for ExportedSessionKey {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let mut session_key = String::deserialize(deserializer)?;
let ret = Self::from_base64(&session_key).map_err(serde::de::Error::custom);
session_key.zeroize();
ret
}
}
#[cfg(test)]
mod test {
    use crate::megolm::{ExportedSessionKey, GroupSession, InboundGroupSession, SessionKey};
    // Round-trips a signed session key through serde JSON and checks every
    // field (including the signature) survives.
    #[test]
    fn session_key_serialization() -> Result<(), anyhow::Error> {
        let session = GroupSession::new();
        let key = session.session_key();

        let serialized = serde_json::to_string(&key)?;
        let deserialized: SessionKey = serde_json::from_str(&serialized)?;

        assert_eq!(key.session_key.ratchet, deserialized.session_key.ratchet);
        assert_eq!(key.session_key.ratchet_index, deserialized.session_key.ratchet_index);
        assert_eq!(key.session_key.signing_key, deserialized.session_key.signing_key);
        assert_eq!(key.signature, deserialized.signature);

        Ok(())
    }
    // Round-trips an *unsigned* exported key (no signature field) through
    // serde JSON.
    #[test]
    fn exported_session_key_serialization() -> Result<(), anyhow::Error> {
        let session = GroupSession::new();
        let mut session = InboundGroupSession::from(&session);

        let key = session.export_at(0).expect(
            "A freshly created inbound session can always be exported at the initial index",
        );

        let serialized = serde_json::to_string(&key)?;
        let deserialized: ExportedSessionKey = serde_json::from_str(&serialized)?;

        assert_eq!(key.ratchet, deserialized.ratchet);
        assert_eq!(key.ratchet_index, deserialized.ratchet_index);
        assert_eq!(key.signing_key, deserialized.signing_key);

        Ok(())
    }
}
| 32.140762 | 106 | 0.633577 |
164e289ee50253641b8462cdaab7ebbbdd2df5a1 | 10,133 | #![cfg_attr(not(feature = "std"), no_std)]
#![warn(unused, future_incompatible, nonstandard_style, rust_2018_idioms)]
#![forbid(unsafe_code)]
#![allow(
clippy::op_ref,
clippy::suspicious_op_assign_impl,
clippy::many_single_char_names
)]
#[macro_use]
extern crate derivative;
#[macro_use]
extern crate ark_std;
use crate::group::Group;
use ark_ff::{
bytes::{FromBytes, ToBytes},
fields::{Field, PrimeField, SquareRootField},
UniformRand,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, ConstantSerializedSize};
use ark_std::{
fmt::{Debug, Display},
hash::Hash,
ops::{Add, AddAssign, MulAssign, Neg, Sub, SubAssign},
vec::Vec,
};
use num_traits::Zero;
use zeroize::Zeroize;
pub mod models;
pub use self::models::*;
pub mod group;
pub mod msm;
/// Collects the groups, fields, and operations that make up a pairing over
/// an elliptic curve: G1, G2, their prepared forms, and the Miller loop plus
/// final exponentiation.
pub trait PairingEngine: Sized + 'static + Copy + Debug + Sync + Send + Eq + PartialEq {
    /// This is the scalar field of the G1/G2 groups.
    type Fr: PrimeField + SquareRootField;
    /// The projective representation of an element in G1.
    type G1Projective: ProjectiveCurve<BaseField = Self::Fq, ScalarField = Self::Fr, Affine = Self::G1Affine>
        + From<Self::G1Affine>
        + Into<Self::G1Affine>
        + MulAssign<Self::Fr>; // needed due to https://github.com/rust-lang/rust/issues/69640
    /// The affine representation of an element in G1.
    type G1Affine: AffineCurve<BaseField = Self::Fq, ScalarField = Self::Fr, Projective = Self::G1Projective>
        + From<Self::G1Projective>
        + Into<Self::G1Projective>
        + Into<Self::G1Prepared>;
    /// A G1 element that has been preprocessed for use in a pairing.
    type G1Prepared: ToBytes + Default + Clone + Send + Sync + Debug + From<Self::G1Affine>;
    /// The projective representation of an element in G2.
    type G2Projective: ProjectiveCurve<BaseField = Self::Fqe, ScalarField = Self::Fr, Affine = Self::G2Affine>
        + From<Self::G2Affine>
        + Into<Self::G2Affine>
        + MulAssign<Self::Fr>; // needed due to https://github.com/rust-lang/rust/issues/69640
    /// The affine representation of an element in G2.
    type G2Affine: AffineCurve<BaseField = Self::Fqe, ScalarField = Self::Fr, Projective = Self::G2Projective>
        + From<Self::G2Projective>
        + Into<Self::G2Projective>
        + Into<Self::G2Prepared>;
    /// A G2 element that has been preprocessed for use in a pairing.
    type G2Prepared: ToBytes + Default + Clone + Send + Sync + Debug + From<Self::G2Affine>;
    /// The base field that hosts G1.
    type Fq: PrimeField + SquareRootField;
    /// The extension field that hosts G2.
    type Fqe: SquareRootField;
    /// The extension field that hosts the target group of the pairing.
    type Fqk: Field;
    /// Perform a miller loop with some number of (G1, G2) pairs.
    #[must_use]
    fn miller_loop<'a, I>(i: I) -> Self::Fqk
    where
        I: IntoIterator<Item = &'a (Self::G1Prepared, Self::G2Prepared)>;
    /// Perform final exponentiation of the result of a miller loop.
    #[must_use]
    fn final_exponentiation(_: &Self::Fqk) -> Option<Self::Fqk>;
    /// Computes a product of pairings.
    // Default implementation: one shared Miller loop followed by a single
    // final exponentiation.
    #[must_use]
    fn product_of_pairings<'a, I>(i: I) -> Self::Fqk
    where
        I: IntoIterator<Item = &'a (Self::G1Prepared, Self::G2Prepared)>,
    {
        Self::final_exponentiation(&Self::miller_loop(i)).unwrap()
    }
    /// Performs multiple pairing operations
    // Convenience wrapper: prepares both inputs and runs a single pairing.
    #[must_use]
    fn pairing<G1, G2>(p: G1, q: G2) -> Self::Fqk
    where
        G1: Into<Self::G1Affine>,
        G2: Into<Self::G2Affine>,
    {
        let g1_prep = Self::G1Prepared::from(p.into());
        let g2_prep = Self::G2Prepared::from(q.into());
        Self::product_of_pairings(core::iter::once(&(g1_prep, g2_prep)))
    }
}
/// Projective representation of an elliptic curve point guaranteed to be
/// in the correct prime order subgroup.
pub trait ProjectiveCurve:
    Eq
    + 'static
    + Sized
    + ToBytes
    + FromBytes
    + Copy
    + Clone
    + Default
    + Send
    + Sync
    + Hash
    + Debug
    + Display
    + UniformRand
    + Zeroize
    + Zero
    + Neg<Output = Self>
    + Add<Self, Output = Self>
    + Sub<Self, Output = Self>
    + AddAssign<Self>
    + SubAssign<Self>
    + MulAssign<<Self as ProjectiveCurve>::ScalarField>
    + for<'a> Add<&'a Self, Output = Self>
    + for<'a> Sub<&'a Self, Output = Self>
    + for<'a> AddAssign<&'a Self>
    + for<'a> SubAssign<&'a Self>
    + core::iter::Sum<Self>
    + for<'a> core::iter::Sum<&'a Self>
    + From<<Self as ProjectiveCurve>::Affine>
{
    /// The curve's cofactor, expressed as 64-bit limbs.
    const COFACTOR: &'static [u64];
    /// The scalar field of the prime-order subgroup.
    type ScalarField: PrimeField + SquareRootField;
    /// The field over which the curve coordinates are defined.
    type BaseField: Field;
    /// The affine representation of points on this curve.
    type Affine: AffineCurve<Projective = Self, ScalarField = Self::ScalarField, BaseField = Self::BaseField>
        + From<Self>
        + Into<Self>;
    /// Returns a fixed generator of unknown exponent.
    #[must_use]
    fn prime_subgroup_generator() -> Self;
    /// Normalizes a slice of projective elements so that
    /// conversion to affine is cheap.
    fn batch_normalization(v: &mut [Self]);
    /// Normalizes a slice of projective elements and outputs a vector
    /// containing the affine equivalents.
    fn batch_normalization_into_affine(v: &[Self]) -> Vec<Self::Affine> {
        let mut v = v.to_vec();
        Self::batch_normalization(&mut v);
        v.into_iter().map(|v| v.into()).collect()
    }
    /// Checks if the point is already "normalized" so that
    /// cheap affine conversion is possible.
    #[must_use]
    fn is_normalized(&self) -> bool;
    /// Doubles this element.
    #[must_use]
    fn double(&self) -> Self {
        let mut copy = *self;
        copy.double_in_place();
        copy
    }
    /// Doubles this element in place.
    fn double_in_place(&mut self) -> &mut Self;
    /// Converts self into the affine representation.
    fn into_affine(&self) -> Self::Affine {
        (*self).into()
    }
    /// Set `self` to be `self + other`, where `other: Self::Affine`.
    /// This is usually faster than adding `other` in projective form.
    fn add_mixed(mut self, other: &Self::Affine) -> Self {
        self.add_assign_mixed(other);
        self
    }
    /// Set `self` to be `self + other`, where `other: Self::Affine`.
    /// This is usually faster than adding `other` in projective form.
    fn add_assign_mixed(&mut self, other: &Self::Affine);
    /// Performs scalar multiplication of this element.
    // Standard double-and-add over the big-endian bits of the scalar.
    // `self` no longer needs to be `mut`: the previous implementation
    // reassigned `self = res` only to return it, which clippy flags; we
    // simply return the accumulator instead. Binding mutability is not part
    // of the signature, so callers are unaffected.
    fn mul<S: AsRef<[u64]>>(self, other: S) -> Self {
        let mut res = Self::zero();
        for b in ark_ff::BitIteratorBE::without_leading_zeros(other) {
            res.double_in_place();
            if b {
                res += self;
            }
        }
        res
    }
}
/// Affine representation of an elliptic curve point guaranteed to be
/// in the correct prime order subgroup.
pub trait AffineCurve:
    Eq
    + 'static
    + Sized
    + ToBytes
    + FromBytes
    + CanonicalSerialize
    + ConstantSerializedSize
    + CanonicalDeserialize
    + Copy
    + Clone
    + Default
    + Send
    + Sync
    + Hash
    + Debug
    + Display
    + Zero
    + Neg<Output = Self>
    + Zeroize
    + From<<Self as AffineCurve>::Projective>
{
    /// The curve's cofactor, expressed as 64-bit limbs.
    const COFACTOR: &'static [u64];
    /// The scalar field of the prime-order subgroup.
    type ScalarField: PrimeField + SquareRootField + Into<<Self::ScalarField as PrimeField>::BigInt>;
    /// The field over which the curve coordinates are defined.
    type BaseField: Field;
    /// The projective representation of points on this curve.
    type Projective: ProjectiveCurve<Affine = Self, ScalarField = Self::ScalarField, BaseField = Self::BaseField>
        + From<Self>
        + Into<Self>
        + MulAssign<Self::ScalarField>; // needed due to https://github.com/rust-lang/rust/issues/69640
    /// Returns a fixed generator of unknown exponent.
    #[must_use]
    fn prime_subgroup_generator() -> Self;
    /// Converts self into the projective representation.
    fn into_projective(&self) -> Self::Projective {
        (*self).into()
    }
    /// Returns a group element if the set of bytes forms a valid group element,
    /// otherwise returns None. This function is primarily intended for sampling
    /// random group elements from a hash-function or RNG output.
    fn from_random_bytes(bytes: &[u8]) -> Option<Self>;
    /// Performs scalar multiplication of this element with mixed addition.
    #[must_use]
    fn mul<S: Into<<Self::ScalarField as PrimeField>::BigInt>>(&self, other: S)
        -> Self::Projective;
    /// Multiply this element by the cofactor and output the
    /// resulting projective element.
    #[must_use]
    fn mul_by_cofactor_to_projective(&self) -> Self::Projective;
    /// Multiply this element by the cofactor.
    #[must_use]
    fn mul_by_cofactor(&self) -> Self {
        self.mul_by_cofactor_to_projective().into()
    }
    /// Multiply this element by the inverse of the cofactor in
    /// `Self::ScalarField`.
    #[must_use]
    fn mul_by_cofactor_inv(&self) -> Self;
}
// Blanket impl: every projective curve point forms a group, so expose the
// `Group` interface directly on top of the curve operations.
impl<C: ProjectiveCurve> Group for C {
    type ScalarField = C::ScalarField;

    /// Returns `2 * self` without mutating `self`.
    #[inline]
    #[must_use]
    fn double(&self) -> Self {
        let mut doubled = *self;
        doubled += self;
        doubled
    }

    /// Doubles `self` in place via the curve's dedicated doubling formula.
    #[inline]
    fn double_in_place(&mut self) -> &mut Self {
        <C as ProjectiveCurve>::double_in_place(self)
    }
}
/// Preprocess a G1 element for use in a pairing.
pub fn prepare_g1<E: PairingEngine>(g: impl Into<E::G1Affine>) -> E::G1Prepared {
    // `G1Affine: Into<G1Prepared>` is guaranteed by the `PairingEngine` bounds.
    let affine: E::G1Affine = g.into();
    affine.into()
}

/// Preprocess a G2 element for use in a pairing.
pub fn prepare_g2<E: PairingEngine>(g: impl Into<E::G2Affine>) -> E::G2Prepared {
    // `G2Affine: Into<G2Prepared>` is guaranteed by the `PairingEngine` bounds.
    let affine: E::G2Affine = g.into();
    affine.into()
}
/// A cycle of pairing-friendly elliptic curves.
// The where-clauses mirror the `MulAssign` workaround used on the engine
// traits above (rust-lang/rust#69640).
pub trait CycleEngine: Sized + 'static + Copy + Debug + Sync + Send
where
    <Self::E2 as PairingEngine>::G1Projective: MulAssign<<Self::E1 as PairingEngine>::Fq>,
    <Self::E2 as PairingEngine>::G2Projective: MulAssign<<Self::E1 as PairingEngine>::Fq>,
{
    /// The first curve of the cycle.
    type E1: PairingEngine;
    /// The second curve of the cycle; its scalar field is E1's base field and
    /// vice versa (that is what makes the pair a cycle).
    type E2: PairingEngine<
        Fr = <Self::E1 as PairingEngine>::Fq,
        Fq = <Self::E1 as PairingEngine>::Fr,
    >;
}
| 31.082822 | 113 | 0.63535 |
08ed5b71fd29d47fb1c5f22e127e7e92f652a5ad | 7,556 | use crate::config;
use crate::{Query, RespData};
use serde::Deserialize;
use serde_json::Value;
use sha2::{Digest, Sha256};
use uuid::Uuid;
use super::new_client;
#[derive(Deserialize, Debug, Clone)]
pub struct YoudaoAPIKey {
    // Application secret used when computing the request signature.
    pub secret_key: String,
    // Application ID ("appKey" in the Youdao API).
    pub id: String,
}

#[derive(Clone, Debug)]
pub struct Youdao {
    // Endpoint of the Youdao open API.
    url_free: String,
    // Credentials loaded from configuration; `None` disables this backend.
    api_key: Option<YoudaoAPIKey>,
}
impl Youdao {
    /// Builds a Youdao backend pointing at the public API endpoint, picking
    /// up credentials (if any) from the global configuration.
    pub fn new() -> Youdao {
        let api_key = config::get().youdao.clone();
        Youdao {
            url_free: String::from("https://openapi.youdao.com/api"),
            api_key,
        }
    }
}
// `Youdao` only contains `String` and `Option<YoudaoAPIKey>` fields (see the
// struct definitions above), all of which are `Send + Sync`, so the auto
// traits already apply. The previous hand-written `unsafe impl Send/Sync`
// were redundant and have been removed so the compiler keeps verifying
// thread-safety automatically if fields change.
impl Youdao {
    /// Sends `query` to the Youdao text-translation API and maps the JSON
    /// response into a `RespData`.
    ///
    /// Returns `Err` with the Youdao error code when the API reports a
    /// failure, or `Err("no youdao API key")` when no credentials are
    /// configured.
    pub async fn query(&self, query: Query) -> Result<RespData, String> {
        log::info!("requesting youdao translate");
        match &self.api_key {
            Some(api_key) => {
                let client = new_client();
                // https://ai.youdao.com/DOCSIRMA/html/%E8%87%AA%E7%84%B6%E8%AF%AD%E8%A8%80%E7%BF%BB%E8%AF%91/API%E6%96%87%E6%A1%A3/%E6%96%87%E6%9C%AC%E7%BF%BB%E8%AF%91%E6%9C%8D%E5%8A%A1/%E6%96%87%E6%9C%AC%E7%BF%BB%E8%AF%91%E6%9C%8D%E5%8A%A1-API%E6%96%87%E6%A1%A3.html
                let salt = Uuid::new_v4().to_string();
                let text = &query.text;
                let curtime = std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .unwrap()
                    .as_secs()
                    .to_string();
                // Build the v3 signature input: appId + truncated input +
                // salt + curtime + secret, then SHA-256 it (hex, upper case).
                let mut sign = String::new();
                sign.push_str(&api_key.id);
                // utf-8 length: the API's "input" is the whole text when it is
                // at most 20 chars, otherwise first 10 + length + last 10.
                let text_len = text.chars().count();
                if text_len <= 20 {
                    sign.push_str(text);
                } else {
                    let beg: String = text.chars().take(10).collect();
                    let end: String = text
                        .chars()
                        .rev()
                        .take(10)
                        .collect::<String>()
                        .chars()
                        .rev()
                        .collect();
                    sign.push_str(&beg);
                    sign.push_str(&text_len.to_string());
                    sign.push_str(&end);
                }
                sign.push_str(&salt);
                sign.push_str(&curtime);
                sign.push_str(&api_key.secret_key);
                let mut hasher = Sha256::new();
                hasher.update(sign);
                sign = format!("{:X}", hasher.finalize());
                let lang_from = Youdao::map_lang(&query.lang_from);
                let lang_to = Youdao::map_lang(&query.lang_to);
                let params = [
                    ("q", &query.text),
                    ("from", &lang_from),
                    ("to", &lang_to),
                    ("appKey", &api_key.id),
                    ("salt", &salt),
                    ("sign", &sign),
                    ("signType", &"v3".into()),
                    ("curtime", &curtime),
                    // audio
                    // ("ext", &"TODO".into()),
                    ("strict", &"false".into()),
                ];
                let resp = client
                    .post(&self.url_free)
                    .form(&params)
                    .send()
                    .await
                    .unwrap();
                let resp_data: Value = resp.json().await.unwrap();
                log::debug!("raw data from youdao translate: {:?}", resp_data);
                let error_code = resp_data.get("errorCode").unwrap().as_str().unwrap();
                if error_code != "0" {
                    return Err(error_code.to_string());
                }
                // get lang_from and lang_to from the "l" field, e.g. "en2zh-CHS".
                // NOTE(review): splitting on '2' assumes no language code itself
                // contains a '2' before the separator — confirm for all codes used.
                let langs: Vec<&str> = resp_data
                    .get("l")
                    .unwrap()
                    .as_str()
                    .unwrap()
                    .split('2')
                    .collect();
                let lang_from = langs[0];
                let _lang_to = langs[1];
                // only exists when looking up a word
                if let Some(basic) = resp_data.get("basic") {
                    if lang_from == "en" {
                        log::debug!("{:?}", basic);
                        // phonetics
                        let mut ps: Vec<(String, String)> = Vec::new();
                        if let Some(us_phonetic) = basic.get("us-phonetic") {
                            let us_phonetic = us_phonetic.as_str().unwrap().to_string();
                            ps.push(("us".to_string(), format!("/{}/", us_phonetic)));
                        }
                        if let Some(uk_phonetic) = basic.get("uk-phonetic") {
                            let uk_phonetic = uk_phonetic.as_str().unwrap().to_string();
                            // Fixed: this entry was previously mislabeled "us",
                            // duplicating the American label for the British
                            // pronunciation.
                            ps.push(("uk".to_string(), format!("/{}/", uk_phonetic)));
                        }
                        // explains
                        let explains: String = parse_explains_field(basic);
                        return Ok(RespData {
                            backend: "youdao translate".to_owned(),
                            query,
                            // short_desc: resp.text().unwrap(),
                            basic_desc: explains,
                            phonetic_symbol: Some(ps),
                            detail_desc: None,
                            audio: None,
                        });
                    } else {
                        // if not english, only word explanation
                        let trans = parse_explains_field(basic);
                        return Ok(RespData {
                            backend: "youdao translate".to_owned(),
                            query,
                            // short_desc: resp.text().unwrap(),
                            basic_desc: trans,
                            phonetic_symbol: None,
                            detail_desc: None,
                            audio: None,
                        });
                    }
                }
                // translation, always exist
                let trans_list: Vec<&str> = resp_data
                    .get("translation")
                    .unwrap()
                    .as_array()
                    .unwrap()
                    .iter()
                    .map(|v| v.as_str().unwrap())
                    .collect();
                let trans = trans_list.join("\n");
                // let basic = resp_data.get("basic");
                Ok(RespData {
                    backend: "youdao translate".to_owned(),
                    query,
                    // short_desc: resp.text().unwrap(),
                    basic_desc: trans,
                    phonetic_symbol: None,
                    detail_desc: None,
                    audio: None,
                })
            }
            None => Err(String::from("no youdao API key")),
        }
    }
}
impl Youdao {
    /// Translates our internal language code into the Youdao dialect
    /// ("zh" is spelled "zh-CHS" in the Youdao API; everything else passes
    /// through unchanged).
    fn map_lang(lang_code: &str) -> String {
        match lang_code {
            "zh" => String::from("zh-CHS"),
            other => other.to_string(),
        }
    }
}
fn parse_explains_field(basic: &Value) -> String {
basic
.get("explains")
.unwrap()
.as_array()
.unwrap()
.into_iter()
.fold(String::new(), |mut x: String, y: &Value| {
x.push_str(y.as_str().unwrap());
x.push_str(";\t");
x
})
}
| 36.858537 | 268 | 0.408682 |
fef95907263fb0713148f46c715b4dae24e80ec0 | 20,807 | // FIXME: un-unwrap();
use serenity::{
builder::EditMessage,
model::interactions::{
application_command::ApplicationCommandInteraction,
message_component::ButtonStyle,
InteractionApplicationCommandCallbackDataFlags,
InteractionResponseType::{ChannelMessageWithSource, DeferredUpdateMessage},
},
prelude::TypeMapKey,
};
use crate::{
env,
extensions::*,
message_component::MessageComponentInteraction,
models::{CouncilVoting, SuspectMessageEdit, VotingAction},
Arc, Channel, Context, Interaction, Message, MessageId, MessageUpdateEvent, Mutex, User,
};
/// Ids of voting messages whose embed is currently being re-rendered.
///
/// Used to coalesce concurrent refreshes of the same voting message.
pub struct PendingEdits {
    // Voting-message ids with an update in flight.
    edits: Vec<u64>,
}
// Key for storing the shared `PendingEdits` state in serenity's global
// `TypeMap`, wrapped in `Arc<Mutex<_>>` for shared mutable access.
impl TypeMapKey for PendingEdits {
    type Value = Arc<Mutex<PendingEdits>>;
}
impl PendingEdits {
    /// Create an empty set of pending edits.
    pub fn new() -> PendingEdits {
        Self { edits: Vec::new() }
    }

    /// Mark a voting message as having an update in flight.
    pub fn add(&mut self, message_id: u64) {
        self.edits.push(message_id);
    }

    /// Clear the in-flight marker for a voting message.
    pub fn remove(&mut self, message_id: u64) {
        self.edits.retain(|&id| id != message_id);
    }

    /// Check whether an update for this voting message is in flight.
    pub fn contains(&self, message_id: u64) -> bool {
        self.edits.iter().any(|&id| id == message_id)
    }
}
/// Check whether a report already exists for the given message id.
///
/// Falls back to `false` if the database lookup fails.
async fn is_reported(ctx: &Context, message_id: u64) -> bool {
    let db = ctx.get_db().await;
    db.is_reported(message_id).await.unwrap_or(false)
}
/// Build the mention list for one vote type (`id`: 0 = delete, 1 = silence,
/// 2 = block reporter), one `<@user>` mention per line.
///
/// Returns "-" when nobody has cast that kind of vote, so the embed field is
/// never empty.
fn filter_votes(id: i32, actions: Vec<VotingAction>) -> String {
    let mentions: String = actions
        .iter()
        .filter(|action| action.vote_type == id)
        .map(|action| format!("\n<@{}>", action.voter_user_id))
        .collect();
    if mentions.is_empty() {
        "-".to_string()
    } else {
        mentions
    }
}
/// Render the complete voting message for the moderation channel into
/// `message`: the main report embed, one embed per logged edit/deletion of
/// the suspect message, and the voting buttons (disabled once their vote
/// goal is reached).
fn generate_moderation_message(
    message: &mut EditMessage,
    voting: CouncilVoting,
    edits: Vec<SuspectMessageEdit>,
    votes: Vec<VotingAction>,
) {
    let guild_id = env::var("GUILD_ID").expect("NO GUILD_ID in .env");
    // Deep link to the reported message, used for the link button below.
    let message_link = format!(
        "https://discord.com/channels/{}/{}/{}",
        guild_id, voting.suspect_message_channel_id, voting.suspect_message_id
    );
    // Mention lists per vote type: 0 = delete, 1 = silence, 2 = block reporter.
    let delete_voters = filter_votes(0, votes.clone());
    let silence_voters = filter_votes(1, votes.clone());
    let block_reporter_voters = filter_votes(2, votes);
    message.embed(|e| {
        e.color(serenity::utils::Color::RED);
        e.title("Viestistä on tehty ilmoitus!");
        e.field("Arvojäseniä paikalla", voting.moderators_online, true);
        e.field(
            "Viestin kanava",
            format!("<#{}>", voting.suspect_message_channel_id),
            true,
        );
        e.field(
            "Viestin lähettänyt",
            format!("<@{}>", voting.suspect_id),
            true,
        );
        e.field(
            "Ilmoituksen tehnyt",
            format!("<@{}>", voting.reporter_id),
            true,
        );
        e.description(format!(
            "Viestin sisältö:\n```\n{}```",
            voting.suspect_message_content
        ));
        e.field(
            format!(
                "Poistamisen puolesta {}/{}",
                voting.delete_votes, voting.delete_votes_required
            ),
            delete_voters,
            true,
        );
        e.field(
            format!(
                "Hiljennyksen puolesta {}/{}",
                voting.silence_votes, voting.silence_votes_required
            ),
            silence_voters,
            true,
        );
        e.field(
            format!(
                "Ilmoittajan estämisen puolesta {}/{}",
                voting.block_reporter_votes, voting.block_reporter_votes_required
            ),
            block_reporter_voters,
            true,
        );
        e.footer(|f| {
            f.text(format!(
                "Viesti lähetetty: {}",
                voting.suspect_message_send_time
            ))
        })
    });
    // One extra embed per logged edit of the suspect message.
    for edit in &edits {
        // An empty `new_content` marks a deletion; nothing can follow it.
        if edit.new_content.is_empty() {
            message.add_embed(|e| {
                e.title("Viesti on poistettu");
                e.footer(|f| f.text(format!("Poiston ajankohta: {}", edit.edit_time)))
            });
            break;
        }
        message.add_embed(|e| {
            e.title("Viestiä on muokattu");
            e.description(format!("Uusi sisältö:\n```\n{}```", edit.new_content));
            e.footer(|f| f.text(format!("Muokkausajankohta: {}", edit.edit_time)))
        });
    }
    // Voting buttons. Each is disabled once its vote goal is reached; the
    // delete button is also disabled if the message was already deleted
    // (last logged edit has empty content).
    message.components(|c| {
        c.create_action_row(|r| {
            r.create_button(|b| {
                b.label("Poista viesti");
                b.style(ButtonStyle::Secondary);
                if voting.delete_votes == voting.delete_votes_required
                    || (!edits.is_empty() && edits.last().unwrap().new_content.is_empty())
                {
                    b.disabled(true);
                }
                b.custom_id("delete_button")
            });
            r.create_button(|b| {
                b.label("Hiljennä jäsen");
                b.style(ButtonStyle::Danger);
                if voting.silence_votes == voting.silence_votes_required {
                    b.disabled(true);
                }
                b.custom_id("ban_button")
            });
            r.create_button(|b| {
                b.label("Estä ilmoittaja");
                b.style(ButtonStyle::Danger);
                if voting.block_reporter_votes == voting.block_reporter_votes_required {
                    b.disabled(true);
                }
                b.custom_id("abuse_button")
            });
            if !message_link.is_empty() {
                r.create_button(|b| {
                    b.label("Näytä viesti");
                    b.style(ButtonStyle::Link);
                    b.url(message_link)
                });
            }
            r.create_button(|b| {
                b.label(format!("{} klikkausta tuhlattu", voting.useless_clicks));
                b.style(ButtonStyle::Success);
                b.custom_id("useless_button")
            })
        })
    });
}
/// Re-render the voting message in the moderation channel from the current
/// database state (voting event, cast votes, and logged edits).
async fn update_voting_message(ctx: &Context, voting_message_id: u64) {
    let moderation_channel_id: u64 = env::var("MOD_CHANNEL_ID")
        .expect("No MOD_CHANNEL_ID in .env")
        .parse()
        .expect("Invalid MOD_CHANNEL_ID provided");
    let db = ctx.get_db().await;
    // Fetch everything the embed needs in one place.
    let event = db.get_voting_event(voting_message_id).await.unwrap();
    let votes = db.get_voting_event_votes(voting_message_id).await.unwrap();
    let edits = db.get_voting_event_edits(voting_message_id).await.unwrap();
    let mut message = ctx
        .http
        .get_message(moderation_channel_id, voting_message_id)
        .await
        .unwrap();
    message
        .edit(&ctx.http, |m| {
            generate_moderation_message(m, event, edits, votes);
            m
        })
        .await
        .unwrap()
}
/// This handles a message_changed event an checks for
/// reported messages that are edited. It then updates the message on the
/// moderation channel with the message's new content and the time of the edit.
///
/// (Due to discord limitations the maximum number of logged edits is 9 after which they will no
/// longer be logged)
// NOTE: This could become a problem in which case a workaround can be implemented
pub async fn handle_edit(ctx: &Context, event: &MessageUpdateEvent) {
    // Only reported messages are tracked; everything else is ignored.
    if !is_reported(ctx, event.id.0).await {
        return;
    }
    let db = ctx.get_db().await;
    let voting_event = db.get_voting_event_for_message(event.id.0).await.unwrap();
    // Persist the edit, then re-render the voting message to show it.
    db.add_edit_event(event.to_owned(), voting_event.vote_message_id)
        .await
        .unwrap();
    update_voting_message(ctx, voting_event.vote_message_id as u64).await;
}
/// This handles the deletion of a message
/// First it check whether the message is reported
/// After that it proceeds accordingly.
/// If a reported message is deleted the deletion time will be logged into the embed-chain
pub async fn handle_delete(ctx: &Context, message_id: MessageId) {
    // Deletions of unreported messages are of no interest.
    if !is_reported(ctx, message_id.0).await {
        return;
    }
    let db = ctx.get_db().await;
    let voting_event = db.get_voting_event_for_message(message_id.0).await.unwrap();
    // Record the deletion timestamp, then refresh the voting message.
    db.message_deleted(
        chrono::Local::now().naive_local(),
        voting_event.vote_message_id,
    )
    .await
    .unwrap();
    update_voting_message(ctx, voting_event.vote_message_id as u64).await;
}
/// Handles an event where a message was reported using the "⛔ Ilmianna viesti" message command
/// This sends an embed to the moderation channel, containing some information about the message
/// and the reported
pub async fn handle_report(ctx: &Context, interaction: ApplicationCommandInteraction) {
    let no_reports_role_id: u64 = env::var("NO_REPORTS_ROLE_ID")
        .expect("Expected NO_REPORTS_ROLE_ID in .env")
        .parse()
        .expect("Invalid NO_REPORTS_ROLE_ID provided");
    let guild_id: u64 = env::var("GUILD_ID")
        .expect("Expected GUILD_ID in .env")
        .parse()
        .expect("Invalid GUILD_ID provided");
    let moderation_channel_id = env::var("MOD_CHANNEL_ID")
        .expect("MOD_CHANNEL_ID id expected")
        .parse::<u64>()
        .expect("Invalid MOD_CHANNEL_ID provided");
    // Users carrying the NO_REPORTS role have abused the feature before:
    // reject with an ephemeral notice and do not create a report.
    if interaction
        .user
        .has_role(&ctx.http, guild_id, no_reports_role_id)
        .await
        .unwrap()
    {
        info!("Skipping blacklisted reporter {}", interaction.user.id.0);
        interaction
            .create_interaction_response(&ctx.http, |r| {
                r.interaction_response_data(|d| {
                    d.flags(
                        InteractionApplicationCommandCallbackDataFlags::EPHEMERAL
                    );
                    d.content("Sinut on hyllytetty ilmoitus-ominaisuuden väärinkäytöstä :rage:! Ilmoitustasi ei lähetetty.")
                });
                r.kind(ChannelMessageWithSource)
            })
            .await
            .unwrap();
        return;
    }
    // Moderators get a direct pointer to the moderation channel in the ack.
    let message = if is_moderator(ctx, &interaction.user).await {
        format!(
            "Viesti on ilmiannettu arvojäsenten neuvostolle, <#{}>",
            moderation_channel_id
        )
    } else {
        "Viesti on ilmiannettu arvojäsenten neuvostolle".to_string()
    };
    // Acknowledge the reporter with an ephemeral response.
    interaction
        .create_interaction_response(&ctx.http, |r| {
            r.interaction_response_data(|d| {
                d.flags(InteractionApplicationCommandCallbackDataFlags::EPHEMERAL);
                d.content(message)
            });
            r.kind(ChannelMessageWithSource)
        })
        .await
        .unwrap();
    let suspect_message = interaction.data.resolved.messages.values().next().unwrap();
    // Only one voting event is kept per message.
    if is_reported(ctx, suspect_message.id.0).await {
        info!(
            "The message {} is already reported! Skipping...",
            suspect_message.id.0
        );
        return;
    }
    let mods_online = get_online_mod_count(ctx).await;
    let moderation_channel = ctx.http.get_channel(moderation_channel_id).await.unwrap();
    let suspect = suspect_message.author.clone();
    // Post a placeholder first so we have a message id to persist, then
    // render the full embed via `update_voting_message`.
    let voting_message = moderation_channel
        .id()
        .send_message(&ctx.http, |m| {
            m.embed(|e| e.title("Viestistä on tehty ilmoitus!"))
        })
        .await
        .unwrap();
    let db = ctx.get_db().await;
    db.new_reported_message(
        voting_message.id.0,
        suspect_message.to_owned(),
        interaction.user.id.0,
        mods_online as i32,
    )
    .await
    .unwrap();
    update_voting_message(ctx, voting_message.id.0).await;
    // Notify the author of the reported message via DM, with a link back to
    // the message in question.
    let message_link = suspect_message.link_ensured(&ctx.http).await;
    suspect
        .dm(&ctx.http, |m| {
            m.content("Viestistäsi on tehty ilmoitus moderaattoreille!");
            m.components(|c| {
                c.create_action_row(|r| {
                    r.create_button(|b| {
                        b.label("Näytä viesti");
                        b.style(ButtonStyle::Link);
                        b.url(message_link)
                    })
                })
            })
        })
        .await
        .unwrap();
}
/// Get the amount of online members who have access to the moderation channel.
///
/// Counted by intersecting the channel's member list with the guild's current
/// presences, excluding bots.
async fn get_online_mod_count(ctx: &Context) -> usize {
    let channelid = env::var("MOD_CHANNEL_ID")
        .expect("MOD_CHANNEL_ID id expected")
        .parse::<u64>()
        .expect("Invalid mod role id");
    match ctx.http.get_channel(channelid).await.unwrap() {
        Channel::Guild(channel) => {
            let presences = ctx.cache.guild(channel.guild_id).await.unwrap().presences;
            channel
                .members(&ctx.cache)
                .await
                .unwrap()
                .into_iter()
                .filter(|m| presences.contains_key(&m.user.id) && !m.user.bot)
                .count()
        }
        // The moderation channel is configured as a guild channel.
        _ => unreachable!(),
    }
}
/// Check if the given user is a moderator or not, based on their access to the moderation channel
async fn is_moderator(ctx: &Context, user: &User) -> bool {
    let channelid = env::var("MOD_CHANNEL_ID")
        .expect("MOD_CHANNEL_ID id expected")
        .parse::<u64>()
        .expect("Invalid mod role id");
    match ctx.http.get_channel(channelid).await.unwrap() {
        // Anyone who can read the moderation channel counts as a moderator.
        Channel::Guild(channel) => channel
            .permissions_for_user(&ctx.cache, user)
            .await
            .unwrap()
            .read_messages(),
        // The moderation channel is configured as a guild channel.
        _ => unreachable!(),
    }
}
/// The function to handle a vote-addition event for the "delete_button"
/// This function adds the vote then checks whether the goal is reached
/// and then acts accordingly, either by deleting the message and then updating
/// the announcement on the moderation channel or just by updating the announcement
async fn handle_delete_vote(ctx: &Context, voter: User, message: &mut Message) {
    let db = ctx.get_db().await;
    let event = db.get_voting_event(message.id.0).await.unwrap();
    // Voting is closed once the goal has been reached.
    if event.delete_votes == event.delete_votes_required {
        return;
    }
    // NOTE(review): `add_vote` returning 0 appears to mean the user had
    // already voted, in which case the press toggles the vote off — confirm
    // against the db layer.
    if db
        .add_vote(event.vote_message_id, voter.id.0, 0)
        .await
        .unwrap()
        == 0
    {
        db.remove_vote(event.vote_message_id, voter.id.0, 0)
            .await
            .unwrap();
    } else {
        // Re-read the tally; if the goal is now reached, delete the suspect
        // message and record the deletion time.
        let event = db.get_voting_event(message.id.0).await.unwrap();
        if event.delete_votes == event.delete_votes_required {
            let message = ctx
                .http
                .get_message(
                    event.suspect_message_channel_id as u64,
                    event.suspect_message_id as u64,
                )
                .await
                .unwrap();
            message.delete(&ctx.http).await.unwrap();
            db.message_deleted(chrono::Local::now().naive_local(), event.vote_message_id)
                .await
                .unwrap();
        }
    }
    update_voting_message(ctx, event.vote_message_id as u64).await;
}
/// The function to handle a vote-addition event for the "ban_button"
/// This function adds the vote then checks whether the goal is reached
/// and then acts accordingly, either by banning the member and then updating
/// the announcement on the moderation channel or just by updating the announcement
//
// NOTE: The ban actually only applies the "silenced" role upon the user
async fn handle_silence_vote(ctx: &Context, voter: User, message: &mut Message) {
    let db = ctx.get_db().await;
    let event = db.get_voting_event(message.id.0).await.unwrap();
    // Voting is closed once the goal has been reached.
    if event.silence_votes == event.silence_votes_required {
        return;
    }
    // NOTE(review): `add_vote` returning 0 appears to mean the user had
    // already voted, in which case the press toggles the vote off — confirm
    // against the db layer.
    if db
        .add_vote(event.vote_message_id, voter.id.0, 1)
        .await
        .unwrap()
        == 0
    {
        db.remove_vote(event.vote_message_id, voter.id.0, 1)
            .await
            .unwrap();
    } else {
        // Re-read the tally; if the goal is now reached, apply the silenced
        // role to the suspect and try to notify them.
        let event = db.get_voting_event(message.id.0).await.unwrap();
        if event.silence_votes == event.silence_votes_required {
            let guild_id: u64 = env::var("GUILD_ID")
                .expect("Expected GUILD_ID in .env")
                .parse()
                .expect("Invalid GUILD_ID provided");
            let silence_role: u64 = env::var("SILENCED_ROLE_ID")
                .expect("Expected SILENCED_ROLE_ID in .env")
                .parse()
                .expect("Invalid SILENCED_ROLE_ID provided");
            let mut member = ctx
                .http
                .get_member(guild_id, event.suspect_id as u64)
                .await
                .unwrap();
            db.silence_user(member.user.id.0).await.unwrap();
            member.add_role(&ctx.http, silence_role).await.unwrap();
            // A failed DM (e.g. closed DMs) is only logged, not fatal.
            if (member.user.dm(&ctx.http, |m| {
                m.content("Sinut on hiljennetty huonon käyttäytymisen vuoksi arvojäsenten toimesta.\n\nMikäli haluat keskusteluoikeutesi takaisin, voit olla yhteydessä Mastermindeihin joko yksityisviestitse tai sähköpostitse [email protected]. Tarkistathan sääntömme kanavalta <#798799175072219136>.")
            }).await).is_err() {
                info!("Unable to send \"Silenced notification\" to {}", member.user.id.0);
            }
        }
    }
    update_voting_message(ctx, event.vote_message_id as u64).await;
}
/// This function handles the press off the "abuse_button"
/// If the vote-goal is reached, the user will be given a
/// role that prevents them from further abusing the reporting feature
async fn handle_abuse_vote(ctx: &Context, voter: User, message: &mut Message) {
    let db = ctx.get_db().await;
    let event = db.get_voting_event(message.id.0).await.unwrap();
    // Voting is closed once the goal has been reached.
    if event.block_reporter_votes == event.block_reporter_votes_required {
        return;
    }
    // NOTE(review): `add_vote` returning 0 appears to mean the user had
    // already voted, in which case the press toggles the vote off — confirm
    // against the db layer.
    if db
        .add_vote(event.vote_message_id, voter.id.0, 2)
        .await
        .unwrap()
        == 0
    {
        db.remove_vote(event.vote_message_id, voter.id.0, 2)
            .await
            .unwrap();
    } else {
        // Re-read the tally; if the goal is now reached, give the reporter
        // the role that blocks further reports.
        let event = db.get_voting_event(message.id.0).await.unwrap();
        if event.block_reporter_votes == event.block_reporter_votes_required {
            let guild_id: u64 = env::var("GUILD_ID")
                .expect("Expected GUILD_ID in .env")
                .parse()
                .expect("Invalid GUILD_ID provided");
            let abuse_role: u64 = env::var("NO_REPORTS_ROLE_ID")
                .expect("Expected NO_REPORTS_ROLE_ID in .env")
                .parse()
                .expect("Invalid NO_REPORTS_ROLE_ID provided");
            let mut member = ctx
                .http
                .get_member(guild_id, event.reporter_id as u64)
                .await
                .unwrap();
            member.add_role(&ctx.http, abuse_role).await.unwrap();
        }
    }
    update_voting_message(ctx, event.vote_message_id as u64).await;
}
/// Handle a press of the "useless clicks" button.
///
/// Increments the click counter in the database, acknowledges the interaction,
/// and refreshes the voting message — unless a refresh for the same message is
/// already in flight, in which case the refresh is skipped (the in-flight one
/// picks up the new count when it re-reads the database).
async fn handle_useless_button(ctx: &Context, component: &mut MessageComponentInteraction) {
    let db = ctx.get_db().await;
    let pending_edits = ctx.get_pending_edits().await;
    db.add_useless_click(component.message.id.0).await.unwrap();
    component
        .create_interaction_response(&ctx.http, |r| r.kind(DeferredUpdateMessage))
        .await
        .unwrap();
    // Check and mark the message under a single lock acquisition. Checking
    // `contains` and calling `add` under separate locks allowed two
    // concurrent presses to both pass the check and issue two redundant
    // message edits.
    {
        let mut pending = pending_edits.lock().await;
        if pending.contains(component.message.id.0) {
            return;
        }
        pending.add(component.message.id.0);
    }
    update_voting_message(ctx, component.message.id.0).await;
    pending_edits.lock().await.remove(component.message.id.0);
}
/// This function handles the vote-interactions and the report interaction and
/// calls the appropriate functions for them (logging stuff in the logs)
pub async fn handle_vote_interaction(ctx: &Context, interaction: Interaction) {
    if let Interaction::MessageComponent(mut component) = interaction {
        // Dispatch on the button's custom id.
        match component.data.custom_id.as_str() {
            "delete_button" => {
                info!("Delete vote by {}", component.user.tag());
                handle_delete_vote(ctx, component.user.clone(), &mut component.message).await;
            }
            "ban_button" => {
                info!("Ban vote by {}", component.user.tag());
                handle_silence_vote(ctx, component.user.clone(), &mut component.message).await;
            }
            "abuse_button" => {
                info!("Abuse vote by {}", component.user.tag());
                handle_abuse_vote(ctx, component.user.clone(), &mut component.message).await;
            }
            "useless_button" => {
                // Acknowledges the interaction itself, so return early.
                handle_useless_button(ctx, &mut component).await;
                return;
            }
            _ => {
                debug!("Unknown interaction: {}", component.data.custom_id);
                return;
            }
        }
        // Shared deferred acknowledgement for the three vote buttons above.
        component
            .create_interaction_response(&ctx.http, |r| r.kind(DeferredUpdateMessage))
            .await
            .unwrap();
    }
}
| 36.696649 | 313 | 0.590234 |
03131edd955b33b43508113070b35ab019f2a136 | 50,417 | // Copyright (c) 2016-2020 Fabian Schuiki
//! Name resolution.
//!
//! This module implements the infrastructure to describe scopes and resolve
//! names in them.
use crate::crate_prelude::*;
use crate::{
ast::AnyNode,
ast_map::AstNode,
common::{SessionContext, Verbosity},
hir::HirNode,
port_list::{self, AsPortedNode},
ParamEnv,
};
use std::{
collections::HashMap,
hash::{Hash, Hasher},
sync::Arc,
};
/// One local scope.
///
/// A rib represents a any kind of scope. Ribs form a tree structure along their
/// parents that may be traversed from the bottom to the top to perform name
/// resolution, or top to bottom to lookup hierarchical names.
///
/// Ribs are created on demand by the `local_rib` query.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Rib {
    /// The node this rib is associated with.
    ///
    /// When querying the compiler for a node's rib, what you get in return is
    /// not necessarily the rib of that node. If the node does not generate a
    /// new rib, you get back the rib of a parent node.
    pub node: NodeId,
    /// The parent rib.
    ///
    /// Note that this does not necessarily correspond to the parent node, but
    /// may skip nodes that do not contain a rib.
    pub parent: Option<NodeId>,
    /// The data associated with the rib.
    pub kind: RibKind,
}
/// A local scope kind.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum RibKind {
    /// A normal declaration, binding a single name to its defining node.
    Normal(Spanned<Name>, NodeId),
    /// A module, mapping each contained name to its definition.
    Module(HashMap<Name, NodeId>),
    /// An enum type declaration, mapping variant names to their definitions.
    Enum(HashMap<Name, NodeId>),
    /// An imported rib, wrapping the rib of the imported item or package.
    Import(Box<Rib>),
}
impl Rib {
    /// Look up a name in this rib, delegating to the rib's kind.
    pub fn get(&self, name: Name) -> Option<NodeId> {
        self.kind.get(name)
    }

    /// Resolve import ribs to the imported rib.
    ///
    /// Returns the wrapped rib for `RibKind::Import`, and `self` otherwise.
    pub fn resolve_imports(&self) -> &Self {
        if let RibKind::Import(ref rib) = self.kind {
            rib.as_ref()
        } else {
            self
        }
    }
}
impl RibKind {
    /// Look up a name directly in this rib kind.
    ///
    /// Import ribs yield `None`; they must be unwrapped first.
    pub fn get(&self, name: Name) -> Option<NodeId> {
        match self {
            RibKind::Normal(n, id) if n.value == name => Some(*id),
            RibKind::Module(defs) => defs.get(&name).cloned(),
            RibKind::Enum(defs) => defs.get(&name).cloned(),
            _ => None,
        }
    }
}
/// Determine the local rib that applies to a node.
///
/// This will return either the rib the node itself generates, or the next rib
/// up the hierarchy.
pub(crate) fn local_rib<'gcx>(cx: &impl Context<'gcx>, node_id: NodeId) -> Result<&'gcx Rib> {
    let ast = cx.ast_of(node_id)?;
    trace!("local_rib for {} ({:?})", ast.desc_full(), node_id);
    // Optional override for the parent rib lookup (used by typedefs below).
    let mut parent = None;
    // First try to determine the rib kind from the AST alone.
    let mut kind = match ast {
        AstNode::TypeParam(_, decl) => Some(RibKind::Normal(decl.name, node_id)),
        AstNode::ValueParam(_, decl) => Some(RibKind::Normal(decl.name, node_id)),
        AstNode::Module(_) => Some(RibKind::Module(HashMap::new())),
        AstNode::VarDecl(decl, _, _) | AstNode::NetDecl(decl, _, _) => Some(RibKind::Normal(
            Spanned::new(decl.name, decl.name_span),
            node_id,
        )),
        AstNode::GenvarDecl(decl) => Some(RibKind::Normal(decl.name, node_id)),
        AstNode::Stmt(stmt) => match stmt.kind {
            ast::VarDeclStmt(_) => {
                let hir = match cx.hir_of(node_id)? {
                    HirNode::Stmt(x) => x,
                    _ => unreachable!(),
                };
                match hir.kind {
                    hir::StmtKind::InlineGroup { rib, .. } => return cx.local_rib(rib),
                    _ => None,
                }
            }
            _ => None,
        },
        AstNode::Package(_) => Some(RibKind::Module(HashMap::new())),
        AstNode::Type(_) => {
            let hir = match cx.hir_of(node_id)? {
                HirNode::Type(x) => x,
                _ => unreachable!(),
            };
            local_rib_kind_for_type(cx, &hir.kind)
        }
        // Imports create a rib that wraps the rib of the imported item, or
        // the entire hierarchical rib of the package for wildcard imports.
        AstNode::Import(import) => {
            let pkg_id = match cx.gcx().find_package(import.pkg.value) {
                Some(id) => id,
                None => {
                    cx.emit(
                        DiagBuilder2::error(format!("`{}` not found", import.pkg.value))
                            .span(import.pkg.span),
                    );
                    return Err(());
                }
            };
            if let Some(name) = import.name {
                trace!(
                    "Importing single `{}` from `{}`: {}",
                    name,
                    import.pkg,
                    cx.ast_of(pkg_id)?.desc_full()
                );
                let resolved = cx.resolve_downwards_or_error(name, pkg_id)?;
                let rib = cx.local_rib(resolved)?;
                Some(RibKind::Import(Box::new(rib.clone())))
            } else {
                let rib = cx.hierarchical_rib(pkg_id)?;
                trace!("Importing entire `{}`: {:#?}", import.pkg, rib);
                Some(RibKind::Import(Box::new(rib.clone())))
            }
        }
        AstNode::SubroutineDecl(decl) => Some(RibKind::Normal(decl.prototype.name, node_id)),
        _ => None,
    };
    // Fall back to HIR-level information for nodes whose AST alone does not
    // determine a rib (typedefs and internal ports).
    if kind.is_none() {
        let hir = cx.hir_of(node_id)?;
        kind = match hir {
            HirNode::Typedef(def) => {
                parent = Some(def.ty);
                Some(RibKind::Normal(
                    Spanned::new(def.name.value, def.name.span),
                    node_id,
                ))
            }
            HirNode::IntPort(port) => Some(RibKind::Normal(port.name, node_id)),
            _ => None,
        };
    }
    // Nodes that generate no rib of their own delegate to their parent's rib.
    let kind = match kind {
        Some(kind) => kind,
        None => {
            return cx.local_rib(
                cx.parent_node_id(node_id)
                    .expect("root node must produce a rib"),
            );
        }
    };
    let rib = Rib {
        node: node_id,
        parent: match parent.or_else(|| cx.parent_node_id(node_id)) {
            Some(parent_id) => Some(cx.local_rib(parent_id)?.node),
            None => None,
        },
        kind: kind,
    };
    // With name-resolution verbosity enabled, emit a diagnostic for every
    // rib that is created.
    if cx.sess().has_verbosity(Verbosity::NAMES) {
        let mut d = DiagBuilder2::note(format!(
            "created local rib for {}",
            cx.ast_of(node_id)?.desc_full()
        ))
        .span(cx.span(node_id))
        .add_note(format!("{:?}", rib.kind));
        if let Some(parent) = rib.parent {
            d = d
                .add_note("Parent is here:".to_string())
                .span(cx.span(parent));
        }
        cx.emit(d);
    }
    Ok(cx.arena().alloc_rib(rib))
}
/// Determine the rib kind generated by a type, if any.
///
/// Packed array types delegate to their element type; enum types expose their
/// variants as a rib. All other types generate no rib.
fn local_rib_kind_for_type<'gcx>(cx: &impl Context<'gcx>, kind: &hir::TypeKind) -> Option<RibKind> {
    trace!("creating local rib for type {:#?}", kind);
    match kind {
        hir::TypeKind::PackedArray(inner, ..) => local_rib_kind_for_type(cx, inner.as_ref()),
        hir::TypeKind::Enum(ref variants, _) => Some(RibKind::Enum(
            variants
                .iter()
                .map(|(name, id)| (name.value, *id))
                .collect(),
        )),
        _ => None,
    }
}
/// Determine the hierarchical rib of a node.
///
/// This will return a rib containing the hierarchical names exposed by a node.
pub(crate) fn hierarchical_rib<'gcx>(
    cx: &impl Context<'gcx>,
    node_id: NodeId,
) -> Result<&'gcx Rib> {
    let hir = cx.hir_of(node_id)?;
    let mut names = HashMap::new();
    // Only packages and modules expose a hierarchical rib; start the walk at
    // their last rib.
    let mut rib_id = match hir {
        HirNode::Package(pkg) => Some(pkg.last_rib),
        HirNode::Module(module) => Some(module.last_rib),
        _ => panic!("{} has no hierarchical rib", hir.desc_full()),
    };
    // Walk the rib chain upwards, collecting all exposed names.
    while let Some(id) = rib_id {
        let rib = cx.local_rib(id)?;
        match rib.kind {
            RibKind::Normal(name, def) => {
                names.insert(name.value, def);
            }
            RibKind::Module(ref defs) => names.extend(defs),
            RibKind::Enum(ref defs) => names.extend(defs),
            RibKind::Import(_) => (), // imports are never visible
        }
        rib_id = rib.parent;
        // Stop the walk once it reaches the owning node itself.
        if rib_id == Some(node_id) {
            rib_id = None;
        }
    }
    let rib = Rib {
        node: node_id,
        parent: None,
        kind: RibKind::Module(names),
    };
    Ok(cx.arena().alloc_rib(rib))
}
/// Resolve a name upwards through the ribs.
///
/// This is equivalent to performing regular scoped namespace lookup.
pub(crate) fn resolve_upwards<'gcx>(
    cx: &impl Context<'gcx>,
    name: Name,
    start_at: NodeId,
) -> Result<Option<NodeId>> {
    // Get the AST associated with the node ID and map it to an AnyNode.
    let ast = cx.ast_of(start_at)?;
    let node = match ast.get_any() {
        Some(x) => x,
        None => bug_span!(
            cx.span(start_at),
            cx,
            "resolve_upwards called on node which doesn't implement AnyNode yet: {:?}",
            ast
        ),
    };
    // Delegate to the scope-based resolver and strip the `Def` wrapper down
    // to the defining node's id.
    cx.resolve_local(name, cx.scope_location(node), false)
        .map(|def| def.map(|def| def.node.id()))
}
/// Resolve a name downwards.
///
/// This is equivalent to performing a hierarchical name lookup.
pub(crate) fn resolve_downwards<'gcx>(
    cx: &impl Context<'gcx>,
    name: Name,
    start_at: NodeId,
) -> Result<Option<NodeId>> {
    // Get the AST associated with the node ID and map it to a ScopedNode.
    let ast = cx.ast_of(start_at)?;
    let node = match ast.get_any().and_then(|n| n.as_all().get_scoped_node()) {
        Some(x) => x,
        None => bug_span!(
            cx.span(start_at),
            cx,
            "resolve_downwards called on node which doesn't implement ScopedNode yet: {:?}",
            ast
        ),
    };
    // Look the name up in the node's namespace and return the defining node.
    Ok(cx.resolve_namespace(name, node).map(|def| def.node.id()))
}
/// Resolve a node to its target.
///
/// Handles identifier and scoped (`a::b`) expressions and types, as well as
/// internal ports that reference a declaration in the module body.
pub(crate) fn resolve_node<'gcx>(
    cx: &impl Context<'gcx>,
    node_id: NodeId,
    env: ParamEnv,
) -> Result<NodeId> {
    let hir = cx.hir_of(node_id)?;
    match hir {
        // Expressions: plain identifiers resolve upwards; `scope::name`
        // first resolves the scope, then looks the name up inside it.
        HirNode::Expr(expr) => match expr.kind {
            hir::ExprKind::Ident(ident) => return cx.resolve_upwards_or_error(ident, node_id),
            hir::ExprKind::Scope(scope_id, name) => {
                let within = cx.resolve_node(scope_id, env)?;
                return cx.resolve_downwards_or_error(name, within);
            }
            _ => (),
        },
        // Types resolve analogously to expressions.
        HirNode::Type(ty) => match ty.kind {
            hir::TypeKind::Named(name) => return cx.resolve_upwards_or_error(name, node_id),
            hir::TypeKind::Scope(scope_id, name) => {
                let within = cx.resolve_node(scope_id, env)?;
                return cx.resolve_downwards_or_error(name, within);
            }
            _ => (),
        },
        // Ports without their own data reference a declaration in the body.
        HirNode::IntPort(port) if port.data.is_none() => {
            return cx
                .resolve_hierarchical_or_error(
                    port.name,
                    port.node.as_all().get_scoped_node().unwrap(),
                )
                .map(|def| def.node.id());
        }
        _ => (),
    }
    // Anything else reaching this query is a compiler bug.
    error!("{:#?}", hir);
    cx.emit(DiagBuilder2::bug("cannot resolve node").span(hir.human_span()));
    Err(())
}
/// The resolved fields of a struct type, as produced by the `struct_def`
/// query.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct StructDef {
    /// Whether this is a packed struct.
    pub packed: bool,
    /// The fields, in declaration order.
    pub fields: Vec<StructField>,
}
/// A single field of a struct definition.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct StructField {
    /// The field's name.
    pub name: Spanned<Name>,
    /// The node id of the field's type.
    pub ty: NodeId,
    /// The node id of the field declaration itself.
    pub field: NodeId,
}
/// Obtain the details of a struct definition.
///
/// Emits an error and fails if `node_id` does not refer to a struct type.
pub(crate) fn struct_def<'gcx>(cx: &impl Context<'gcx>, node_id: NodeId) -> Result<Arc<StructDef>> {
    let hir = cx.hir_of(node_id)?;
    let struct_fields = match hir {
        HirNode::Type(hir::Type {
            kind: hir::TypeKind::Struct(ref fields),
            ..
        }) => fields,
        _ => {
            cx.emit(
                DiagBuilder2::error(format!("{} is not a struct", hir.desc_full()))
                    .span(hir.human_span()),
            );
            return Err(());
        }
    };
    // Collect the variable declarations among the fields; any other HIR
    // nodes are silently skipped.
    let fields = struct_fields
        .iter()
        .flat_map(|&id| match cx.hir_of(id) {
            Ok(HirNode::VarDecl(vd)) => Some(StructField {
                name: vd.name,
                ty: vd.ty,
                field: id,
            }),
            _ => None,
        })
        .collect();
    Ok(Arc::new(StructDef {
        // NOTE(review): `packed` is hard-coded to true; unpacked structs do
        // not appear to be distinguished here — confirm.
        packed: true,
        fields,
    }))
}
/// Resolve the field name in a field access expression.
///
/// Returns the index of the field that is actually being accessed, and a
/// reference to the member itself.
#[moore_derive::query]
pub(crate) fn resolve_field_access<'a>(
    cx: &impl Context<'a>,
    node_id: NodeId,
    env: ParamEnv,
) -> Result<(usize, &'a ty::StructMember<'a>)> {
    let hir = match cx.hir_of(node_id)? {
        HirNode::Expr(x) => x,
        _ => unreachable!(),
    };
    let (target_id, name) = match hir.kind {
        hir::ExprKind::Field(target_id, name) => (target_id, name),
        _ => unreachable!(),
    };
    // The access target must have a struct type, possibly behind a named
    // type alias.
    let ty = cx.type_of(target_id, env)?;
    let strukt = if let Some(strukt) = ty.get_struct() {
        strukt
    } else {
        let mut d = DiagBuilder2::error(format!("value of type `{}` is not a struct", ty))
            .span(hir.human_span());
        // If the type is an alias, also show what it resolves to.
        if ty.resolve_full() != ty {
            d = d.add_note(format!("`{}` is defined as `{}`", ty, ty.resolve_full()));
        }
        cx.emit(d);
        return Err(());
    };
    // Find the member with the accessed name.
    let index = strukt
        .members
        .iter()
        .position(|member| member.name.value == name.value);
    match index {
        Some(x) => Ok((x, &strukt.members[x])),
        None => {
            cx.emit(
                DiagBuilder2::error(format!("value of type `{}` has no field `{}`", ty, name))
                    .span(name.span())
                    .add_note(format!("`{}` was defined here:", ty))
                    .span(strukt.ast.span()),
            );
            Err(())
        }
    }
}
/// Determine the scope generated by a node.
///
/// Convenience wrapper over the `generated_scope` query for nodes referenced
/// by id; only modules and packages are accepted here.
pub fn generated_scope_id<'gcx>(
    cx: &impl Context<'gcx>,
    node_id: NodeId,
) -> Result<&'gcx Scope<'gcx>> {
    let ast = cx.ast_of(node_id)?;
    Ok(match ast {
        AstNode::Module(node) => cx.generated_scope(node),
        AstNode::Package(node) => cx.generated_scope(node),
        _ => bug_span!(ast.span(), cx, "node does not generate a scope"),
    })
}
/// Marker trait for AST nodes that generate a scope.
pub trait ScopedNode<'a>: ast::AnyNode<'a> {}
// Every item listed here must also be reflected in the `AsScopedNode` impl
// further down in this file.
impl<'a> ScopedNode<'a> for ast::Root<'a> {}
impl<'a> ScopedNode<'a> for ast::SourceFile<'a> {}
impl<'a> ScopedNode<'a> for ast::Module<'a> {}
impl<'a> ScopedNode<'a> for ast::Interface<'a> {}
impl<'a> ScopedNode<'a> for ast::Package<'a> {}
impl<'a> ScopedNode<'a> for ast::Stmt<'a> {}
impl<'a> ScopedNode<'a> for ast::Procedure<'a> {}
impl<'a> ScopedNode<'a> for ast::ClassDecl<'a> {}
impl<'a> ScopedNode<'a> for ast::SubroutineDecl<'a> {}
impl<'a> ScopedNode<'a> for ast::GenerateFor<'a> {}
impl<'a> ScopedNode<'a> for ast::GenerateIf<'a> {}
impl<'a> ScopedNode<'a> for ast::GenerateCase<'a> {}
impl<'a> ScopedNode<'a> for ast::GenerateBlock<'a> {}
// Compare and hash scoped nodes by reference for use in the query system.
// Identity is pointer identity of the underlying node, not structural
// equality.
impl<'a> Eq for &'a dyn ScopedNode<'a> {}
impl<'a> PartialEq for &'a dyn ScopedNode<'a> {
    fn eq(&self, other: &Self) -> bool {
        std::ptr::eq(self.as_ptr(), other.as_ptr())
    }
}
impl<'a> Hash for &'a dyn ScopedNode<'a> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        std::ptr::hash(self.as_ptr(), h)
    }
}
/// Anything that can be converted to a `ScopedNode`.
pub trait AsScopedNode<'a> {
    /// Return this node as a `ScopedNode`, or `None` if it generates no scope.
    fn get_scoped_node(&self) -> Option<&'a dyn ScopedNode<'a>>;

    /// Check if this node is a `ScopedNode`.
    ///
    /// Default implementation in terms of `get_scoped_node`.
    fn is_scoped_node(&self) -> bool {
        self.get_scoped_node().is_some()
    }
}
impl<'a> AsScopedNode<'a> for ast::AllNode<'a> {
    fn get_scoped_node(&self) -> Option<&'a dyn ScopedNode<'a>> {
        match *self {
            // This should reflect the impl trait list above!
            ast::AllNode::Root(x) => Some(x),
            ast::AllNode::SourceFile(x) => Some(x),
            ast::AllNode::Module(x) => Some(x),
            ast::AllNode::Interface(x) => Some(x),
            ast::AllNode::Package(x) => Some(x),
            // Only block-like and loop-like statements generate a scope;
            // other statement kinds do not.
            ast::AllNode::Stmt(x) => match x.kind {
                ast::SequentialBlock(..)
                | ast::ParallelBlock(..)
                | ast::IfStmt { .. }
                | ast::CaseStmt { .. }
                | ast::ForeverStmt(..)
                | ast::RepeatStmt(..)
                | ast::WhileStmt(..)
                | ast::DoStmt(..)
                | ast::ForStmt(..)
                | ast::ForeachStmt(..) => Some(x),
                _ => None,
            },
            ast::AllNode::Procedure(x) => Some(x),
            ast::AllNode::ClassDecl(x) => Some(x),
            ast::AllNode::SubroutineDecl(x) => Some(x),
            ast::AllNode::GenerateFor(x) => Some(x),
            ast::AllNode::GenerateIf(x) => Some(x),
            ast::AllNode::GenerateCase(x) => Some(x),
            ast::AllNode::GenerateBlock(x) => Some(x),
            _ => None,
        }
    }
}
/// Determine the scope generated by a node.
#[moore_derive::query]
pub(crate) fn generated_scope<'a>(
    cx: &impl Context<'a>,
    node: &'a dyn ScopedNode<'a>,
) -> &'a Scope<'a> {
    // Find the parent scope. The root node has no parent.
    let parent = node
        .get_parent()
        .map(|_| cx.scope_location(node.as_any()).scope);
    // Create a new scope generator which will traverse the AST.
    let mut gen = ScopeGenerator::new(
        cx,
        Scope {
            node,
            parent,
            defs: Default::default(),
            wildcard_imports: Default::default(),
            subscopes: Default::default(),
        },
    );
    debug!("Generating scope {:?}", node);
    // Add definitions for the analyzed ports.
    if let Some(node) = node.as_all().get_ported() {
        for node in &cx.canonicalize_ports(node).int {
            // Skip ports which are not definitions themselves, but reference
            // another definition in the body.
            if node.data.is_some() {
                gen.add_def(Def {
                    node: DefNode::IntPort(node),
                    name: node.name,
                    vis: DefVis::LOCAL | DefVis::HIERARCHICAL,
                    may_override: true,
                    ordered: false,
                });
            }
        }
    }
    // Gather the definitions.
    node.accept(&mut gen);
    // If this is the AST root, pull up `GLOBAL` definitions from the subscopes.
    if node.as_all().is_root() {
        trace!("Pulling up global defs from subscopes");
        for node in gen.scope.subscopes.clone() {
            let scope = cx.generated_scope(node);
            for &def in scope.defs.values() {
                gen.add_def(def);
            }
        }
    }
    // Allocate the scope into the arena and return it.
    trace!("Generated scope {:#?}", gen.scope);
    cx.gcx().arena.alloc_scope(gen.scope)
}
/// A scope.
#[derive(Debug)]
pub struct Scope<'a> {
    /// The node which generates this scope.
    pub node: &'a dyn ScopedNode<'a>,
    /// The node which generates the parent scope, if any.
    pub parent: Option<&'a dyn ScopedNode<'a>>,
    /// The definitions in this scope.
    pub defs: HashMap<Name, Def<'a>>,
    /// The wildcard imports in this scope.
    pub wildcard_imports: Vec<&'a ast::ImportItem<'a>>,
    /// The subscopes.
    ///
    /// These are the nodes nested in this scope which generate scopes of
    /// their own.
    pub subscopes: Vec<&'a dyn ScopedNode<'a>>,
}
/// A definition in a scope.
#[derive(Debug, Clone, Copy)]
pub struct Def<'a> {
    /// The node which defines the name.
    pub node: DefNode<'a>,
    /// The name of the definition.
    pub name: Spanned<Name>,
    /// Where the definition is visible.
    pub vis: DefVis,
    /// Whether the definition may override a previous one of the same name.
    pub may_override: bool,
    /// Whether the definitions is only visible to things that come after it.
    pub ordered: bool,
}
bitflags::bitflags! {
/// Visibility of a definition.
pub struct DefVis: u8 {
/// Whether the definition is visible to local resolution, e.g. `foo`.
const LOCAL = 1 << 0;
/// Whether the definition is accessible in a hierarchical name, e.g.
/// `parent.foo`.
const HIERARCHICAL = 1 << 1;
/// Whether the definition is accessible during namespace resolution,
/// e.g. `parent::foo`.
const NAMESPACE = 1 << 2;
/// Whether the definitions is visible in the global scope. Definitions
/// in a `SourceFile` will be re-exported into `Root` if they are marked
/// as global.
const GLOBAL = 1 << 3;
}
}
/// A node that generates a definition.
#[derive(Debug, Clone, Copy)]
pub enum DefNode<'a> {
/// Any AST node.
Ast(&'a dyn ast::AnyNode<'a>),
/// An internal port of a module.
IntPort(&'a port_list::IntPort<'a>),
}
/// Formats the defining node for user-facing diagnostics, delegating to the
/// underlying AST node (for ports, the originating `ast` node).
impl<'a> std::fmt::Display for DefNode<'a> {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match *self {
            DefNode::Ast(inner) => write!(f, "{}", inner),
            DefNode::IntPort(port) => write!(f, "{}", port.ast),
        }
    }
}
/// Make `DefNode` usable wherever an AST node is expected by forwarding each
/// accessor to the wrapped node (for ports, to the port's `ast`/`id`/`span`).
impl<'a> ast::AnyNode<'a> for DefNode<'a> {
    fn id(&self) -> moore_common::NodeId {
        match *self {
            DefNode::Ast(node) => node.id(),
            DefNode::IntPort(node) => node.id,
        }
    }

    fn span(&self) -> moore_common::source::Span {
        match *self {
            DefNode::Ast(node) => node.span(),
            DefNode::IntPort(node) => node.span,
        }
    }

    fn order(&self) -> usize {
        match *self {
            DefNode::Ast(node) => node.order(),
            // Ports have no own lexical order; use the enclosing AST node's.
            DefNode::IntPort(node) => node.ast.order(),
        }
    }
}

impl<'a> ast::AnyNodeData for DefNode<'a> {
    fn fmt_indefinite(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match *self {
            DefNode::Ast(node) => node.fmt_indefinite(f),
            DefNode::IntPort(node) => node.ast.fmt_indefinite(f),
        }
    }
}

impl<'a> ast::BasicNode<'a> for DefNode<'a> {
    fn type_name(&self) -> &'static str {
        match self {
            DefNode::Ast(node) => node.type_name(),
            DefNode::IntPort(node) => node.ast.type_name(),
        }
    }

    fn as_all(&'a self) -> syntax::ast::AllNode<'a> {
        match self {
            DefNode::Ast(node) => node.as_all(),
            DefNode::IntPort(node) => node.ast.as_all(),
        }
    }

    fn as_any(&'a self) -> &'a (dyn syntax::ast::AnyNode<'a> + 'a) {
        match self {
            DefNode::Ast(node) => node.as_any(),
            DefNode::IntPort(node) => node.ast.as_any(),
        }
    }
}

// TODO: The following visitor business should not be necessary just to get some
// struct to act like a fundamental node.

/// `DefNode` is not part of the tree proper; visiting it is a logic error and
/// panics rather than silently doing nothing.
impl<'a> ast::AcceptVisitor<'a> for DefNode<'a> {
    fn accept(&'a self, _: &mut dyn syntax::ast::Visitor<'a>) {
        panic!("accept() called on non-AST node {:?}", self);
    }
}

impl<'a> ast::ForEachNode<'a> for DefNode<'a> {}

impl<'a> ast::ForEachChild<'a> for DefNode<'a> {
    fn for_each_child(&'a self, _: &mut dyn std::ops::FnMut(&'a dyn ast::AnyNode<'a>)) {
        panic!("for_each_child() called on non-AST node {:?}", self);
    }
}
/// A visitor that gathers the contents of a scope from the AST.
struct ScopeGenerator<'a, 'c, C> {
    /// The context.
    cx: &'c C,
    /// The scope being assembled.
    scope: Scope<'a>,
}

impl<'a, 'c, C: Context<'a>> ScopeGenerator<'a, 'c, C> {
    /// Create a new scope generator.
    pub fn new(cx: &'c C, scope: Scope<'a>) -> Self {
        ScopeGenerator { cx, scope }
    }

    /// Register a subscope.
    pub fn add_subscope(&mut self, node: &'a dyn ScopedNode<'a>) {
        trace!(" - Adding subscope for {:?}", node);
        self.scope.subscopes.push(node);
    }

    /// Register a wildcard import.
    pub fn add_wildcard_import(&mut self, node: &'a ast::ImportItem<'a>) {
        trace!(" - Adding wildcard import {:?}", node);
        self.scope.wildcard_imports.push(node);
    }

    /// Register a definition.
    ///
    /// Handles name collisions: port definitions are never replaced, forward
    /// typedef declarations may be overridden (and duplicates of them are
    /// dropped), and any other clash without `may_override` produces a
    /// "defined multiple times" diagnostic.
    pub fn add_def(&mut self, mut def: Def<'a>) {
        trace!(" - Adding definition {:?}", def);

        // Check that the definition does not collide with a previous one.
        if let Some(existing) = self.scope.defs.get(&def.name.value) {
            match existing.node {
                // Do not redefine ports.
                DefNode::IntPort(_) => return,
                // Handle being allowed to override forward declarations
                DefNode::Ast(node) => {
                    if let ast::AllNode::Typedef(ast) = node.as_all() {
                        if let ast::ForwardType { kind: _ } = ast.ty.kind.data {
                            def.may_override = true;
                        }
                    }
                }
            };

            // We can also have multiple forward declarations of the same name, must
            // ignore all but the first one here. This also correctly handles the case
            // where a "forward" declaration actually comes after the definition, which
            // is apparently not an error in svlog.
            if let DefNode::Ast(node) = def.node {
                if let ast::AllNode::Typedef(ast) = node.as_all() {
                    if let ast::ForwardType { kind: _ } = ast.ty.kind.data {
                        return;
                    }
                }
            }

            if !def.may_override {
                let d = DiagBuilder2::error(format!("`{}` is defined multiple times", def.name))
                    .span(def.name.span)
                    .add_note(format!("Previous definition of `{}` was here:", def.name))
                    .span(existing.name.span);
                self.cx.emit(d);
                return;
            }
        }

        // Store the definition.
        self.scope.defs.insert(def.name.value, def);
    }
}
/// The AST traversal that populates a scope.
///
/// Each `pre_visit_*` hook either registers a definition, a subscope, or a
/// wildcard import for the node it sees. The chosen `DefVis` flags and the
/// `ordered` bit encode how each kind of declaration participates in local,
/// hierarchical, namespace, and global resolution.
impl<'a, C: Context<'a>> ast::Visitor<'a> for ScopeGenerator<'a, '_, C> {
    // We return `false` in the pre-visit functions when the visited node
    // generates a subscope, to avoid gobbling up its local definitions.

    fn pre_visit_root(&mut self, node: &'a ast::Root<'a>) -> bool {
        self.add_subscope(node);
        false
    }

    fn pre_visit_source_file(&mut self, node: &'a ast::SourceFile<'a>) -> bool {
        self.add_subscope(node);
        false
    }

    fn pre_visit_module(&mut self, node: &'a ast::Module<'a>) -> bool {
        self.add_subscope(node);
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: node.name,
            vis: DefVis::LOCAL | DefVis::GLOBAL,
            may_override: true,
            ordered: false,
        });
        false
    }

    fn pre_visit_interface(&mut self, node: &'a ast::Interface<'a>) -> bool {
        self.add_subscope(node);
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: node.name,
            vis: DefVis::LOCAL | DefVis::GLOBAL,
            may_override: true,
            ordered: false,
        });
        false
    }

    fn pre_visit_modport_name(&mut self, node: &'a ast::ModportName<'a>) -> bool {
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: node.name,
            vis: DefVis::LOCAL | DefVis::HIERARCHICAL,
            may_override: false,
            ordered: false,
        });
        false
    }

    fn pre_visit_package(&mut self, node: &'a ast::Package<'a>) -> bool {
        self.add_subscope(node);
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: node.name,
            vis: DefVis::LOCAL | DefVis::NAMESPACE | DefVis::GLOBAL,
            may_override: false,
            ordered: false,
        });
        false
    }

    fn pre_visit_import_item(&mut self, node: &'a ast::ImportItem<'a>) -> bool {
        // `import foo::bar` creates a named def; `import foo::*` is recorded
        // as a wildcard import to be consulted lazily during resolution.
        if let Some(name) = node.name {
            self.add_def(Def {
                node: DefNode::Ast(node),
                name,
                vis: DefVis::LOCAL,
                may_override: false,
                ordered: true,
            });
        } else {
            self.add_wildcard_import(node);
        }
        true
    }

    fn pre_visit_var_decl_name(&mut self, node: &'a ast::VarDeclName<'a>) -> bool {
        // Don't register definitions for struct members.
        if node
            .get_parent()
            .and_then(|p| p.as_all().get_struct_member())
            .is_some()
        {
            return true;
        }
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: Spanned::new(node.name, node.name_span),
            vis: DefVis::LOCAL | DefVis::HIERARCHICAL,
            may_override: false,
            ordered: true,
        });
        true
    }

    fn pre_visit_param_type_decl(&mut self, node: &'a ast::ParamTypeDecl<'a>) -> bool {
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: node.name,
            vis: DefVis::LOCAL | DefVis::NAMESPACE | DefVis::HIERARCHICAL,
            may_override: false,
            ordered: true,
        });
        true
    }

    fn pre_visit_param_value_decl(&mut self, node: &'a ast::ParamValueDecl<'a>) -> bool {
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: node.name,
            vis: DefVis::LOCAL | DefVis::NAMESPACE | DefVis::HIERARCHICAL,
            may_override: false,
            ordered: true,
        });
        true
    }

    fn pre_visit_genvar_decl(&mut self, node: &'a ast::GenvarDecl<'a>) -> bool {
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: node.name,
            vis: DefVis::LOCAL | DefVis::HIERARCHICAL,
            may_override: false,
            ordered: true,
        });
        true
    }

    fn pre_visit_enum_name(&mut self, node: &'a ast::EnumName<'a>) -> bool {
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: node.name,
            vis: DefVis::LOCAL | DefVis::NAMESPACE | DefVis::HIERARCHICAL,
            may_override: false,
            ordered: true,
        });
        true
    }

    fn pre_visit_typedef(&mut self, node: &'a ast::Typedef<'a>) -> bool {
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: node.name,
            vis: DefVis::LOCAL | DefVis::NAMESPACE | DefVis::HIERARCHICAL,
            may_override: false,
            ordered: true,
        });
        true
    }

    fn pre_visit_procedure(&mut self, node: &'a ast::Procedure<'a>) -> bool {
        self.add_subscope(node);
        false
    }

    fn pre_visit_class_decl(&mut self, node: &'a ast::ClassDecl<'a>) -> bool {
        self.add_subscope(node);
        // self.add_def(Def {
        //     node: DefNode::Ast(node),
        //     name: node.name,
        //     vis: DefVis::LOCAL | DefVis::NAMESPACE,
        //     may_override: false,
        //     ordered: true,
        // });
        false
    }

    fn pre_visit_subroutine_decl(&mut self, node: &'a ast::SubroutineDecl<'a>) -> bool {
        self.add_subscope(node);
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: node.prototype.name,
            vis: DefVis::LOCAL | DefVis::NAMESPACE | DefVis::HIERARCHICAL,
            may_override: false,
            ordered: false,
        });
        false
    }

    fn pre_visit_subroutine_port(&mut self, node: &'a ast::SubroutinePort<'a>) -> bool {
        if let Some(ref name) = node.name {
            self.add_def(Def {
                node: DefNode::Ast(node),
                name: name.name,
                vis: DefVis::LOCAL,
                may_override: false,
                ordered: true,
            });
        }
        true
    }

    fn pre_visit_inst_name(&mut self, node: &'a ast::InstName<'a>) -> bool {
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: node.name,
            vis: DefVis::LOCAL | DefVis::HIERARCHICAL,
            may_override: false,
            ordered: false,
        });
        true
    }

    fn pre_visit_generate_for(&mut self, node: &'a ast::GenerateFor<'a>) -> bool {
        self.add_subscope(node);
        false
    }

    fn pre_visit_generate_if(&mut self, node: &'a ast::GenerateIf<'a>) -> bool {
        self.add_subscope(node);
        false
    }

    fn pre_visit_generate_case(&mut self, node: &'a ast::GenerateCase<'a>) -> bool {
        self.add_subscope(node);
        false
    }

    fn pre_visit_generate_block(&mut self, node: &'a ast::GenerateBlock<'a>) -> bool {
        self.add_subscope(node);
        // A labeled generate block is itself addressable by name.
        if let Some(name) = node.label {
            self.add_def(Def {
                node: DefNode::Ast(node),
                name,
                vis: DefVis::LOCAL | DefVis::HIERARCHICAL,
                may_override: false,
                ordered: false,
            });
        }
        false
    }

    fn pre_visit_stmt(&mut self, node: &'a ast::Stmt<'a>) -> bool {
        // Do not traverse into statements that generate their own scope.
        match node.kind {
            ast::SequentialBlock(..)
            | ast::ParallelBlock(..)
            | ast::IfStmt { .. }
            | ast::CaseStmt { .. }
            | ast::ForeverStmt(..)
            | ast::RepeatStmt(..)
            | ast::WhileStmt(..)
            | ast::DoStmt(..)
            | ast::ForStmt(..)
            | ast::ForeachStmt(..) => {
                self.add_subscope(node);
                false
            }
            _ => true,
        }
    }

    fn pre_visit_foreach_index(&mut self, node: &'a ast::ForeachIndex<'a>) -> bool {
        self.add_def(Def {
            node: DefNode::Ast(node),
            name: node.name,
            vis: DefVis::LOCAL,
            may_override: false,
            ordered: true,
        });
        true
    }

    fn pre_visit_dpi_decl(&mut self, node: &'a ast::DpiDecl<'a>) -> bool {
        // Only DPI imports introduce a callable name; exports reference
        // existing definitions.
        match node.data {
            ast::DpiDeclData::Import { ref prototype, .. } => {
                self.add_def(Def {
                    node: DefNode::Ast(node),
                    name: prototype.name,
                    vis: DefVis::LOCAL | DefVis::NAMESPACE,
                    may_override: false,
                    ordered: true,
                });
            }
            _ => (),
        }
        false
    }
}
/// Determine the location of a node within its enclosing scope.
///
/// Walks up the parent chain starting at `node`'s parent until it finds a
/// node that generates a scope, and returns that scope together with the
/// original node's lexical order (used for def-before-use checks).
#[moore_derive::query]
pub(crate) fn scope_location<'a>(
    cx: &impl Context<'a>,
    node: &'a dyn ast::AnyNode<'a>,
) -> ScopeLocation<'a> {
    trace!("Finding scope location of {:?}", node);

    // Keep the lexical order of the initiating node around.
    let order = node.order();

    // Starting at the current node, check if it generates a scope, and if not,
    // advance to its parent.
    let mut next: Option<&dyn ast::AnyNode> = node.get_parent();
    while let Some(node) = next {
        if let Some(scoped) = node.as_all().get_scoped_node() {
            trace!(" - Found {:?}", node);
            return ScopeLocation {
                scope: scoped,
                order,
            };
        } else {
            trace!(" - Upwards to {:?}", node);
            next = node.get_parent();
        }
    }

    // We should never arrive here, since the root node always generates a
    // scope.
    bug_span!(
        node.span(),
        cx,
        "arrived at root node {:?}, which must generate a scope",
        node
    );
}

/// A location of a node within its enclosing scope.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ScopeLocation<'a> {
    /// The node which generates the enclosing scope.
    pub scope: &'a dyn ScopedNode<'a>,
    /// The lexical order within that scope.
    pub order: usize,
}
/// Resolve a local name in a scope.
///
/// This traverses up the scope tree until a definition with visibility `LOCAL`
/// is found. Returns `None` if no such name exists. Named imports found along
/// the way are followed into the imported package; wildcard imports are
/// consulted only when `skip_imports` is `false`.
#[moore_derive::query]
pub(crate) fn resolve_local<'a>(
    cx: &impl Context<'a>,
    name: Name,
    at: ScopeLocation<'a>,
    skip_imports: bool,
) -> Result<Option<&'a Def<'a>>> {
    debug!("Resolving `{}` locally at {:?}", name, at);
    let scope = cx.generated_scope(at.scope);
    let mut next = Some(scope);
    while let Some(scope) = next {
        // Advance to the parent up-front, so `continue` below safely moves to
        // the next enclosing scope.
        next = scope.parent.map(|p| cx.generated_scope(p));
        trace!(" - Looking in scope {:?}", scope.node);

        // Try to find a matching definition in this scope.
        if let Some(def) = scope.defs.get(&name) {
            // Check if it is visible to local name resolution.
            let vis_ok = def.vis.contains(DefVis::LOCAL);
            // If the definition requires def-before-use, check that it was defined
            // before the location we are trying to use it.
            let order_ok = !def.ordered || def.node.order() < at.order;
            // Return this definition if it matches.
            if vis_ok && order_ok {
                // In case what we've found is an import like `import foo::A`,
                // resolve `A` in `foo` now.
                let def = if let DefNode::Ast(node) = def.node {
                    if let Some(import) = node.as_all().get_import_item() {
                        trace!(" - Following {:?}", import);
                        let inside = cx.resolve_imported_scope(import)?;
                        let binding =
                            cx.resolve_namespace_or_error(import.name.unwrap(), inside)?;
                        binding
                    } else {
                        def
                    }
                } else {
                    def
                };
                debug!(" - Found {:?}", def);
                return Ok(Some(def));
            }
        }

        // Check the wildcard imports for any luck.
        if skip_imports {
            continue;
        }
        for &import in scope.wildcard_imports.iter().rev() {
            // Wildcard imports only cover names used after the import.
            if import.order() > at.order {
                continue;
            }
            let inside = cx.resolve_imported_scope(import)?;
            let def = cx.resolve_namespace(name, inside);
            if def.is_some() {
                return Ok(def);
            }
        }
    }
    Ok(None)
}
/// Resolve a local name in a scope or emit an error.
///
/// Calls `resolve_local`. Either returns `Ok` if a node was found, or `Err`
/// after emitting a diagnostic error message.
#[moore_derive::query]
pub(crate) fn resolve_local_or_error<'a>(
    cx: &impl Context<'a>,
    name: Spanned<Name>,
    at: ScopeLocation<'a>,
    skip_imports: bool,
) -> Result<&'a Def<'a>> {
    match cx.resolve_local(name.value, at, skip_imports)? {
        Some(def) => {
            // With `-V names` verbosity, report every successful resolution.
            if cx.sess().has_verbosity(Verbosity::NAMES) {
                let d = DiagBuilder2::note("name resolution")
                    .span(name.span)
                    .add_note(format!("Resolved `{}` to this {}:", name, def.node))
                    .span(def.node.span());
                cx.emit(d);
            }
            Ok(def)
        }
        None => {
            cx.emit(DiagBuilder2::error(format!("`{}` not found", name.value)).span(name.span));
            Err(())
        }
    }
}
/// Resolve a name in a scope as a namespace lookup.
///
/// Looks the name up directly in `inside`'s generated scope and yields the
/// definition only when it carries `NAMESPACE` visibility. Returns `None`
/// otherwise.
#[moore_derive::query]
pub(crate) fn resolve_namespace<'a>(
    cx: &impl Context<'a>,
    name: Name,
    inside: &'a dyn ScopedNode<'a>,
) -> Option<&'a Def<'a>> {
    debug!("Resolving `{}` in namespace {:?}", name, inside);
    let found = cx
        .generated_scope(inside)
        .defs
        .get(&name)
        .filter(|def| def.vis.contains(DefVis::NAMESPACE));
    if let Some(def) = found {
        debug!(" - Found {:?}", def);
    }
    found
}
/// Resolve a name in a scope as a namespace lookup or emit an error.
///
/// Calls `resolve_namespace`. Either returns `Ok` if a node was found, or `Err`
/// after emitting a diagnostic error message.
#[moore_derive::query]
pub(crate) fn resolve_namespace_or_error<'a>(
    cx: &impl Context<'a>,
    name: Spanned<Name>,
    inside: &'a dyn ScopedNode<'a>,
) -> Result<&'a Def<'a>> {
    match cx.resolve_namespace(name.value, inside) {
        Some(def) => {
            // With `-V names` verbosity, report every successful resolution.
            if cx.sess().has_verbosity(Verbosity::NAMES) {
                let d = DiagBuilder2::note("name resolution")
                    .span(name.span)
                    .add_note(format!("Resolved `{}` to this {}:", name, def.node))
                    .span(def.node.span());
                cx.emit(d);
            }
            Ok(def)
        }
        None => {
            cx.emit(
                DiagBuilder2::error(format!("`{}` not found in {}", name.value, inside))
                    .span(name.span)
                    .add_note(format!("{} was defined here:", inside))
                    .span(inside.human_span()),
            );
            Err(())
        }
    }
}
/// Resolve a name in a scope as a hierarchical lookup.
///
/// This checks if the scope contains a definition with visibility
/// `HIERARCHICAL`. Returns `None` if no such name exists.
#[moore_derive::query]
pub(crate) fn resolve_hierarchical<'a>(
    cx: &impl Context<'a>,
    name: Name,
    inside: &'a dyn ScopedNode<'a>,
) -> Option<&'a Def<'a>> {
    debug!("Resolving `{}` hierarchically in {:?}", name, inside);
    let scope = cx.generated_scope(inside);
    match scope.defs.get(&name) {
        Some(def) if def.vis.contains(DefVis::HIERARCHICAL) => {
            debug!(" - Found {:?}", def);
            Some(def)
        }
        _ => None,
    }
}

/// Resolve a name in a scope as a hierarchical lookup or emit an error.
///
/// Calls `resolve_hierarchical`. Either returns `Ok` if a node was found, or
/// `Err` after emitting a diagnostic error message.
#[moore_derive::query]
pub(crate) fn resolve_hierarchical_or_error<'a>(
    cx: &impl Context<'a>,
    name: Spanned<Name>,
    inside: &'a dyn ScopedNode<'a>,
) -> Result<&'a Def<'a>> {
    match cx.resolve_hierarchical(name.value, inside) {
        Some(def) => {
            // With `-V names` verbosity, report every successful resolution.
            if cx.sess().has_verbosity(Verbosity::NAMES) {
                let d = DiagBuilder2::note("name resolution")
                    .span(name.span)
                    .add_note(format!("Resolved `{}` to this {}:", name, def.node))
                    .span(def.node.span());
                cx.emit(d);
            }
            Ok(def)
        }
        None => {
            cx.emit(
                DiagBuilder2::error(format!("`{}` not found in {}", name.value, inside))
                    .span(name.span)
                    .add_note(format!("{} was defined here:", inside))
                    .span(inside.human_span()),
            );
            Err(())
        }
    }
}
/// Resolve an import to the scope it imports.
///
/// This function emits a diagnostic if the target of the import has no scope.
/// Being a query, this ensures that the error is only produced once.
#[moore_derive::query]
pub(crate) fn resolve_imported_scope<'a>(
    cx: &impl Context<'a>,
    node: &'a ast::ImportItem<'a>,
) -> Result<&'a dyn ScopedNode<'a>> {
    // Resolve the imported name, e.g. the `foo` in `import foo::*`.
    // `skip_imports` is true here to avoid recursing into import resolution
    // while resolving an import's own package name.
    let at = cx.scope_location(node);
    let inside = cx.resolve_local_or_error(node.pkg, at, true)?;

    // Ensure that what we have found is something we can actually perform a
    // namespace lookup into.
    let inside = match inside.node {
        DefNode::Ast(node) => Some(node),
        _ => None,
    };
    let inside = inside.and_then(|x| x.as_all().get_scoped_node());
    match inside {
        Some(x) => Ok(x),
        None => {
            cx.emit(
                DiagBuilder2::error(format!("name `{}` does not refer to a package", node.pkg))
                    .span(node.pkg.span),
            );
            Err(())
        }
    }
}
/// Recursively ensures that all scopes have been constructed and potential
/// diagnostics emitted.
///
/// This function helps in triggering naming conflicts at a defined point in the
/// compilation.
pub(crate) fn materialize_scope<'a>(cx: &impl Context<'a>, node: &'a dyn ScopedNode<'a>) {
    debug!("Materializing scope {:?}", node);
    // Generating the scope is what surfaces any collision diagnostics; then
    // recurse into every child scope so the whole tree gets built.
    for &subscope in cx.generated_scope(node).subscopes.iter() {
        materialize_scope(cx, subscope);
    }
}
/// Recursively resolves names throughout the AST.
///
/// Returns `true` if all names resolved successfully, `false` otherwise. This
/// function helps in triggering name resolution errors at a defined point in
/// the compilation.
#[moore_derive::query]
pub(crate) fn nameck<'a>(cx: &impl Context<'a>, node: &'a dyn ast::AnyNode<'a>) -> bool {
    debug!("Checking name resolution on {:?}", node);
    let mut rv = ResolutionVisitor::new(cx);
    node.accept(&mut rv);
    !rv.failed
}

/// A visitor that emits diagnostics for every resolved name.
pub(crate) struct ResolutionVisitor<'cx, C> {
    /// The context used to issue resolution queries.
    pub cx: &'cx C,
    /// Set to `true` once any resolution has failed.
    pub failed: bool,
}

impl<'cx, C> ResolutionVisitor<'cx, C> {
    /// Create a new name resolution visitor.
    pub fn new(cx: &'cx C) -> Self {
        ResolutionVisitor { cx, failed: false }
    }
}

/// The traversal that forces resolution of every identifier expression and
/// named type, accumulating failures in `self.failed`.
impl<'a, 'cx, C> ast::Visitor<'a> for ResolutionVisitor<'cx, C>
where
    C: Context<'a>,
    'a: 'cx,
{
    fn pre_visit_expr(&mut self, node: &'a ast::Expr<'a>) -> bool {
        // Don't resolve the left-hand side of named pattern fields,
        // since these refer to field names.
        if let Some(patfield) = node
            .get_parent()
            .and_then(|p| p.as_all().get_pattern_field())
        {
            match patfield.data {
                ast::PatternFieldData::Member(ref name_expr, ..) if name_expr.as_ref() == node => {
                    return false;
                }
                _ => (),
            }
        }

        // Don't resolve the expression in implicit and explicit ports.
        // These are meant to be analyzed in more detail by the port
        // list analysis query.
        if let Some(port) = node.get_parent().and_then(|p| p.as_all().get_port()) {
            debug!("Checking {:?} in port {:?}", node, port);
            match port.data {
                ast::PortData::Explicit { .. } | ast::PortData::Implicit(..) => return false,
                _ => (),
            }
        }

        match node.data {
            ast::IdentExpr(ident) => {
                self.failed |= self
                    .cx
                    .resolve_local_or_error(ident, self.cx.scope_location(node), false)
                    .is_err();
                false
            }
            _ => true,
        }
    }

    fn pre_visit_type(&mut self, node: &'a ast::Type<'a>) -> bool {
        match node.kind.data {
            ast::NamedType(ident) => {
                self.failed |= self
                    .cx
                    .resolve_local_or_error(ident, self.cx.scope_location(node), false)
                    .is_err();
                false
            }
            _ => true,
        }
    }
}
/// Any AST node that can be instantiated.
#[derive(Debug, Clone, Copy)]
pub enum InstTarget<'a> {
    /// A module instance.
    Module(&'a ast::Module<'a>),
    /// An interface instance.
    Interface(&'a ast::Interface<'a>),
}

impl<'a> InstTarget<'a> {
    /// Get the instantiated node as a `AnyNode`.
    pub fn as_any(&self) -> &'a dyn ast::AnyNode<'a> {
        match *self {
            Self::Module(x) => x,
            Self::Interface(x) => x,
        }
    }
}

/// Resolve the target of an instantiation.
///
/// Resolves the instance's target name locally and checks that it refers to a
/// module or interface; anything else produces a diagnostic and `Err`.
#[moore_derive::query]
pub(crate) fn resolve_inst_target<'a>(
    cx: &impl Context<'a>,
    inst: &'a ast::Inst<'a>,
) -> Result<InstTarget<'a>> {
    // Resolve the name of the instantiated module.
    let loc = cx.scope_location(inst);
    let def = cx.resolve_local_or_error(inst.target, loc, false)?;
    trace!("Resolved instance `{}` to {:?}", inst.target, def);

    // Check what exactly we are instantiating.
    let target = match def.node {
        DefNode::Ast(ast) => match ast.as_all() {
            ast::AllNode::Module(x) => Some(InstTarget::Module(x)),
            ast::AllNode::Interface(x) => Some(InstTarget::Interface(x)),
            _ => None,
        },
        _ => None,
    };
    match target {
        Some(x) => Ok(x),
        None => {
            cx.emit(
                DiagBuilder2::error(format!("`{}` is not a module or interface", inst.target))
                    .span(inst.target.span)
                    .add_note(format!("{} was declared here:", def.node))
                    .span(def.node.span()),
            );
            Err(())
        }
    }
}
| 33.701203 | 101 | 0.513279 |
28095fce86aec6d3133f3d10fb4ca7c033ad9ac3 | 3,241 | // Take a look at the license at the top of the repository in the LICENSE file.
use crate::TargetFlags;
use glib::translate::*;
use libc::c_char;
use std::ffi::CStr;
/// An owned, Rust-side counterpart of `ffi::GtkTargetEntry`, convertible to
/// and from the C struct via the glib translation traits below.
#[derive(Clone, Debug)]
#[repr(C)]
pub struct TargetEntry {
    // Target name; owned here, handed to C as a freshly translated pointer.
    target: String,
    // Flags copied verbatim into the C struct's `flags` field.
    flags: TargetFlags,
    // Application-assigned identifier copied into the C struct's `info` field.
    info: u32,
}
impl TargetEntry {
    /// Creates a new target entry with the given target name, flags, and
    /// application-chosen `info` identifier.
    pub fn new(target: &str, flags: TargetFlags, info: u32) -> TargetEntry {
        assert_initialized_main_thread!();
        let target = String::from(target);
        TargetEntry {
            target,
            flags,
            info,
        }
    }

    /// The target name of this entry.
    pub fn get_target(&self) -> &str {
        self.target.as_str()
    }

    /// The flags associated with this entry.
    pub fn get_flags(&self) -> TargetFlags {
        self.flags
    }

    /// The application-assigned identifier of this entry.
    pub fn get_info(&self) -> u32 {
        self.info
    }
}
/// Borrows a `TargetEntry` as a C `*const GtkTargetEntry`.
///
/// The boxed C struct and the translated target string are kept alive in
/// `Storage` for the lifetime of the returned `Stash`, so the raw pointers
/// stay valid while C code uses them.
#[doc(hidden)]
impl<'a> ToGlibPtr<'a, *const ffi::GtkTargetEntry> for TargetEntry {
    type Storage = (Box<ffi::GtkTargetEntry>, Stash<'a, *mut c_char, String>);

    #[inline]
    fn to_glib_none(&'a self) -> Stash<'a, *const ffi::GtkTargetEntry, Self> {
        let target = self.target.to_glib_none();
        let target_entry = Box::new(ffi::GtkTargetEntry {
            target: target.0,
            flags: self.flags.bits(),
            info: self.info,
        });
        Stash(&*target_entry, (target_entry, target))
    }
}
/// Borrows a `TargetEntry` as a mutable C `*mut GtkTargetEntry`.
///
/// Mirror of the `*const` conversion above: the boxed C struct and the
/// translated target string live in `Storage` so the pointers handed to C
/// remain valid for the stash's lifetime.
#[doc(hidden)]
impl<'a> ToGlibPtrMut<'a, *mut ffi::GtkTargetEntry> for TargetEntry {
    type Storage = (Box<ffi::GtkTargetEntry>, Stash<'a, *mut c_char, String>);

    #[inline]
    fn to_glib_none_mut(&'a mut self) -> StashMut<'a, *mut ffi::GtkTargetEntry, Self> {
        let target = self.target.to_glib_none();
        let mut target_entry = Box::new(ffi::GtkTargetEntry {
            target: target.0,
            flags: self.flags.bits(),
            info: self.info,
        });
        StashMut(&mut *target_entry, (target_entry, target))
    }
}
/// Copies a borrowed C `GtkTargetEntry` into an owned `TargetEntry` without
/// taking ownership of the C struct.
///
/// NOTE(review): `TargetFlags::from_bits(...).unwrap()` panics if the C side
/// sets bits unknown to the binding — `from_bits_truncate` would be the
/// forgiving alternative; confirm intended behavior before changing.
#[doc(hidden)]
impl FromGlibPtrNone<*const ffi::GtkTargetEntry> for TargetEntry {
    unsafe fn from_glib_none(ptr: *const ffi::GtkTargetEntry) -> Self {
        TargetEntry {
            target: CStr::from_ptr((*ptr).target).to_string_lossy().into_owned(),
            flags: TargetFlags::from_bits((*ptr).flags).unwrap(),
            info: (*ptr).info,
        }
    }
}

/// Same conversion as above for a `*mut` pointer; the pointee is only read.
#[doc(hidden)]
impl FromGlibPtrNone<*mut ffi::GtkTargetEntry> for TargetEntry {
    unsafe fn from_glib_none(ptr: *mut ffi::GtkTargetEntry) -> Self {
        TargetEntry {
            target: CStr::from_ptr((*ptr).target).to_string_lossy().into_owned(),
            flags: TargetFlags::from_bits((*ptr).flags).unwrap(),
            info: (*ptr).info,
        }
    }
}
/// Takes ownership of a C `GtkTargetEntry`: copies its contents into an owned
/// `TargetEntry` and then frees the C struct via `gtk_target_entry_free`.
#[doc(hidden)]
impl FromGlibPtrFull<*mut ffi::GtkTargetEntry> for TargetEntry {
    #[inline]
    unsafe fn from_glib_full(ptr: *mut ffi::GtkTargetEntry) -> Self {
        // Copy out all fields before releasing the C-side allocation.
        let target_entry = TargetEntry {
            target: CStr::from_ptr((*ptr).target).to_string_lossy().into_owned(),
            flags: TargetFlags::from_bits((*ptr).flags).unwrap(),
            info: (*ptr).info,
        };
        ffi::gtk_target_entry_free(ptr);
        target_entry
    }
}
/// Exposes the GObject type of `GtkTargetEntry` so the wrapper participates
/// in the glib type system (e.g. for use in `Value`s).
impl glib::StaticType for TargetEntry {
    fn static_type() -> glib::types::Type {
        skip_assert_initialized!();
        unsafe { from_glib(ffi::gtk_target_entry_get_type()) }
    }
}
| 28.182609 | 87 | 0.598889 |
90fe996e902818c02aec9ad5dd0545d2319f3599 | 1,488 | use std::ops::Deref;
/// A wrapper for [`drogue_cloud_endpoint_common::auth::DeviceAuthenticator`].
#[derive(Clone, Debug)]
pub struct DeviceAuthenticator(pub drogue_cloud_endpoint_common::auth::DeviceAuthenticator);

/// Forward all method calls transparently to the wrapped authenticator.
impl Deref for DeviceAuthenticator {
    type Target = drogue_cloud_endpoint_common::auth::DeviceAuthenticator;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// An implementation that **does not** verify, it only accepts the certificates.
///
/// This is required because: 1) we call the authentication service at a later time 2) contacting
/// the authentication service involves another network call, and may block. However, the
/// verifier isn't capable of running asynchronously. So we would block the whole I/O loop of the
/// endpoint.
#[cfg(feature = "rustls")]
pub struct AcceptAllClientCertVerifier;

#[cfg(feature = "rustls")]
impl rust_tls::server::ClientCertVerifier for AcceptAllClientCertVerifier {
    // Client certificates are optional; connections without one are allowed.
    fn client_auth_mandatory(&self) -> Option<bool> {
        Some(false)
    }

    // No CA subject hints are advertised to the client.
    fn client_auth_root_subjects(&self) -> Option<rust_tls::DistinguishedNames> {
        Some(rust_tls::DistinguishedNames::new())
    }

    // Unconditionally accept whatever chain the client presents; actual
    // authentication is performed later against the authentication service.
    fn verify_client_cert(
        &self,
        _end_entity: &rust_tls::Certificate,
        _intermediates: &[rust_tls::Certificate],
        _now: std::time::SystemTime,
    ) -> Result<rust_tls::server::ClientCertVerified, rust_tls::Error> {
        Ok(rust_tls::server::ClientCertVerified::assertion())
    }
}
| 34.604651 | 97 | 0.711022 |
2f6ddf0fc5053817d971eee4a39b671d9b3c95ca | 1,963 | //! GC algorithms from the MMTk suite.
//!
//! This module provides various GC plans, each of which implements a GC algorithm.
//! Generally a plan consists of a few parts:
//! * A plan type that implements the [`Plan`](crate::plan::Plan) trait, which defines
//! spaces used in the plan, and their behaviors in GC and page accounting.
//! * A mutator definition, which describes the mapping between allocators and allocation semantics,
//! and the mapping between allocators and spaces. If the plan needs barrier, the barrier definition is
//! also included here.
//! * A constant for [`PlanConstraints`](crate::plan::PlanConstraints), which defines
//! plan-specific constants.
//! * Plan-specific [`GCWork`](crate::scheduler::GCWork), which is scheduled during GC. If the plan
//! implements a copying GC, a [`CopyContext`](crate::plan::CopyContext) also needs to be provided.
//!
//! For more about implementing a plan, it is recommended to read the [MMTk tutorial](/docs/tutorial/Tutorial.md).
mod barriers;
pub use barriers::BarrierSelector;
mod controller_collector_context;
mod global;
pub(crate) use global::create_mutator;
pub(crate) use global::create_plan;
pub use global::AllocationSemantics;
pub use global::CopyContext;
pub(crate) use global::GcStatus;
pub use global::Plan;
mod mutator_context;
pub use mutator_context::Mutator;
pub use mutator_context::MutatorContext;
mod plan_constraints;
pub use plan_constraints::PlanConstraints;
mod tracelocal;
pub use tracelocal::TraceLocal;
mod transitive_closure;
pub use transitive_closure::TransitiveClosure;
mod gencopy;
mod marksweep;
mod nogc;
mod semispace;
// Expose plan constraints as public. Though a binding can get them from plan.constraints(),
// it is possible for performance reasons that they want the constraints as constants.
pub use gencopy::GENCOPY_CONSTRAINTS;
pub use marksweep::MS_CONSTRAINTS;
pub use nogc::NOGC_CONSTRAINTS;
pub use semispace::SS_CONSTRAINTS;
| 35.690909 | 114 | 0.768212 |
48e631afefaaf0e9dc85af8a1d84beb133da6a58 | 2,417 | use multiqueue2 as multiqueue;
use crossbeam::scope;
use time::OffsetDateTime;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Barrier;
use crate::multiqueue::{broadcast_queue_with, wait, BroadcastReceiver, BroadcastSender};
fn precise_time_ns() -> u32{
OffsetDateTime::now_utc().nanosecond() - OffsetDateTime::UNIX_EPOCH.nanosecond()
}
/// Consumer loop: drains the stream until all senders disconnect, optionally
/// checking ordering, and adds the elapsed nanoseconds to `sum`.
#[inline(never)]
fn recv(barrier: &Barrier, mreader: BroadcastReceiver<u64>, sum: &AtomicUsize, check: bool) {
    // Convert to a single-consumer handle for this stream.
    let reader = mreader.into_single().unwrap();
    // Start all producers and consumers at the same instant.
    barrier.wait();
    let start = precise_time_ns();
    let mut cur = 0;
    while let Ok(pushed) = reader.recv() {
        // With a single producer (`check == true`) values must arrive in
        // strictly increasing order starting at 0.
        if cur != pushed && check {
            panic!("Got {}, expected {}", pushed, cur);
        }
        cur += 1;
    }
    // NOTE(review): `precise_time_ns()` is a u32 that wraps; this subtraction
    // can underflow (panics in debug builds) on long runs — confirm intended.
    sum.fetch_add((precise_time_ns() - start) as usize, Ordering::SeqCst);
}
/// Producer loop: after the common barrier, pushes `0..num_push` into the
/// queue, spinning on `try_send` whenever the bounded queue is full.
fn send(barrier: &Barrier, writer: BroadcastSender<u64>, num_push: usize) {
    barrier.wait();
    for value in 0..num_push as u64 {
        // Busy-wait until the queue has room for this element.
        while writer.try_send(value).is_err() {}
    }
}
/// Runs one benchmark configuration with `n_senders` producer threads and
/// `n_readers` broadcast streams, then prints nanoseconds per push/pop pair.
fn runit(name: &str, n_senders: usize, n_readers: usize) {
    let num_do = 100_000_000;
    let (writer, reader) = broadcast_queue_with(20000, wait::BlockingWait::new());
    // One slot per worker thread plus one for this coordinating thread.
    let barrier = Barrier::new(1 + n_senders + n_readers);
    let bref = &barrier;
    let ns_atomic = AtomicUsize::new(0);
    scope(|scope| {
        for _ in 0..n_senders {
            let w = writer.clone();
            scope.spawn(move |_| {
                send(bref, w, num_do);
            });
        }
        // Drop the coordinator's sender handle so the readers see EOF once the
        // worker senders finish.
        writer.unsubscribe();
        for _ in 0..n_readers {
            let aref = &ns_atomic;
            let r = reader.add_stream();
            // Ordering can only be asserted when a single producer exists.
            let check = n_senders == 1;
            scope.spawn(move |_| {
                recv(bref, r, aref, check);
            });
        }
        // Drop the coordinator's receiver handle likewise.
        reader.unsubscribe();
        // Release all workers simultaneously.
        barrier.wait();
    })
    .unwrap();
    // Average the per-reader elapsed time, then normalize per item.
    let ns_spent = (ns_atomic.load(Ordering::Relaxed) as f64) / n_readers as f64;
    let ns_per_item = ns_spent / (num_do as f64);
    println!(
        "Time spent doing {} push/pop pairs for {} was {} ns per item",
        num_do, name, ns_per_item
    );
}
fn main() {
runit("1p::1c", 1, 1);
runit("1p::1c_2b", 1, 2);
runit("1p::1c_3b", 1, 3);
runit("2p::1c", 2, 1);
runit("2p::1c_2b", 2, 2);
runit("2p::1c_3b", 2, 3);
}
| 28.435294 | 93 | 0.563095 |
1c31517845bec0562249bca0b05a168006e35a0e | 2,226 | use crate::{Filter, NodeIterator, SourceCode, ValidationError, Validator};
use tree_sitter::Node;
/// Unwraps an `Option`, panicking with a fixed message when it is `None`.
#[allow(unused_macros)]
macro_rules! assert_some {
    ($expression:expr) => {
        match $expression {
            Some(item) => item,
            None => panic!("assertion failed: Option instance is not some"),
        }
    };
}

/// Unwraps the `Err` of a `Result`, panicking (with the `Ok` value in the
/// message) when the expression is not an error.
#[allow(unused_macros)]
macro_rules! assert_err {
    ($expression:expr) => {
        match $expression {
            Err(err) => err,
            ok => panic!("assertion failed: {:?} does not match Err()", ok),
        }
    };
}
/// Walks every node the filter admits, starting at `node`, and runs the
/// validator on each; stops at the first validation error.
fn recur_validate(
    node: &Node,
    source: &str,
    validator: Box<dyn Validator>,
    filter: &dyn Filter,
) -> Result<(), ValidationError> {
    NodeIterator::new(node.walk(), source, filter)
        .try_for_each(|n| validator.validate(&n, source))
}
/// Returns `true` when `node` has kind `node_kind` and its lowercased source
/// text contains `pat`.
pub fn node_lowercase_contains(node_kind: &str, node: &Node, source: &str, pat: &str) -> bool {
    get_text(node_kind, node, source)
        .map_or(false, |text| text.to_lowercase().contains(pat))
}

/// Returns `true` when `node` has kind `node_kind` and its lowercased source
/// text equals `s`.
pub fn node_lowercase_eq(node_kind: &str, node: &Node, source: &str, s: &str) -> bool {
    get_text(node_kind, node, source).map_or(false, |text| text.to_lowercase() == s)
}
/// Returns the UTF-8 source text of `node` when it is of kind `node_kind`.
/// Returns `None` for other node kinds, and also (after logging) when the
/// node's byte range is not valid UTF-8.
fn get_text<'a>(node_kind: &str, node: &Node, source: &'a str) -> Option<&'a str> {
    // Only nodes of the requested kind yield text.
    if node.kind() != node_kind {
        return None;
    }
    match node.utf8_text(source.as_bytes()) {
        Ok(text) => Some(text),
        Err(err) => {
            error!("failed to get identifier: {:?}", err);
            None
        }
    }
}
#[allow(dead_code)]
/// Test helper: panics when validating `source_code` reports any error.
pub fn assert_source_ok(source_code: &str, validator: Box<dyn Validator>, filter: &dyn Filter) {
    assert!(validate(source_code, validator, filter).is_ok());
}
/// Parses `source_code` and runs `validator` over every node accepted by
/// `filter`, returning the first validation error encountered (if any).
///
/// Panics (via `assert_some!`) when the source cannot be parsed at all.
pub fn validate(
    source_code: &str,
    validator: Box<dyn Validator>,
    filter: &dyn Filter,
) -> Result<(), ValidationError> {
    let parse_result = SourceCode::parse(source_code);
    let source = assert_some!(parse_result);
    let root = source.get_root_node();
    recur_validate(&root, source_code, validator, filter)
}
| 27.825 | 96 | 0.590296 |
036a4963deb9520876478a0faffa2576f41fbc36 | 9,598 | use crate::marker::Marker;
use core::fmt;
use paste::paste;
use serde::de::{self, Visitor};
use self::{enum_::UnitVariantAccess, map::MapAccess, seq::SeqAccess};
mod enum_;
mod map;
mod seq;
use super::Error;
type Result<T> = core::result::Result<T, Error>;
#[cfg(test)]
// Test-only tracing helper: prints `<prefix><function_name><T>` followed by
// up to the next 10 unread bytes of the input buffer.
fn print_debug<T>(prefix: &str, function_name: &str, de: &Deserializer) {
    // Under no_std, pull in `std` just for `println!` in test builds.
    #[cfg(not(feature = "std"))]
    extern crate std;
    #[cfg(not(feature = "std"))]
    use std::println;
    println!(
        "{}{}<{}> ({:?})",
        prefix,
        function_name,
        core::any::type_name::<T>(),
        &de.slice[de.index..core::cmp::min(de.slice.len(), de.index + 10)]
    );
}
#[cfg(test)]
// Test-only tracing helper: like `print_debug`, but also prints the value
// just decoded, followed by up to the next 10 unread bytes of the buffer.
fn print_debug_value<T, V: core::fmt::Debug>(function_name: &str, de: &Deserializer, value: &V) {
    // Under no_std, pull in `std` just for `println!` in test builds.
    #[cfg(not(feature = "std"))]
    extern crate std;
    #[cfg(not(feature = "std"))]
    use std::println;
    println!(
        "{}<{}> => {:?} ({:?})",
        function_name,
        core::any::type_name::<T>(),
        value,
        &de.slice[de.index..core::cmp::min(de.slice.len(), de.index + 10)]
    );
}
// In non-test builds the tracing helpers compile to empty no-ops, so the
// call sites sprinkled through the deserializer cost nothing in release.
#[cfg(not(test))]
fn print_debug<T>(_prefix: &str, _function_name: &str, _de: &Deserializer) {}
#[cfg(not(test))]
fn print_debug_value<T, V: core::fmt::Debug>(_function_name: &str, _de: &Deserializer, _value: &V) {}
/// MessagePack deserializer reading from a borrowed byte slice.
pub(crate) struct Deserializer<'b> {
    slice: &'b [u8], // full input buffer
    index: usize,    // read cursor into `slice`
}
impl<'a> Deserializer<'a> {
    /// Creates a deserializer positioned at the start of `slice`.
    pub const fn new(slice: &'a [u8]) -> Deserializer<'_> { Deserializer { slice, index: 0 } }
    /// Advances the cursor past one already-inspected byte.
    fn eat_byte(&mut self) { self.index += 1; }
    /// Peeks at the next marker byte without consuming it; `None` at end of input.
    fn peek(&mut self) -> Option<Marker> { Some(Marker::from_u8(*self.slice.get(self.index)?)) }
}
// NOTE(deserialize_*signed) we avoid parsing into u64 and then casting to a smaller integer, which
// is what upstream does, to avoid pulling in 64-bit compiler intrinsics, which waste a few KBs of
// Flash, when targeting non 64-bit architectures
// Generates a `deserialize_<ty>` method that delegates to the matching
// `read_<ty>` decoder, advances the cursor, and forwards the decoded value
// to the visitor's `visit_<ty>` hook. Identifier splicing is done via `paste!`.
macro_rules! deserialize_primitive {
    ($ty:ident) => {
        paste! {
            fn [<deserialize_ $ty>]<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value>
            {
                print_debug::<V>("Deserializer::deserialize_", stringify!($ty), &self);
                let (value, len) = paste! { super::[<read_ $ty>](&self.slice[self.index..])? };
                self.index += len;
                print_debug_value::<$ty, $ty>(stringify!(concat_idents!(Deserializer::deserialize_, $ty)), &self, &value);
                paste! { visitor.[<visit_ $ty>](value) }
            }}
    };
}
// Expands `deserialize_primitive!` once for each listed primitive type.
macro_rules! deserialize_primitives {
    ($($ty:ident),*) => { $( deserialize_primitive!($ty); )* };
}
impl<'a, 'de> de::Deserializer<'de> for &'a mut Deserializer<'de> {
    type Error = Error;
    deserialize_primitives!(bool, u8, u16, u32, u64, i8, i16, i32, i64, f32, f64);
    /// Decodes a string and hands the visitor a slice borrowed from the input.
    fn deserialize_str<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "str", &self);
        let (s, len) = super::read_str(&self.slice[self.index..])?;
        self.index += len;
        visitor.visit_borrowed_str(s)
    }
    /// Decodes a binary blob and hands the visitor a slice borrowed from the input.
    fn deserialize_bytes<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "bytes", &self);
        let (value, len) = super::read_bin(&self.slice[self.index..])?;
        self.index += len;
        visitor.visit_borrowed_bytes(value)
    }
    fn deserialize_byte_buf<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "byte_buf", &self);
        self.deserialize_bytes(visitor)
    }
    /// A `nil` marker maps to `None`; anything else is deserialized as `Some(_)`.
    fn deserialize_option<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "option", &self);
        let marker = self.peek().ok_or(Error::EndOfBuffer)?;
        match marker {
            Marker::Null => {
                self.eat_byte();
                visitor.visit_none()
            }
            _ => visitor.visit_some(self),
        }
    }
    /// Reads the array header, then lets `SeqAccess` drive element decoding.
    fn deserialize_seq<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "seq", &self);
        let (len, header_len) = crate::decode::read_array_len(&self.slice[self.index..])?;
        self.index += header_len;
        visitor.visit_seq(SeqAccess::new(self, len))
    }
    // Tuples and tuple structs are encoded as plain arrays.
    fn deserialize_tuple<V: Visitor<'de>>(self, _len: usize, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "tuple", &self);
        self.deserialize_seq(visitor)
    }
    fn deserialize_tuple_struct<V: Visitor<'de>>(self, _name: &'static str, _len: usize, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "tuple_struct", &self);
        self.deserialize_seq(visitor)
    }
    /// Reads the map header, then lets `MapAccess` drive key/value decoding.
    fn deserialize_map<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "map", &self);
        let (len, header_len) = crate::decode::read_map_len(&self.slice[self.index..])?;
        self.index += header_len;
        visitor.visit_map(MapAccess::new(self, len))
    }
    // Structs are encoded as maps of field name to value.
    fn deserialize_struct<V: Visitor<'de>>(self, _name: &'static str, _fields: &'static [&'static str], visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "struct", &self);
        self.deserialize_map(visitor)
    }
    // NOTE(review): only unit variants are supported here — `UnitVariantAccess`
    // presumably rejects data-carrying variants; confirm against enum_.rs.
    fn deserialize_enum<V: Visitor<'de>>(self, _name: &'static str, _variants: &'static [&'static str], visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "enum", &self);
        visitor.visit_enum(UnitVariantAccess::new(self))
    }
    fn deserialize_identifier<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "identifier", &self);
        self.deserialize_str(visitor)
    }
    /// Cannot parse a value without knowing its expected type, so the encoded
    /// value is skipped over and the visitor is handed a unit instead.
    fn deserialize_any<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "any", &self);
        let (_, n) = super::skip_any(&self.slice[self.index..])?;
        self.index += n;
        visitor.visit_unit()
    }
    /// Used to throw out fields that we don’t want to keep in our structs.
    fn deserialize_ignored_any<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "ignored_any", &self);
        self.deserialize_any(visitor)
    }
    /// Accepts either a `nil` marker or an empty fixarray as a unit value;
    /// any other marker is an `InvalidType` error.
    fn deserialize_unit<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "unit", &self);
        let marker = self.peek().ok_or(Error::EndOfBuffer)?;
        match marker {
            Marker::Null | Marker::FixArray(0) => {
                self.eat_byte();
                visitor.visit_unit()
            }
            _ => Err(Error::InvalidType),
        }
    }
    /// A unit struct is encoded exactly like a plain unit (the name is ignored).
    fn deserialize_unit_struct<V: Visitor<'de>>(self, _name: &'static str, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "unit_struct", &self);
        self.deserialize_unit(visitor)
    }
    fn deserialize_char<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "char", &self);
        //TODO Need to decide how to encode this. Probably as a str?
        self.deserialize_str(visitor)
    }
    fn deserialize_newtype_struct<V: Visitor<'de>>(self, _name: &'static str, visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "newtype_struct", &self);
        visitor.visit_newtype_struct(self)
    }
    /// Unsupported. String is not available in no-std.
    fn deserialize_string<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
        print_debug::<V>("Deserializer::deserialize_", "string", &self);
        unreachable!()
    }
}
impl ::serde::de::StdError for Error {}
impl de::Error for Error {
    // Builds a deserializer error from an arbitrary message. Without the
    // `custom-error-messages` feature the message is discarded entirely.
    #[cfg_attr(not(feature = "custom-error-messages"), allow(unused_variables))]
    fn custom<T>(msg: T) -> Self
    where T: fmt::Display {
        #[cfg(not(feature = "custom-error-messages"))]
        {
            Error::CustomError
        }
        #[cfg(feature = "custom-error-messages")]
        {
            use core::fmt::Write;
            let mut string = heapless::String::new();
            // "{:.64}" truncates the rendered message to 64 characters;
            // presumably the heapless string capacity is >= 64 — the
            // `unwrap` would panic otherwise. TODO confirm capacity.
            write!(string, "{:.64}", msg).unwrap();
            Error::CustomErrorWithMessage(string)
        }
    }
}
impl fmt::Display for Error {
    // Debug builds render a human-readable message per variant.
    #[cfg(debug_assertions)]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "{}",
            match self {
                Error::InvalidType => "Unexpected type encountered.",
                Error::OutOfBounds => "Index out of bounds.",
                Error::EndOfBuffer => "EOF while parsing.",
                Error::CustomError => "Did not match deserializer's expected format.",
                #[cfg(feature = "custom-error-messages")]
                Error::CustomErrorWithMessage(msg) => msg.as_str(),
            }
        )
    }
    // Release builds emit nothing — presumably to keep message strings out of
    // the binary on embedded targets; confirm this is intentional.
    #[cfg(not(debug_assertions))]
    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { Ok(()) }
}
| 38.8583 | 138 | 0.577412 |
5d0077a48a06ebd5a2b936a092ddb17dd4eb2701 | 1,431 |
/// Yew component that renders the "west" arrow icon as an inline SVG.
pub struct IconWest {
    props: crate::Props, // size/color/stroke settings supplied by the parent
}
impl yew::Component for IconWest {
    type Properties = crate::Props;
    type Message = ();
    // Stores the initial props; the link is unused because the component
    // handles no messages.
    fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
    {
        Self { props }
    }
    fn update(&mut self, _: self::Message) -> yew::prelude::ShouldRender
    {
        true
    }
    // NOTE(review): new props are dropped and `false` is returned, so the
    // component never re-renders when its props change — confirm intended.
    fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
    {
        false
    }
    // Renders the outer <svg> configured from props (with defaults for any
    // unset option) wrapping the fixed icon path.
    fn view(&self) -> yew::prelude::Html
    {
        yew::prelude::html! {
            <svg
                class=self.props.class.unwrap_or("")
                width=self.props.size.unwrap_or(24).to_string()
                height=self.props.size.unwrap_or(24).to_string()
                viewBox="0 0 24 24"
                fill=self.props.fill.unwrap_or("none")
                stroke=self.props.color.unwrap_or("currentColor")
                stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
                stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
                stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
            >
                <svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24" viewBox="0 0 24 24" width="24"><rect fill="none" height="24" width="24"/><path d="M9,19l1.41-1.41L5.83,13H22V11H5.83l4.59-4.59L9,5l-7,7L9,19z"/></svg>
            </svg>
        }
    }
}
| 31.108696 | 248 | 0.571628 |
69a8b951d33107a36556ee68d3dd25e38984d9c2 | 1,693 | use crate::commitment::CommitmentKey;
use crate::gang::{GroupElement, Scalar};
use crate::gargamel::{self, PublicKey, SecretKey};
use cryptoxide::blake2b::Blake2b;
use cryptoxide::chacha20::ChaCha20;
use cryptoxide::digest::Digest;
#[derive(Clone)]
pub struct Encrypted {
e1: gargamel::Ciphertext,
e2: Box<[u8]>,
}
/// Derives a ChaCha20 stream cipher from a group element: the element's bytes
/// are hashed with Blake2b into 44 bytes, split into a 32-byte key and a
/// 12-byte nonce.
fn bc_key(ck: &GroupElement) -> ChaCha20 {
    let mut out = [0u8; 44];
    let mut h = Blake2b::new(44);
    h.input(&ck.to_bytes());
    h.result(&mut out);
    ChaCha20::new(&out[0..32], &out[32..44])
}
/// Applies the ChaCha20 keystream derived from `ck` to `m` and returns the
/// result. Because this is an XOR stream cipher, the same call performs both
/// encryption and decryption.
fn bc_process(ck: &GroupElement, m: &[u8]) -> Vec<u8> {
    let mut buffer = m.to_vec();
    bc_key(ck).process_mut(&mut buffer);
    buffer
}
/// Hybrid encryption: ElGamal-encrypts the commitment point `ck.h` under `pk`
/// with randomness `r` (so the recipient can recover it), and symmetrically
/// encrypts `m` with a cipher keyed from that same point.
pub fn encrypt(pk: &PublicKey, ck: &CommitmentKey, m: &[u8], r: &Scalar) -> Encrypted {
    let e1 = gargamel::encrypt_point(pk, &ck.h, r);
    let e2 = bc_process(&ck.h, m).into_boxed_slice();
    Encrypted { e1, e2 }
}
#[allow(dead_code)]
/// Inverse of `encrypt`: recovers the commitment point from the ElGamal part,
/// then strips the symmetric layer from the payload.
pub fn decrypt(sk: &SecretKey, e: &Encrypted) -> Vec<u8> {
    let ck = gargamel::decrypt_point(sk, &e.e1);
    bc_process(&ck, &e.e2)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::gargamel::Keypair;
    use rand_chacha::ChaCha20Rng;
    use rand_core::SeedableRng;
    #[test]
    fn encrypt_decrypt() {
        // Fixed seed so the test is fully deterministic.
        let mut r = ChaCha20Rng::from_seed([0u8; 32]);
        let ck = CommitmentKey::generate(&mut r);
        let k = SecretKey::generate(&mut r);
        let k = Keypair::from_secretkey(k);
        // `r` is shadowed: from here it is the ephemeral encryption scalar.
        let r = Scalar::random(&mut r);
        let m = [1, 3, 4, 5, 6, 7];
        let encrypted = encrypt(&k.public_key, &ck, &m, &r);
        let result = decrypt(&k.secret_key, &encrypted);
        // The round trip must reproduce the plaintext exactly.
        assert_eq!(&m[..], &result[..])
    }
}
| 26.046154 | 87 | 0.603071 |
d686f02a9fe39ee5574120431afcbd2382f0b5c0 | 149 | fn main() {
() < std::mem::size_of::<_>(); //~ ERROR: mismatched types
[0u8; std::mem::size_of::<_>()]; //~ ERROR: type annotations needed
}
| 29.8 | 71 | 0.557047 |
fc79d5fc18e3704317d24db85d66f79eb9716c77 | 15,716 | //! m-of-n multi-sig address
use itertools::Itertools;
use parity_scale_codec::{Decode, Encode};
use super::{Error, ErrorKind, PublicKey, Result};
use chain_core::common::{MerkleTree, Proof, H256};
use chain_core::tx::data::address::ExtendedAddr;
use chain_core::tx::witness::tree::RawPubkey;
// TODO: Remove pub
/// m-of-n multi-sig address
#[derive(Debug, Encode, Decode)]
pub struct MultiSigAddress {
    /// Number of required co-signers
    // Invariant (enforced by `new`): 1 <= m <= n.
    pub m: u64,
    /// Total number of co-signers
    pub n: u64,
    /// Public key of current signer
    pub self_public_key: PublicKey,
    /// Merkle tree with different combinations of `n` public keys as leaf nodes
    // Each leaf is the combined raw pubkey of one m-sized subset of signers.
    pub merkle_tree: MerkleTree<RawPubkey>,
}
impl From<MultiSigAddress> for ExtendedAddr {
    // Converts by collapsing the address to its Merkle-root (`OrTree`) form.
    fn from(addr: MultiSigAddress) -> Self {
        addr.to_extended_addr()
    }
}
impl MultiSigAddress {
    /// Create MultiSig address from list of public keys
    ///
    /// Returns `ErrorKind::InvalidInput` when the key list is empty, the
    /// threshold is zero or exceeds the number of keys, or `self_public_key`
    /// is not among `public_keys`.
    pub fn new(
        public_keys: Vec<PublicKey>,
        self_public_key: PublicKey,
        required_signers: usize,
    ) -> Result<Self> {
        let total_signers = public_keys.len();
        if required_signers > total_signers
            || total_signers == 0
            || required_signers == 0
            || !public_keys.contains(&self_public_key)
        {
            // TODO: Return different error kinds for different input errors
            return Err(ErrorKind::InvalidInput.into());
        }
        // One Merkle leaf per m-of-n combination of combined public keys.
        let combinations = public_key_combinations(public_keys, required_signers)?;
        let merkle_tree = MerkleTree::new(combinations);
        Ok(MultiSigAddress {
            m: required_signers as u64,
            n: total_signers as u64,
            self_public_key,
            merkle_tree,
        })
    }
    #[inline]
    /// Returns root hash of the underlying MerkleTree
    pub fn root_hash(&self) -> H256 {
        self.merkle_tree.root_hash()
    }
    /// Generate inclusion proof of particular public keys combination in the
    /// MultiSig address
    ///
    /// Exactly `m` public keys must be supplied; their order does not matter
    /// because they are sorted before being combined.
    pub fn generate_proof(
        &self,
        mut public_keys: Vec<PublicKey>,
    ) -> Result<Option<Proof<RawPubkey>>> {
        if public_keys.len() != self.required_signers() {
            return Err(Error::new(
                ErrorKind::InvalidInput,
                format!(
                    "{} public keys are required to generate a proof",
                    self.required_signers()
                ),
            ));
        }
        // Sort to match the canonical ordering used when building the leaves.
        public_keys.sort();
        let raw_pubkey = PublicKey::combine_to_raw_pubkey(&public_keys)?;
        Ok(self.merkle_tree.generate_proof(raw_pubkey))
    }
    /// Returns ExtendedAddr representation of the MultiSigAddress
    #[inline]
    pub fn to_extended_addr(&self) -> ExtendedAddr {
        ExtendedAddr::OrTree(self.root_hash())
    }
    /// Returns required number of co-signers
    #[inline]
    pub fn required_signers(&self) -> usize {
        self.m as usize
    }
    /// Returns total number of co-signers
    #[inline]
    pub fn total_signers(&self) -> usize {
        self.n as usize
    }
    /// Returns self public key
    #[inline]
    pub fn self_public_key(&self) -> PublicKey {
        self.self_public_key.clone()
    }
}
/// Builds the sorted list of combined raw public keys, one per
/// `required_signers`-sized combination of `public_keys`.
///
/// Each combination is sorted before combining so that signer order is
/// canonical, and the final list is sorted so the resulting Merkle tree is
/// deterministic.
fn public_key_combinations(
    public_keys: Vec<PublicKey>,
    required_signers: usize,
) -> Result<Vec<RawPubkey>> {
    if public_keys.is_empty() {
        return Err(Error::new(
            ErrorKind::InvalidInput,
            "Length of public keys cannot be zero",
        ));
    }
    if required_signers > public_keys.len() {
        return Err(Error::new(
            ErrorKind::InvalidInput,
            "Length of public keys cannot be less than number of required signers",
        ));
    }
    if required_signers == 0 {
        return Err(Error::new(
            ErrorKind::InvalidInput,
            "Number of required signers cannot be zero",
        ));
    }
    let mut combinations = public_keys
        .into_iter()
        .combinations(required_signers)
        .map(|mut combination| {
            combination.sort();
            PublicKey::combine_to_raw_pubkey(&combination)
        })
        .collect::<Result<Vec<RawPubkey>>>()?;
    combinations.sort();
    Ok(combinations)
}
#[cfg(test)]
mod multi_sig_tests {
use super::*;
use crate::PrivateKey;
mod generate_proof {
use super::*;
#[test]
fn should_throw_error_when_number_of_provided_public_keys_is_different_from_required_signers(
) {
let public_key_1 = PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
);
let public_key_2 = PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
);
let public_keys = vec![public_key_1.clone(), public_key_2.clone()];
let required_signers = 2;
let multi_sig_address =
MultiSigAddress::new(public_keys, public_key_1.clone(), required_signers)
.expect("Should create MultiSig address");
let target_public_keys = vec![public_key_1.clone()];
let maybe_proof_result = multi_sig_address.generate_proof(target_public_keys);
assert!(maybe_proof_result.is_err());
assert_eq!(
maybe_proof_result.unwrap_err().kind(),
ErrorKind::InvalidInput
);
}
}
mod new {
use super::*;
#[test]
fn should_throw_error_when_public_key_list_is_empty() {
let public_keys = Vec::new();
let self_public_key = PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
);
let required_signers = 1;
let result = MultiSigAddress::new(public_keys, self_public_key, required_signers);
assert!(result.is_err());
assert_eq!(result.unwrap_err().kind(), ErrorKind::InvalidInput);
}
#[test]
fn should_throw_error_when_required_signers_is_zero() {
let public_key = PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
);
let public_keys = vec![public_key.clone()];
let self_public_key = PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
);
let required_signers = 0;
let result = MultiSigAddress::new(public_keys, self_public_key, required_signers);
assert!(result.is_err());
assert_eq!(result.unwrap_err().kind(), ErrorKind::InvalidInput);
}
#[test]
fn should_throw_error_when_self_public_key_is_not_in_public_key_list() {
let public_key_1 = PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
);
let public_key_2 = PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
);
let public_keys = vec![public_key_1.clone()];
let required_signers = 1;
let result = MultiSigAddress::new(public_keys, public_key_2, required_signers);
assert!(result.is_err());
assert_eq!(result.unwrap_err().kind(), ErrorKind::InvalidInput);
}
#[test]
fn should_throw_error_when_required_signers_is_greater_than_total_signers() {
let public_key_1 = PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
);
let public_key_2 = PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
);
let public_keys = vec![public_key_1.clone(), public_key_2.clone()];
let required_signers = 10;
let result = MultiSigAddress::new(public_keys, public_key_1, required_signers);
assert!(result.is_err());
assert_eq!(result.unwrap_err().kind(), ErrorKind::InvalidInput);
}
#[test]
fn should_work() {
let public_key_1 = PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
);
let public_key_2 = PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
);
let public_key_3 = PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
);
let public_keys = vec![
public_key_1.clone(),
public_key_2.clone(),
public_key_3.clone(),
];
let required_signers = 2;
let result = MultiSigAddress::new(public_keys, public_key_1, required_signers);
assert!(result.is_ok());
}
}
mod public_key_combinations {
use super::*;
#[test]
fn should_throw_error_when_public_keys_is_empty() {
let required_signers = 1;
let result = public_key_combinations(Vec::new(), required_signers);
assert!(result.is_err());
assert_eq!(
result
.expect_err("Length of public keys cannot be zero")
.kind(),
ErrorKind::InvalidInput
);
}
#[test]
fn should_throw_error_when_required_signers_is_larger_than_total_public_keys() {
let public_keys = vec![PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
)];
let required_signers = 2;
let result = public_key_combinations(public_keys, required_signers);
assert!(result.is_err());
assert_eq!(
result
.expect_err(
"Length of public keys cannot be less than number of required signers"
)
.kind(),
ErrorKind::InvalidInput
);
}
#[test]
fn should_throw_error_when_required_signers_is_zero() {
let public_keys = vec![PublicKey::from(
&PrivateKey::new().expect("Derive public key from private key should work"),
)];
let required_signers = 0;
let result = public_key_combinations(public_keys, required_signers);
assert!(result.is_err());
assert_eq!(
result
.expect_err("Number of required signers cannot be zero")
.kind(),
ErrorKind::InvalidInput
);
}
#[test]
fn should_return_result_of_raw_pub_key_combinations() {
// 8f07ddd5e9f5179cff19486034181ed76505baaad53e5d994064127b56c5841bd1e8a8697ad42251de39f6a72081dfdf42abc542a6d6fe0715548b588fafbe70
let public_key_1 = PublicKey::from(
&PrivateKey::deserialize_from(&[0x01; 32]).expect("32 bytes, within curve order"),
);
// 66074d25a751c4743342c90ad8ead9454daa00d9b9aed29bca321036d16c4b4dd036ed0d31bd98c1546bb6577f852e668442060feb7c256d8b20fed0a2ad3e2a
let public_key_2 = PublicKey::from(
&PrivateKey::deserialize_from(&[0x02; 32]).expect("32 bytes, within curve order"),
);
// 37e31fcbbdbdc5c3449a7e533cc8a68fac67c827321323273d50348106e61f5358546af286730e3bd9924e52cd0f205a70ac475a67842aa81b481ee765c2929e
let public_key_3 = PublicKey::from(
&PrivateKey::deserialize_from(&[0x03; 32]).expect("32 bytes, within curve order"),
);
let public_keys = vec![
public_key_1.clone(),
public_key_2.clone(),
public_key_3.clone(),
];
let required_signers = 1;
assert_eq!(
public_key_combinations(public_keys.clone(), required_signers).unwrap(),
vec![
RawPubkey::from(&public_key_2),
RawPubkey::from(&public_key_3),
RawPubkey::from(&public_key_1),
]
);
let required_signers = 2;
assert_eq!(
public_key_combinations(public_keys.clone(), required_signers).unwrap(),
vec![
RawPubkey::from(
PublicKey::combine(&vec![public_key_2.clone(), public_key_1.clone()])
.expect("Combine public keys should work")
.0
),
RawPubkey::from(
PublicKey::combine(&vec![public_key_3.clone(), public_key_2.clone()])
.expect("Combine public keys should work")
.0
),
RawPubkey::from(
PublicKey::combine(&vec![public_key_3.clone(), public_key_1.clone()])
.expect("Combine public keys should work")
.0
),
]
);
let required_signers = 3;
assert_eq!(
public_key_combinations(public_keys.clone(), required_signers).unwrap(),
vec![RawPubkey::from(
PublicKey::combine(&vec![
public_key_3.clone(),
public_key_2.clone(),
public_key_1.clone()
])
.expect("Combine public keys should work")
.0
),]
);
}
}
#[test]
fn check_root_hash_flow() {
// 8f07ddd5e9f5179cff19486034181ed76505baaad53e5d994064127b56c5841bd1e8a8697ad42251de39f6a72081dfdf42abc542a6d6fe0715548b588fafbe70
let public_key_1 = PublicKey::from(
&PrivateKey::deserialize_from(&[0x01; 32]).expect("32 bytes, within curve order"),
);
// 66074d25a751c4743342c90ad8ead9454daa00d9b9aed29bca321036d16c4b4dd036ed0d31bd98c1546bb6577f852e668442060feb7c256d8b20fed0a2ad3e2a
let public_key_2 = PublicKey::from(
&PrivateKey::deserialize_from(&[0x02; 32]).expect("32 bytes, within curve order"),
);
// 37e31fcbbdbdc5c3449a7e533cc8a68fac67c827321323273d50348106e61f5358546af286730e3bd9924e52cd0f205a70ac475a67842aa81b481ee765c2929e
let public_key_3 = PublicKey::from(
&PrivateKey::deserialize_from(&[0x03; 32]).expect("32 bytes, within curve order"),
);
let public_keys = vec![
public_key_1.clone(),
public_key_2.clone(),
public_key_3.clone(),
];
let required_signers = 2;
let multi_sig_address =
MultiSigAddress::new(public_keys, public_key_1.clone(), required_signers)
.expect("Should create MultiSig address");
let target_public_keys = vec![public_key_2.clone(), public_key_1.clone()];
let maybe_proof_result = multi_sig_address.generate_proof(target_public_keys);
assert!(maybe_proof_result.is_ok());
let maybe_proof = maybe_proof_result.unwrap();
assert!(maybe_proof.is_some());
let proof = maybe_proof.unwrap();
let root_hash = multi_sig_address.root_hash();
assert!(proof.verify(&root_hash));
}
}
| 35.963387 | 143 | 0.577437 |
fc642a25b48e20abd121197fd2d7dc54014d5e40 | 2,920 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fmt;
use common_datavalues::columns::DataColumn;
use common_datavalues::prelude::DataColumnsWithField;
use common_datavalues::DataType;
use common_datavalues::DataTypeAndNullable;
use common_exception::Result;
use dyn_clone::DynClone;
use crate::scalars::Monotonicity;
pub trait Function: fmt::Display + Sync + Send + DynClone {
/// Returns the name of the function, should be unique.
fn name(&self) -> &str;
/// Calculate the monotonicity from arguments' monotonicity information.
/// The input should be argument's monotonicity. For binary function it should be an
/// array of left expression's monotonicity and right expression's monotonicity.
/// For unary function, the input should be an array of the only argument's monotonicity.
/// The returned monotonicity should have 'left' and 'right' fields None -- the boundary
/// calculation relies on the function.eval method.
fn get_monotonicity(&self, _args: &[Monotonicity]) -> Result<Monotonicity> {
Ok(Monotonicity::default())
}
/// The method returns the return_type of this function.
fn return_type(&self, args: &[DataTypeAndNullable]) -> Result<DataType>;
/// Whether the function may return null.
/// The default implementation checks if any nullable input exists and returns true if exist; otherwise false.
fn nullable(&self, arg_fields: &[DataTypeAndNullable]) -> Result<bool> {
let any_input_nullable = arg_fields.iter().any(|field| field.is_nullable());
Ok(any_input_nullable)
}
/// Evaluate the function, e.g. run/execute the function.
fn eval(&self, _columns: &DataColumnsWithField, _input_rows: usize) -> Result<DataColumn>;
/// Whether the function passes through null input.
/// Return true is the function just return null with any given null input.
/// Return false if the function may return non-null with null input.
///
/// For example, arithmetic plus('+') will output null for any null input, like '12 + null = null'.
/// It has no idea of how to handle null, but just pass through.
///
/// While ISNULL function treats null input as a valid one. For example ISNULL(NULL, 'test') will return 'test'.
fn passthrough_null(&self) -> bool {
true
}
}
dyn_clone::clone_trait_object!(Function);
| 43.58209 | 117 | 0.714726 |
010596a34a56cbce1ffe2f96ec065d073289e75c | 19 | uint_module!(u16);
| 9.5 | 18 | 0.736842 |
e4d04b896bae34899a06137a068760dd41f227b6 | 2,990 | // Copyright (c) Facebook, Inc. and its affiliates.
// SPDX-License-Identifier: Apache-2.0
use crate::{
base_types::{Async, AsyncResult, NodeTime},
smr_context::SmrContext,
};
// -- BEGIN FILE node_update_actions --
/// Actions required by `ConsensusNode::update_node`.
#[derive(Debug)]
pub struct NodeUpdateActions<Context: SmrContext> {
/// Time at which to call `update_node` again, at the latest.
pub next_scheduled_update: NodeTime,
/// Whether we need to send a notification to a subset of nodes.
pub should_send: Vec<Context::Author>,
/// Whether we need to send a notification to all other nodes.
pub should_broadcast: bool,
/// Whether we need to request data from all other nodes.
pub should_query_all: bool,
}
// -- END FILE --
impl<Context: SmrContext> Default for NodeUpdateActions<Context> {
fn default() -> Self {
Self {
next_scheduled_update: NodeTime::default(),
should_send: Vec::new(),
should_broadcast: false,
should_query_all: false,
}
}
}
// -- BEGIN FILE consensus_node --
/// Core event handlers of a consensus node.
pub trait ConsensusNode<Context: SmrContext>: Sized {
/// Read data from storage and crate a view of the node state in memory.
fn load_node(context: &mut Context, clock: NodeTime) -> AsyncResult<Self>;
/// Execute one step of the main event loop of the protocol.
/// "Stage" changes to the node state by mutating `self`.
fn update_node(&mut self, context: &mut Context, clock: NodeTime)
-> NodeUpdateActions<Context>;
/// Save the "staged" node state into storage, possibly after applying additional async
/// operations.
fn save_node<'a>(&'a mut self, context: &'a mut Context) -> AsyncResult<'a, ()>;
}
// -- END FILE --
// -- BEGIN FILE data_sync_node --
/// Network event handlers of a consensus node.
pub trait DataSyncNode<Context> {
type Notification;
type Request;
type Response;
/// Sender role: what to send to initiate a data-synchronization exchange with a receiver.
fn create_notification(&self, context: &Context) -> Self::Notification;
/// Query role: what to send to initiate a query exchange and obtain data from a sender.
fn create_request(&self, context: &Context) -> Self::Request;
/// Sender role: handle a request from a receiver.
fn handle_request<'a>(
&'a self,
context: &'a mut Context,
request: Self::Request,
) -> Async<'a, Self::Response>;
/// Receiver role: accept or refuse a notification.
fn handle_notification<'a>(
&'a mut self,
context: &'a mut Context,
notification: Self::Notification,
) -> Async<'a, Option<Self::Request>>;
/// Receiver role: receive data.
fn handle_response<'a>(
&'a mut self,
context: &'a mut Context,
response: Self::Response,
clock: NodeTime,
) -> Async<'a, ()>;
}
// -- END FILE --
| 33.977273 | 94 | 0.651171 |
67bdcd285e96d4823785d1a8bbe9ef8d8cd251b8 | 12,412 | use std::{collections::BTreeSet, fmt::Debug};
use datasize::DataSize;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use crate::{
components::consensus::{
highway_core::{
endorsement::SignedEndorsement,
evidence::Evidence,
highway::{PingError, VertexError},
state::{self, Panorama},
validators::{ValidatorIndex, Validators},
},
traits::{Context, ValidatorSecret},
},
types::Timestamp,
};
/// A dependency of a `Vertex` that can be satisfied by one or more other vertices.
#[derive(Clone, DataSize, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
#[serde(bound(
    serialize = "C::Hash: Serialize",
    deserialize = "C::Hash: Deserialize<'de>",
))]
pub(crate) enum Dependency<C>
where
    C: Context,
{
    Unit(C::Hash),                   // a unit, identified by its hash
    Evidence(ValidatorIndex),        // evidence against the given validator
    Endorsement(C::Hash),            // endorsements of the unit with this hash
    Ping(ValidatorIndex, Timestamp), // a ping by the validator at that timestamp
}
impl<C: Context> Dependency<C> {
    /// Returns whether this identifies a unit, as opposed to other types of vertices.
    pub(crate) fn is_unit(&self) -> bool {
        // Exhaustive match: adding a new variant forces this to be revisited.
        match self {
            Dependency::Unit(_) => true,
            Dependency::Evidence(_) | Dependency::Endorsement(_) | Dependency::Ping(_, _) => false,
        }
    }
}
/// An element of the protocol state, that might depend on other elements.
///
/// It is the vertex in a directed acyclic graph, whose edges are dependencies.
#[derive(Clone, DataSize, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
#[serde(bound(
    serialize = "C::Hash: Serialize",
    deserialize = "C::Hash: Deserialize<'de>",
))]
pub(crate) enum Vertex<C>
where
    C: Context,
{
    Unit(SignedWireUnit<C>),       // a signed unit created by a validator
    Evidence(Evidence<C>),         // evidence of a validator's fault
    Endorsements(Endorsements<C>), // endorsements of a unit
    Ping(Ping<C>),                 // a liveness ping from a validator
}
impl<C: Context> Vertex<C> {
    /// Returns the consensus value mentioned in this vertex, if any.
    ///
    /// These need to be validated before passing the vertex into the protocol state. E.g. if
    /// `C::ConsensusValue` is a transaction, it should be validated first (correct signature,
    /// structure, gas limit, etc.). If it is a hash of a transaction, the transaction should be
    /// obtained _and_ validated. Only after that, the vertex can be considered valid.
    pub(crate) fn value(&self) -> Option<&C::ConsensusValue> {
        match self {
            Vertex::Unit(swunit) => swunit.wire_unit().value.as_ref(),
            Vertex::Evidence(_) | Vertex::Endorsements(_) | Vertex::Ping(_) => None,
        }
    }
    /// Returns the unit hash of this vertex (if it is a unit).
    pub(crate) fn unit_hash(&self) -> Option<C::Hash> {
        match self {
            Vertex::Unit(swunit) => Some(swunit.hash()),
            Vertex::Evidence(_) | Vertex::Endorsements(_) | Vertex::Ping(_) => None,
        }
    }
    /// Returns the seq number of this vertex (if it is a unit).
    pub(crate) fn unit_seq_number(&self) -> Option<u64> {
        match self {
            Vertex::Unit(swunit) => Some(swunit.wire_unit().seq_number),
            _ => None,
        }
    }
    /// Returns whether this is evidence, as opposed to other types of vertices.
    pub(crate) fn is_evidence(&self) -> bool {
        matches!(self, Vertex::Evidence(_))
    }
    /// Returns a `Timestamp` provided the vertex is a `Vertex::Unit` or `Vertex::Ping`.
    pub(crate) fn timestamp(&self) -> Option<Timestamp> {
        match self {
            Vertex::Unit(signed_wire_unit) => Some(signed_wire_unit.wire_unit().timestamp),
            Vertex::Ping(ping) => Some(ping.timestamp()),
            Vertex::Evidence(_) | Vertex::Endorsements(_) => None,
        }
    }
    /// Returns the creating validator's index, for units and pings.
    pub(crate) fn creator(&self) -> Option<ValidatorIndex> {
        match self {
            Vertex::Unit(signed_wire_unit) => Some(signed_wire_unit.wire_unit().creator),
            Vertex::Ping(ping) => Some(ping.creator),
            Vertex::Evidence(_) | Vertex::Endorsements(_) => None,
        }
    }
    /// Returns the `Dependency` that this vertex satisfies.
    pub(crate) fn id(&self) -> Dependency<C> {
        match self {
            Vertex::Unit(signed_wire_unit) => Dependency::Unit(signed_wire_unit.hash()),
            Vertex::Evidence(evidence) => Dependency::Evidence(evidence.perpetrator()),
            Vertex::Endorsements(endorsement) => Dependency::Endorsement(endorsement.unit),
            Vertex::Ping(ping) => Dependency::Ping(ping.creator(), ping.timestamp()),
        }
    }
    /// Returns a reference to the unit, or `None` if this is not a unit.
    pub(crate) fn unit(&self) -> Option<&SignedWireUnit<C>> {
        match self {
            Vertex::Unit(signed_wire_unit) => Some(signed_wire_unit),
            _ => None,
        }
    }
    /// Returns whether this vertex is a proposal, i.e. a unit carrying a value.
    pub(crate) fn is_proposal(&self) -> bool {
        self.value().is_some()
    }
}
#[derive(Clone, DataSize, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
#[serde(bound(
serialize = "C::Hash: Serialize",
deserialize = "C::Hash: Deserialize<'de>",
))]
pub(crate) struct SignedWireUnit<C>
where
C: Context,
{
pub(crate) hashed_wire_unit: HashedWireUnit<C>,
pub(crate) signature: C::Signature,
}
impl<C: Context> SignedWireUnit<C> {
pub(crate) fn new(
hashed_wire_unit: HashedWireUnit<C>,
secret_key: &C::ValidatorSecret,
) -> Self {
let signature = secret_key.sign(&hashed_wire_unit.hash);
SignedWireUnit {
hashed_wire_unit,
signature,
}
}
pub(crate) fn wire_unit(&self) -> &WireUnit<C> {
self.hashed_wire_unit.wire_unit()
}
pub(crate) fn hash(&self) -> C::Hash {
self.hashed_wire_unit.hash()
}
}
#[derive(Clone, DataSize, Debug, Eq, PartialEq, Hash)]
pub(crate) struct HashedWireUnit<C>
where
C: Context,
{
hash: C::Hash,
wire_unit: WireUnit<C>,
}
impl<C> HashedWireUnit<C>
where
C: Context,
{
/// Computes the unit's hash and creates a new `HashedWireUnit`.
pub(crate) fn new(wire_unit: WireUnit<C>) -> Self {
let hash = wire_unit.compute_hash();
Self::new_with_hash(wire_unit, hash)
}
pub(crate) fn into_inner(self) -> WireUnit<C> {
self.wire_unit
}
pub(crate) fn wire_unit(&self) -> &WireUnit<C> {
&self.wire_unit
}
pub(crate) fn hash(&self) -> C::Hash {
self.hash
}
/// Creates a new `HashedWireUnit`. Make sure the `hash` is correct, and identical with the
/// result of `wire_unit.compute_hash`.
pub(crate) fn new_with_hash(wire_unit: WireUnit<C>, hash: C::Hash) -> Self {
HashedWireUnit { hash, wire_unit }
}
}
impl<C: Context> Serialize for HashedWireUnit<C> {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
self.wire_unit.serialize(serializer)
}
}
impl<'de, C: Context> Deserialize<'de> for HashedWireUnit<C> {
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
Ok(HashedWireUnit::new(<_>::deserialize(deserializer)?))
}
}
/// A unit as it is sent over the wire, possibly containing a new block.
#[derive(Clone, DataSize, Eq, PartialEq, Serialize, Deserialize, Hash)]
#[serde(bound(
serialize = "C::Hash: Serialize",
deserialize = "C::Hash: Deserialize<'de>",
))]
pub(crate) struct WireUnit<C>
where
C: Context,
{
pub(crate) panorama: Panorama<C>,
pub(crate) creator: ValidatorIndex,
pub(crate) instance_id: C::InstanceId,
pub(crate) value: Option<C::ConsensusValue>,
pub(crate) seq_number: u64,
pub(crate) timestamp: Timestamp,
pub(crate) round_exp: u8,
pub(crate) endorsed: BTreeSet<C::Hash>,
}
impl<C: Context> Debug for WireUnit<C> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
/// A type whose debug implementation prints ".." (without the quotes).
struct Ellipsis;
impl Debug for Ellipsis {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "..")
}
}
f.debug_struct("WireUnit")
.field("value", &self.value.as_ref().map(|_| Ellipsis))
.field("creator.0", &self.creator.0)
.field("instance_id", &self.instance_id)
.field("seq_number", &self.seq_number)
.field("timestamp", &self.timestamp.millis())
.field("panorama", self.panorama.as_ref())
.field("round_exp", &self.round_exp)
.field("endorsed", &self.endorsed)
.field("round_id()", &self.round_id())
.finish()
}
}
impl<C: Context> WireUnit<C> {
pub(crate) fn into_hashed(self) -> HashedWireUnit<C> {
HashedWireUnit::new(self)
}
/// Returns the time at which the round containing this unit began.
pub(crate) fn round_id(&self) -> Timestamp {
state::round_id(self.timestamp, self.round_exp)
}
/// Returns the creator's previous unit.
pub(crate) fn previous(&self) -> Option<&C::Hash> {
self.panorama[self.creator].correct()
}
/// Returns the unit's hash, which is used as a unit identifier.
fn compute_hash(&self) -> C::Hash {
// TODO: Use serialize_into to avoid allocation?
<C as Context>::hash(&bincode::serialize(self).expect("serialize WireUnit"))
}
}
#[derive(Clone, DataSize, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
#[serde(bound(
serialize = "C::Hash: Serialize",
deserialize = "C::Hash: Deserialize<'de>",
))]
pub(crate) struct Endorsements<C>
where
C: Context,
{
pub(crate) unit: C::Hash,
pub(crate) endorsers: Vec<(ValidatorIndex, C::Signature)>,
}
impl<C: Context> Endorsements<C> {
/// Returns hash of the endorsed vote.
pub fn unit(&self) -> &C::Hash {
&self.unit
}
/// Returns an iterator over validator indexes that endorsed the `unit`.
pub fn validator_ids(&self) -> impl Iterator<Item = ValidatorIndex> + '_ {
self.endorsers.iter().map(|(v, _)| *v)
}
}
impl<C: Context> From<SignedEndorsement<C>> for Endorsements<C> {
fn from(signed_e: SignedEndorsement<C>) -> Self {
Endorsements {
unit: *signed_e.unit(),
endorsers: vec![(signed_e.validator_idx(), *signed_e.signature())],
}
}
}
/// A ping sent by a validator to signal that it is online but has not created new units in a
/// while.
#[derive(Clone, DataSize, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
#[serde(bound(
serialize = "C::Hash: Serialize",
deserialize = "C::Hash: Deserialize<'de>",
))]
pub(crate) struct Ping<C>
where
C: Context,
{
creator: ValidatorIndex,
timestamp: Timestamp,
instance_id: C::InstanceId,
signature: C::Signature,
}
impl<C: Context> Ping<C> {
/// Creates a new signed ping.
pub(crate) fn new(
creator: ValidatorIndex,
timestamp: Timestamp,
instance_id: C::InstanceId,
sk: &C::ValidatorSecret,
) -> Self {
let signature = sk.sign(&Self::hash(creator, timestamp, instance_id));
Ping {
creator,
timestamp,
instance_id,
signature,
}
}
/// The creator who signals that it is online.
pub(crate) fn creator(&self) -> ValidatorIndex {
self.creator
}
/// The timestamp when the ping was created.
pub(crate) fn timestamp(&self) -> Timestamp {
self.timestamp
}
/// Validates the ping and returns an error if it is not signed by the creator.
pub(crate) fn validate(
&self,
validators: &Validators<C::ValidatorId>,
our_instance_id: &C::InstanceId,
) -> Result<(), VertexError> {
let Ping {
creator,
timestamp,
instance_id,
signature,
} = self;
if instance_id != our_instance_id {
return Err(PingError::InstanceId.into());
}
let v_id = validators.id(self.creator).ok_or(PingError::Creator)?;
let hash = Self::hash(*creator, *timestamp, *instance_id);
if !C::verify_signature(&hash, v_id, signature) {
return Err(PingError::Signature.into());
}
Ok(())
}
/// Computes the hash of a ping, i.e. of the creator and timestamp.
fn hash(creator: ValidatorIndex, timestamp: Timestamp, instance_id: C::InstanceId) -> C::Hash {
let bytes = bincode::serialize(&(creator, timestamp, instance_id)).expect("serialize Ping");
<C as Context>::hash(&bytes)
}
}
| 31.502538 | 100 | 0.609733 |
ebb8c95ec7666a66a68bc036e54eda7179e63f9a | 1,894 | use cosmwasm_std::{Binary, HumanAddr, Uint128};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct InitMsg {
pub count: i32,
}
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum HandleMsg {
Increment {},
Reset {
count: i32,
},
Register {
reg_addr: HumanAddr,
reg_hash: String,
},
Receive {
sender: HumanAddr,
amount: Uint128,
msg: Binary,
},
Redeem {
addr: HumanAddr,
hash: String,
to: HumanAddr,
amount: Uint128,
},
Fail {},
}
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum QueryMsg {
// GetCount returns the current count as a json-encoded number
GetCount {},
}
// We define a custom struct for each query response
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct CountResponse {
pub count: i32,
}
// Messages sent to SNIP-20 contracts
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum Snip20Msg {
RegisterReceive {
code_hash: String,
padding: Option<String>,
},
Redeem {
amount: Uint128,
padding: Option<String>,
},
}
impl Snip20Msg {
pub fn register_receive(code_hash: String) -> Self {
Snip20Msg::RegisterReceive {
code_hash,
padding: None, // TODO add padding calculation
}
}
pub fn redeem(amount: Uint128) -> Self {
Snip20Msg::Redeem {
amount,
padding: None, // TODO add padding calculation
}
}
}
| 24.597403 | 71 | 0.595037 |
18015fc9a67366a129e634ff4ff66c6e3b12b91f | 5,921 | use crate::conn::{self, DateTime};
use chrono::Timelike;
lazy_static::lazy_static! {
static ref COLORS: Vec<::termion::color::AnsiValue> = {
let mut c = Vec::with_capacity(45);
for r in 1..6 {
for g in 1..6 {
for b in 1..6 {
if r < 2 || g < 2 || b < 2 {
c.push(::termion::color::AnsiValue::rgb(r, g, b));
}
}
}
}
c
};
}
fn djb2(input: &str) -> u64 {
let mut hash: u64 = 5381;
for c in input.bytes() {
hash = (hash << 5).wrapping_add(hash).wrapping_add(u64::from(c));
}
hash
}
pub struct ChanMessage {
formatted_width: Option<usize>,
pub raw: String,
formatted: String,
sender: String,
timestamp: DateTime,
reactions: Vec<(String, usize)>,
}
impl From<conn::Message> for ChanMessage {
fn from(message: conn::Message) -> ChanMessage {
ChanMessage {
formatted_width: None,
raw: message.contents,
formatted: String::new(),
sender: message.sender,
timestamp: message.timestamp,
reactions: message.reactions,
}
}
}
impl ChanMessage {
// Prevent mutating the timestamp but make it visible
pub fn timestamp(&self) -> &DateTime {
&self.timestamp
}
pub fn sender(&self) -> &str {
&self.sender
}
pub fn color(&self) -> termion::color::AnsiValue {
COLORS[djb2(&self.sender) as usize % COLORS.len()]
}
pub fn add_reaction(&mut self, reaction: &str) {
let mut found = false;
if let Some(r) = self.reactions.iter_mut().find(|rxn| rxn.0 == reaction) {
r.1 += 1;
found = true;
}
if !found {
self.reactions.push((reaction.into(), 1));
}
self.formatted_width = None;
}
pub fn remove_reaction(&mut self, reaction: &str) {
if let Some(r) = self.reactions.iter_mut().find(|rxn| rxn.0 == reaction) {
r.1 = r.1.saturating_sub(1);
self.formatted_width = None;
}
self.reactions = self.reactions.iter().cloned().filter(|r| r.1 > 0).collect();
}
// TODO: This should probably just go away if we have immutable message history
pub fn edit_to(&mut self, contents: String) {
self.raw = contents;
self.formatted_width = None;
}
pub fn formatted_to(&mut self, width: usize) -> &str {
use std::fmt::Write;
use textwrap::{NoHyphenation, Wrapper};
if Some(width) == self.formatted_width {
return &self.formatted;
}
use chrono::TimeZone;
let timezone = ::chrono::offset::Local::now().timezone();
let localtime = timezone.from_utc_datetime(&self.timestamp.as_chrono().naive_utc());
self.formatted_width = Some(width);
self.formatted.clear();
let indent_str = " ";
// 2 for the `: ` after the name, 8 for the time
let sender_spacer = " ".repeat(self.sender.chars().count() + 2 + 8);
let wrapper = Wrapper::with_splitter(width, NoHyphenation)
.subsequent_indent(indent_str)
.initial_indent(indent_str)
.break_words(true);
let first_line_wrapper = Wrapper::with_splitter(width, NoHyphenation)
.subsequent_indent(indent_str)
.initial_indent(&sender_spacer)
.break_words(true);
for (l, line) in self.raw.lines().enumerate() {
// wrap_iter produces nothing on an empty line, so we have to supply the required newline
if line == "" {
self.formatted.push('\n');
}
if l == 0 {
for (l, wrapped_line) in first_line_wrapper.wrap_iter(line.trim_start()).enumerate()
{
if l == 0 {
let _ = write!(
self.formatted,
"({:02}:{:02}) {}: ",
localtime.time().hour(),
localtime.time().minute(),
self.sender,
);
self.formatted
.extend(wrapped_line.chars().skip_while(|c| c.is_whitespace()));
} else {
self.formatted.push_str(&wrapped_line);
}
self.formatted.push('\n');
}
} else {
for wrapped_line in wrapper.wrap_iter(&line) {
self.formatted.push_str(&wrapped_line);
self.formatted.push('\n');
}
}
}
if !self.reactions.is_empty() {
let mut reactions_string = String::new();
for (r, count) in &self.reactions {
let _ = write!(reactions_string, "{}({}) ", r, count);
}
let wrapper = Wrapper::with_splitter(width, NoHyphenation)
.break_words(true)
.initial_indent(indent_str)
.subsequent_indent(indent_str);
for line in wrapper.wrap_iter(&reactions_string) {
// Apparently terminal colors are reset by the Goto mechanism I'm using to move
// from one line to another
//self.formatted.push_str(&gray);
self.formatted.push_str(&line);
self.formatted.push('\n');
}
// Clean trailing whitespace from messages
while self.formatted.ends_with(char::is_whitespace) {
self.formatted.pop();
}
}
// Clean trailing whitespace from messages
while self.formatted.ends_with(char::is_whitespace) {
self.formatted.pop();
}
&self.formatted
}
}
| 32.532967 | 101 | 0.512582 |
abee997fe1ee6d63b2cb9de0f7fcdd52150b9374 | 386 | use crate::structs::UServiceType;
pub struct IServicePreferences {
service_type: String,
are_having: bool,
are_livestreaming: bool,
livestream_is_public: bool,
}
pub fn new() -> IServicePreferences {
return IServicePreferences {
service_type: UServiceType::BURIAL.to_string(),
are_having: false,
are_livestreaming: false,
livestream_is_public: false,
};
}
| 21.444444 | 51 | 0.738342 |
7662300bcaa59aaeff1b6f76c256e51c2d5a7d8d | 1,779 | /*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use serde::Deserialize;
use serde::Serialize;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Span {
pub left: usize,
pub right: usize,
}
impl Span {
pub fn new(left: usize, right: usize) -> Self {
Self { left, right }
}
pub fn contains_position(&self, position: usize) -> bool {
self.left < position && position < self.right
}
pub fn contains_span(&self, span: Self) -> bool {
self.contains_position(span.left) && self.contains_position(span.right)
}
}
use std::cmp::Ordering;
impl PartialOrd for Span {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
let self_min = std::cmp::min(self.left, self.right);
let other_min = std::cmp::min(other.left, other.right);
if self_min < other_min {
Some(Ordering::Less)
} else if self == other {
Some(Ordering::Equal)
} else {
Some(Ordering::Greater)
}
}
}
impl Ord for Span {
fn cmp(&self, other: &Self) -> Ordering {
// it's safe since partial_cmp always returns Some
self.partial_cmp(other).unwrap()
}
}
| 28.238095 | 79 | 0.645306 |
48c790f983565884be379fb68e89c63c97bdbbdc | 48,146 | pub type mode_t = u16;
pub type pthread_attr_t = *mut ::c_void;
pub type rlim_t = i64;
pub type pthread_mutex_t = *mut ::c_void;
pub type pthread_mutexattr_t = *mut ::c_void;
pub type pthread_cond_t = *mut ::c_void;
pub type pthread_condattr_t = *mut ::c_void;
pub type pthread_rwlock_t = *mut ::c_void;
pub type pthread_rwlockattr_t = *mut ::c_void;
pub type pthread_key_t = ::c_int;
pub type tcflag_t = ::c_uint;
pub type speed_t = ::c_uint;
pub type nl_item = ::c_int;
pub type id_t = i64;
pub type vm_size_t = ::uintptr_t;
#[cfg_attr(feature = "extra_traits", derive(Debug))]
pub enum timezone {}
impl ::Copy for timezone {}
impl ::Clone for timezone {
fn clone(&self) -> timezone { *self }
}
s! {
pub struct in_addr {
pub s_addr: ::in_addr_t,
}
pub struct ip_mreq {
pub imr_multiaddr: in_addr,
pub imr_interface: in_addr,
}
pub struct glob_t {
pub gl_pathc: ::size_t,
pub gl_matchc: ::size_t,
pub gl_offs: ::size_t,
pub gl_flags: ::c_int,
pub gl_pathv: *mut *mut ::c_char,
__unused3: *mut ::c_void,
__unused4: *mut ::c_void,
__unused5: *mut ::c_void,
__unused6: *mut ::c_void,
__unused7: *mut ::c_void,
__unused8: *mut ::c_void,
}
pub struct addrinfo {
pub ai_flags: ::c_int,
pub ai_family: ::c_int,
pub ai_socktype: ::c_int,
pub ai_protocol: ::c_int,
pub ai_addrlen: ::socklen_t,
pub ai_canonname: *mut ::c_char,
pub ai_addr: *mut ::sockaddr,
pub ai_next: *mut addrinfo,
}
pub struct sigset_t {
bits: [u32; 4],
}
pub struct siginfo_t {
pub si_signo: ::c_int,
pub si_errno: ::c_int,
pub si_code: ::c_int,
pub si_pid: ::pid_t,
pub si_uid: ::uid_t,
pub si_status: ::c_int,
pub si_addr: *mut ::c_void,
_pad: [::c_int; 12],
}
pub struct sigaction {
pub sa_sigaction: ::sighandler_t,
pub sa_flags: ::c_int,
pub sa_mask: sigset_t,
}
pub struct sched_param {
pub sched_priority: ::c_int,
}
pub struct Dl_info {
pub dli_fname: *const ::c_char,
pub dli_fbase: *mut ::c_void,
pub dli_sname: *const ::c_char,
pub dli_saddr: *mut ::c_void,
}
pub struct sockaddr_in {
pub sin_len: u8,
pub sin_family: ::sa_family_t,
pub sin_port: ::in_port_t,
pub sin_addr: ::in_addr,
pub sin_zero: [::c_char; 8],
}
pub struct termios {
pub c_iflag: ::tcflag_t,
pub c_oflag: ::tcflag_t,
pub c_cflag: ::tcflag_t,
pub c_lflag: ::tcflag_t,
pub c_cc: [::cc_t; ::NCCS],
pub c_ispeed: ::speed_t,
pub c_ospeed: ::speed_t,
}
pub struct flock {
pub l_start: ::off_t,
pub l_len: ::off_t,
pub l_pid: ::pid_t,
pub l_type: ::c_short,
pub l_whence: ::c_short,
#[cfg(not(target_os = "dragonfly"))]
pub l_sysid: ::c_int,
}
pub struct sf_hdtr {
pub headers: *mut ::iovec,
pub hdr_cnt: ::c_int,
pub trailers: *mut ::iovec,
pub trl_cnt: ::c_int,
}
pub struct lconv {
pub decimal_point: *mut ::c_char,
pub thousands_sep: *mut ::c_char,
pub grouping: *mut ::c_char,
pub int_curr_symbol: *mut ::c_char,
pub currency_symbol: *mut ::c_char,
pub mon_decimal_point: *mut ::c_char,
pub mon_thousands_sep: *mut ::c_char,
pub mon_grouping: *mut ::c_char,
pub positive_sign: *mut ::c_char,
pub negative_sign: *mut ::c_char,
pub int_frac_digits: ::c_char,
pub frac_digits: ::c_char,
pub p_cs_precedes: ::c_char,
pub p_sep_by_space: ::c_char,
pub n_cs_precedes: ::c_char,
pub n_sep_by_space: ::c_char,
pub p_sign_posn: ::c_char,
pub n_sign_posn: ::c_char,
pub int_p_cs_precedes: ::c_char,
pub int_n_cs_precedes: ::c_char,
pub int_p_sep_by_space: ::c_char,
pub int_n_sep_by_space: ::c_char,
pub int_p_sign_posn: ::c_char,
pub int_n_sign_posn: ::c_char,
}
pub struct cmsgcred {
pub cmcred_pid: ::pid_t,
pub cmcred_uid: ::uid_t,
pub cmcred_euid: ::uid_t,
pub cmcred_gid: ::gid_t,
pub cmcred_ngroups: ::c_short,
pub cmcred_groups: [::gid_t; CMGROUP_MAX],
}
pub struct rtprio {
pub type_: ::c_ushort,
pub prio: ::c_ushort,
}
pub struct in6_pktinfo {
pub ipi6_addr: ::in6_addr,
pub ipi6_ifindex: ::c_uint,
}
pub struct arphdr {
pub ar_hrd: u16,
pub ar_pro: u16,
pub ar_hln: u8,
pub ar_pln: u8,
pub ar_op: u16,
}
}
s_no_extra_traits! {
pub struct sockaddr_storage {
pub ss_len: u8,
pub ss_family: ::sa_family_t,
__ss_pad1: [u8; 6],
__ss_align: i64,
__ss_pad2: [u8; 112],
}
}
cfg_if! {
if #[cfg(feature = "extra_traits")] {
impl PartialEq for sockaddr_storage {
fn eq(&self, other: &sockaddr_storage) -> bool {
self.ss_len == other.ss_len
&& self.ss_family == other.ss_family
&& self.__ss_pad1 == other.__ss_pad1
&& self.__ss_align == other.__ss_align
&& self
.__ss_pad2
.iter()
.zip(other.__ss_pad2.iter())
.all(|(a, b)| a == b)
}
}
impl Eq for sockaddr_storage {}
impl ::fmt::Debug for sockaddr_storage {
fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
f.debug_struct("sockaddr_storage")
.field("ss_len", &self.ss_len)
.field("ss_family", &self.ss_family)
.field("__ss_pad1", &self.__ss_pad1)
.field("__ss_align", &self.__ss_align)
// FIXME: .field("__ss_pad2", &self.__ss_pad2)
.finish()
}
}
impl ::hash::Hash for sockaddr_storage {
fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
self.ss_len.hash(state);
self.ss_family.hash(state);
self.__ss_pad1.hash(state);
self.__ss_align.hash(state);
self.__ss_pad2.hash(state);
}
}
}
}
pub const AIO_LISTIO_MAX: ::c_int = 16;
pub const AIO_CANCELED: ::c_int = 1;
pub const AIO_NOTCANCELED: ::c_int = 2;
pub const AIO_ALLDONE: ::c_int = 3;
pub const LIO_NOP: ::c_int = 0;
pub const LIO_WRITE: ::c_int = 1;
pub const LIO_READ: ::c_int = 2;
pub const LIO_WAIT: ::c_int = 1;
pub const LIO_NOWAIT: ::c_int = 0;
pub const SIGEV_NONE: ::c_int = 0;
pub const SIGEV_SIGNAL: ::c_int = 1;
pub const SIGEV_THREAD: ::c_int = 2;
pub const SIGEV_KEVENT: ::c_int = 3;
pub const CODESET: ::nl_item = 0;
pub const D_T_FMT: ::nl_item = 1;
pub const D_FMT: ::nl_item = 2;
pub const T_FMT: ::nl_item = 3;
pub const T_FMT_AMPM: ::nl_item = 4;
pub const AM_STR: ::nl_item = 5;
pub const PM_STR: ::nl_item = 6;
pub const DAY_1: ::nl_item = 7;
pub const DAY_2: ::nl_item = 8;
pub const DAY_3: ::nl_item = 9;
pub const DAY_4: ::nl_item = 10;
pub const DAY_5: ::nl_item = 11;
pub const DAY_6: ::nl_item = 12;
pub const DAY_7: ::nl_item = 13;
pub const ABDAY_1: ::nl_item = 14;
pub const ABDAY_2: ::nl_item = 15;
pub const ABDAY_3: ::nl_item = 16;
pub const ABDAY_4: ::nl_item = 17;
pub const ABDAY_5: ::nl_item = 18;
pub const ABDAY_6: ::nl_item = 19;
pub const ABDAY_7: ::nl_item = 20;
pub const MON_1: ::nl_item = 21;
pub const MON_2: ::nl_item = 22;
pub const MON_3: ::nl_item = 23;
pub const MON_4: ::nl_item = 24;
pub const MON_5: ::nl_item = 25;
pub const MON_6: ::nl_item = 26;
pub const MON_7: ::nl_item = 27;
pub const MON_8: ::nl_item = 28;
pub const MON_9: ::nl_item = 29;
pub const MON_10: ::nl_item = 30;
pub const MON_11: ::nl_item = 31;
pub const MON_12: ::nl_item = 32;
pub const ABMON_1: ::nl_item = 33;
pub const ABMON_2: ::nl_item = 34;
pub const ABMON_3: ::nl_item = 35;
pub const ABMON_4: ::nl_item = 36;
pub const ABMON_5: ::nl_item = 37;
pub const ABMON_6: ::nl_item = 38;
pub const ABMON_7: ::nl_item = 39;
pub const ABMON_8: ::nl_item = 40;
pub const ABMON_9: ::nl_item = 41;
pub const ABMON_10: ::nl_item = 42;
pub const ABMON_11: ::nl_item = 43;
pub const ABMON_12: ::nl_item = 44;
pub const ERA: ::nl_item = 45;
pub const ERA_D_FMT: ::nl_item = 46;
pub const ERA_D_T_FMT: ::nl_item = 47;
pub const ERA_T_FMT: ::nl_item = 48;
pub const ALT_DIGITS: ::nl_item = 49;
pub const RADIXCHAR: ::nl_item = 50;
pub const THOUSEP: ::nl_item = 51;
pub const YESEXPR: ::nl_item = 52;
pub const NOEXPR: ::nl_item = 53;
pub const YESSTR: ::nl_item = 54;
pub const NOSTR: ::nl_item = 55;
pub const CRNCYSTR: ::nl_item = 56;
pub const D_MD_ORDER: ::nl_item = 57;
pub const ALTMON_1: ::nl_item = 58;
pub const ALTMON_2: ::nl_item = 59;
pub const ALTMON_3: ::nl_item = 60;
pub const ALTMON_4: ::nl_item = 61;
pub const ALTMON_5: ::nl_item = 62;
pub const ALTMON_6: ::nl_item = 63;
pub const ALTMON_7: ::nl_item = 64;
pub const ALTMON_8: ::nl_item = 65;
pub const ALTMON_9: ::nl_item = 66;
pub const ALTMON_10: ::nl_item = 67;
pub const ALTMON_11: ::nl_item = 68;
pub const ALTMON_12: ::nl_item = 69;
pub const EXIT_FAILURE: ::c_int = 1;
pub const EXIT_SUCCESS: ::c_int = 0;
pub const EOF: ::c_int = -1;
pub const SEEK_SET: ::c_int = 0;
pub const SEEK_CUR: ::c_int = 1;
pub const SEEK_END: ::c_int = 2;
pub const SEEK_DATA: ::c_int = 3;
pub const SEEK_HOLE: ::c_int = 4;
pub const _IOFBF: ::c_int = 0;
pub const _IONBF: ::c_int = 2;
pub const _IOLBF: ::c_int = 1;
pub const BUFSIZ: ::c_uint = 1024;
pub const FOPEN_MAX: ::c_uint = 20;
pub const FILENAME_MAX: ::c_uint = 1024;
pub const L_tmpnam: ::c_uint = 1024;
pub const TMP_MAX: ::c_uint = 308915776;
pub const O_NOCTTY: ::c_int = 32768;
pub const O_DIRECT: ::c_int = 0x00010000;
pub const S_IFIFO: mode_t = 4096;
pub const S_IFCHR: mode_t = 8192;
pub const S_IFBLK: mode_t = 24576;
pub const S_IFDIR: mode_t = 16384;
pub const S_IFREG: mode_t = 32768;
pub const S_IFLNK: mode_t = 40960;
pub const S_IFSOCK: mode_t = 49152;
pub const S_IFMT: mode_t = 61440;
pub const S_IEXEC: mode_t = 64;
pub const S_IWRITE: mode_t = 128;
pub const S_IREAD: mode_t = 256;
pub const S_IRWXU: mode_t = 448;
pub const S_IXUSR: mode_t = 64;
pub const S_IWUSR: mode_t = 128;
pub const S_IRUSR: mode_t = 256;
pub const S_IRWXG: mode_t = 56;
pub const S_IXGRP: mode_t = 8;
pub const S_IWGRP: mode_t = 16;
pub const S_IRGRP: mode_t = 32;
pub const S_IRWXO: mode_t = 7;
pub const S_IXOTH: mode_t = 1;
pub const S_IWOTH: mode_t = 2;
pub const S_IROTH: mode_t = 4;
pub const F_OK: ::c_int = 0;
pub const R_OK: ::c_int = 4;
pub const W_OK: ::c_int = 2;
pub const X_OK: ::c_int = 1;
pub const STDIN_FILENO: ::c_int = 0;
pub const STDOUT_FILENO: ::c_int = 1;
pub const STDERR_FILENO: ::c_int = 2;
pub const F_LOCK: ::c_int = 1;
pub const F_TEST: ::c_int = 3;
pub const F_TLOCK: ::c_int = 2;
pub const F_ULOCK: ::c_int = 0;
pub const F_DUPFD_CLOEXEC: ::c_int = 17;
pub const SIGHUP: ::c_int = 1;
pub const SIGINT: ::c_int = 2;
pub const SIGQUIT: ::c_int = 3;
pub const SIGILL: ::c_int = 4;
pub const SIGABRT: ::c_int = 6;
pub const SIGEMT: ::c_int = 7;
pub const SIGFPE: ::c_int = 8;
pub const SIGKILL: ::c_int = 9;
pub const SIGSEGV: ::c_int = 11;
pub const SIGPIPE: ::c_int = 13;
pub const SIGALRM: ::c_int = 14;
pub const SIGTERM: ::c_int = 15;
pub const PROT_NONE: ::c_int = 0;
pub const PROT_READ: ::c_int = 1;
pub const PROT_WRITE: ::c_int = 2;
pub const PROT_EXEC: ::c_int = 4;
pub const MAP_FILE: ::c_int = 0x0000;
pub const MAP_SHARED: ::c_int = 0x0001;
pub const MAP_PRIVATE: ::c_int = 0x0002;
pub const MAP_FIXED: ::c_int = 0x0010;
pub const MAP_ANON: ::c_int = 0x1000;
pub const MAP_ANONYMOUS: ::c_int = MAP_ANON;
pub const MAP_FAILED: *mut ::c_void = !0 as *mut ::c_void;
pub const MCL_CURRENT: ::c_int = 0x0001;
pub const MCL_FUTURE: ::c_int = 0x0002;
pub const MS_SYNC: ::c_int = 0x0000;
pub const MS_ASYNC: ::c_int = 0x0001;
pub const MS_INVALIDATE: ::c_int = 0x0002;
pub const EPERM: ::c_int = 1;
pub const ENOENT: ::c_int = 2;
pub const ESRCH: ::c_int = 3;
pub const EINTR: ::c_int = 4;
pub const EIO: ::c_int = 5;
pub const ENXIO: ::c_int = 6;
pub const E2BIG: ::c_int = 7;
pub const ENOEXEC: ::c_int = 8;
pub const EBADF: ::c_int = 9;
pub const ECHILD: ::c_int = 10;
pub const EDEADLK: ::c_int = 11;
pub const ENOMEM: ::c_int = 12;
pub const EACCES: ::c_int = 13;
pub const EFAULT: ::c_int = 14;
pub const ENOTBLK: ::c_int = 15;
pub const EBUSY: ::c_int = 16;
pub const EEXIST: ::c_int = 17;
pub const EXDEV: ::c_int = 18;
pub const ENODEV: ::c_int = 19;
pub const ENOTDIR: ::c_int = 20;
pub const EISDIR: ::c_int = 21;
pub const EINVAL: ::c_int = 22;
pub const ENFILE: ::c_int = 23;
pub const EMFILE: ::c_int = 24;
pub const ENOTTY: ::c_int = 25;
pub const ETXTBSY: ::c_int = 26;
pub const EFBIG: ::c_int = 27;
pub const ENOSPC: ::c_int = 28;
pub const ESPIPE: ::c_int = 29;
pub const EROFS: ::c_int = 30;
pub const EMLINK: ::c_int = 31;
pub const EPIPE: ::c_int = 32;
pub const EDOM: ::c_int = 33;
pub const ERANGE: ::c_int = 34;
pub const EAGAIN: ::c_int = 35;
pub const EWOULDBLOCK: ::c_int = 35;
pub const EINPROGRESS: ::c_int = 36;
pub const EALREADY: ::c_int = 37;
pub const ENOTSOCK: ::c_int = 38;
pub const EDESTADDRREQ: ::c_int = 39;
pub const EMSGSIZE: ::c_int = 40;
pub const EPROTOTYPE: ::c_int = 41;
pub const ENOPROTOOPT: ::c_int = 42;
pub const EPROTONOSUPPORT: ::c_int = 43;
pub const ESOCKTNOSUPPORT: ::c_int = 44;
pub const EOPNOTSUPP: ::c_int = 45;
pub const ENOTSUP: ::c_int = EOPNOTSUPP;
pub const EPFNOSUPPORT: ::c_int = 46;
pub const EAFNOSUPPORT: ::c_int = 47;
pub const EADDRINUSE: ::c_int = 48;
pub const EADDRNOTAVAIL: ::c_int = 49;
pub const ENETDOWN: ::c_int = 50;
pub const ENETUNREACH: ::c_int = 51;
pub const ENETRESET: ::c_int = 52;
pub const ECONNABORTED: ::c_int = 53;
pub const ECONNRESET: ::c_int = 54;
pub const ENOBUFS: ::c_int = 55;
pub const EISCONN: ::c_int = 56;
pub const ENOTCONN: ::c_int = 57;
pub const ESHUTDOWN: ::c_int = 58;
pub const ETOOMANYREFS: ::c_int = 59;
pub const ETIMEDOUT: ::c_int = 60;
pub const ECONNREFUSED: ::c_int = 61;
pub const ELOOP: ::c_int = 62;
pub const ENAMETOOLONG: ::c_int = 63;
pub const EHOSTDOWN: ::c_int = 64;
pub const EHOSTUNREACH: ::c_int = 65;
pub const ENOTEMPTY: ::c_int = 66;
pub const EPROCLIM: ::c_int = 67;
pub const EUSERS: ::c_int = 68;
pub const EDQUOT: ::c_int = 69;
pub const ESTALE: ::c_int = 70;
pub const EREMOTE: ::c_int = 71;
pub const EBADRPC: ::c_int = 72;
pub const ERPCMISMATCH: ::c_int = 73;
pub const EPROGUNAVAIL: ::c_int = 74;
pub const EPROGMISMATCH: ::c_int = 75;
pub const EPROCUNAVAIL: ::c_int = 76;
pub const ENOLCK: ::c_int = 77;
pub const ENOSYS: ::c_int = 78;
pub const EFTYPE: ::c_int = 79;
pub const EAUTH: ::c_int = 80;
pub const ENEEDAUTH: ::c_int = 81;
pub const EIDRM: ::c_int = 82;
pub const ENOMSG: ::c_int = 83;
pub const EOVERFLOW: ::c_int = 84;
pub const ECANCELED: ::c_int = 85;
pub const EILSEQ: ::c_int = 86;
pub const ENOATTR: ::c_int = 87;
pub const EDOOFUS: ::c_int = 88;
pub const EBADMSG: ::c_int = 89;
pub const EMULTIHOP: ::c_int = 90;
pub const ENOLINK: ::c_int = 91;
pub const EPROTO: ::c_int = 92;
pub const POLLSTANDARD: ::c_short = ::POLLIN | ::POLLPRI | ::POLLOUT |
::POLLRDNORM | ::POLLRDBAND | ::POLLWRBAND | ::POLLERR |
::POLLHUP | ::POLLNVAL;
pub const EAI_AGAIN: ::c_int = 2;
pub const EAI_BADFLAGS: ::c_int = 3;
pub const EAI_FAIL: ::c_int = 4;
pub const EAI_FAMILY: ::c_int = 5;
pub const EAI_MEMORY: ::c_int = 6;
pub const EAI_NONAME: ::c_int = 8;
pub const EAI_SERVICE: ::c_int = 9;
pub const EAI_SOCKTYPE: ::c_int = 10;
pub const EAI_SYSTEM: ::c_int = 11;
pub const EAI_OVERFLOW: ::c_int = 14;
pub const F_DUPFD: ::c_int = 0;
pub const F_GETFD: ::c_int = 1;
pub const F_SETFD: ::c_int = 2;
pub const F_GETFL: ::c_int = 3;
pub const F_SETFL: ::c_int = 4;
pub const SIGTRAP: ::c_int = 5;
pub const GLOB_APPEND : ::c_int = 0x0001;
pub const GLOB_DOOFFS : ::c_int = 0x0002;
pub const GLOB_ERR : ::c_int = 0x0004;
pub const GLOB_MARK : ::c_int = 0x0008;
pub const GLOB_NOCHECK : ::c_int = 0x0010;
pub const GLOB_NOSORT : ::c_int = 0x0020;
pub const GLOB_NOESCAPE: ::c_int = 0x2000;
pub const GLOB_NOSPACE : ::c_int = -1;
pub const GLOB_ABORTED : ::c_int = -2;
pub const GLOB_NOMATCH : ::c_int = -3;
pub const POSIX_MADV_NORMAL: ::c_int = 0;
pub const POSIX_MADV_RANDOM: ::c_int = 1;
pub const POSIX_MADV_SEQUENTIAL: ::c_int = 2;
pub const POSIX_MADV_WILLNEED: ::c_int = 3;
pub const POSIX_MADV_DONTNEED: ::c_int = 4;
pub const PTHREAD_PROCESS_PRIVATE: ::c_int = 0;
pub const PTHREAD_PROCESS_SHARED: ::c_int = 1;
pub const PTHREAD_CREATE_JOINABLE: ::c_int = 0;
pub const PTHREAD_CREATE_DETACHED: ::c_int = 1;
pub const RLIMIT_CPU: ::c_int = 0;
pub const RLIMIT_FSIZE: ::c_int = 1;
pub const RLIMIT_DATA: ::c_int = 2;
pub const RLIMIT_STACK: ::c_int = 3;
pub const RLIMIT_CORE: ::c_int = 4;
pub const RLIMIT_RSS: ::c_int = 5;
pub const RLIMIT_MEMLOCK: ::c_int = 6;
pub const RLIMIT_NPROC: ::c_int = 7;
pub const RLIMIT_NOFILE: ::c_int = 8;
pub const RLIMIT_SBSIZE: ::c_int = 9;
pub const RLIMIT_VMEM: ::c_int = 10;
pub const RLIMIT_AS: ::c_int = RLIMIT_VMEM;
pub const RLIM_INFINITY: rlim_t = 0x7fff_ffff_ffff_ffff;
pub const RUSAGE_SELF: ::c_int = 0;
pub const RUSAGE_CHILDREN: ::c_int = -1;
pub const MADV_NORMAL: ::c_int = 0;
pub const MADV_RANDOM: ::c_int = 1;
pub const MADV_SEQUENTIAL: ::c_int = 2;
pub const MADV_WILLNEED: ::c_int = 3;
pub const MADV_DONTNEED: ::c_int = 4;
pub const MADV_FREE: ::c_int = 5;
pub const MADV_NOSYNC: ::c_int = 6;
pub const MADV_AUTOSYNC: ::c_int = 7;
pub const MADV_NOCORE: ::c_int = 8;
pub const MADV_CORE: ::c_int = 9;
pub const MINCORE_INCORE: ::c_int = 0x1;
pub const MINCORE_REFERENCED: ::c_int = 0x2;
pub const MINCORE_MODIFIED: ::c_int = 0x4;
pub const MINCORE_REFERENCED_OTHER: ::c_int = 0x8;
pub const MINCORE_MODIFIED_OTHER: ::c_int = 0x10;
pub const MINCORE_SUPER: ::c_int = 0x20;
pub const AF_UNSPEC: ::c_int = 0;
pub const AF_LOCAL: ::c_int = 1;
pub const AF_UNIX: ::c_int = AF_LOCAL;
pub const AF_INET: ::c_int = 2;
pub const AF_IMPLINK: ::c_int = 3;
pub const AF_PUP: ::c_int = 4;
pub const AF_CHAOS: ::c_int = 5;
pub const AF_NETBIOS: ::c_int = 6;
pub const AF_ISO: ::c_int = 7;
pub const AF_OSI: ::c_int = AF_ISO;
pub const AF_ECMA: ::c_int = 8;
pub const AF_DATAKIT: ::c_int = 9;
pub const AF_CCITT: ::c_int = 10;
pub const AF_SNA: ::c_int = 11;
pub const AF_DECnet: ::c_int = 12;
pub const AF_DLI: ::c_int = 13;
pub const AF_LAT: ::c_int = 14;
pub const AF_HYLINK: ::c_int = 15;
pub const AF_APPLETALK: ::c_int = 16;
pub const AF_ROUTE: ::c_int = 17;
pub const AF_LINK: ::c_int = 18;
pub const pseudo_AF_XTP: ::c_int = 19;
pub const AF_COIP: ::c_int = 20;
pub const AF_CNT: ::c_int = 21;
pub const pseudo_AF_RTIP: ::c_int = 22;
pub const AF_IPX: ::c_int = 23;
pub const AF_SIP: ::c_int = 24;
pub const pseudo_AF_PIP: ::c_int = 25;
pub const AF_ISDN: ::c_int = 26;
pub const AF_E164: ::c_int = AF_ISDN;
pub const pseudo_AF_KEY: ::c_int = 27;
pub const AF_INET6: ::c_int = 28;
pub const AF_NATM: ::c_int = 29;
pub const AF_ATM: ::c_int = 30;
pub const pseudo_AF_HDRCMPLT: ::c_int = 31;
pub const AF_NETGRAPH: ::c_int = 32;
pub const PF_UNSPEC: ::c_int = AF_UNSPEC;
pub const PF_LOCAL: ::c_int = AF_LOCAL;
pub const PF_UNIX: ::c_int = PF_LOCAL;
pub const PF_INET: ::c_int = AF_INET;
pub const PF_IMPLINK: ::c_int = AF_IMPLINK;
pub const PF_PUP: ::c_int = AF_PUP;
pub const PF_CHAOS: ::c_int = AF_CHAOS;
pub const PF_NETBIOS: ::c_int = AF_NETBIOS;
pub const PF_ISO: ::c_int = AF_ISO;
pub const PF_OSI: ::c_int = AF_ISO;
pub const PF_ECMA: ::c_int = AF_ECMA;
pub const PF_DATAKIT: ::c_int = AF_DATAKIT;
pub const PF_CCITT: ::c_int = AF_CCITT;
pub const PF_SNA: ::c_int = AF_SNA;
pub const PF_DECnet: ::c_int = AF_DECnet;
pub const PF_DLI: ::c_int = AF_DLI;
pub const PF_LAT: ::c_int = AF_LAT;
pub const PF_HYLINK: ::c_int = AF_HYLINK;
pub const PF_APPLETALK: ::c_int = AF_APPLETALK;
pub const PF_ROUTE: ::c_int = AF_ROUTE;
pub const PF_LINK: ::c_int = AF_LINK;
pub const PF_XTP: ::c_int = pseudo_AF_XTP;
pub const PF_COIP: ::c_int = AF_COIP;
pub const PF_CNT: ::c_int = AF_CNT;
pub const PF_SIP: ::c_int = AF_SIP;
pub const PF_IPX: ::c_int = AF_IPX;
pub const PF_RTIP: ::c_int = pseudo_AF_RTIP;
pub const PF_PIP: ::c_int = pseudo_AF_PIP;
pub const PF_ISDN: ::c_int = AF_ISDN;
pub const PF_KEY: ::c_int = pseudo_AF_KEY;
pub const PF_INET6: ::c_int = AF_INET6;
pub const PF_NATM: ::c_int = AF_NATM;
pub const PF_ATM: ::c_int = AF_ATM;
pub const PF_NETGRAPH: ::c_int = AF_NETGRAPH;
pub const PT_TRACE_ME: ::c_int = 0;
pub const PT_READ_I: ::c_int = 1;
pub const PT_READ_D: ::c_int = 2;
pub const PT_WRITE_I: ::c_int = 4;
pub const PT_WRITE_D: ::c_int = 5;
pub const PT_CONTINUE: ::c_int = 7;
pub const PT_KILL: ::c_int = 8;
pub const PT_STEP: ::c_int = 9;
pub const PT_ATTACH: ::c_int = 10;
pub const PT_DETACH: ::c_int = 11;
pub const PT_IO: ::c_int = 12;
pub const SOMAXCONN: ::c_int = 128;
pub const MSG_OOB: ::c_int = 0x00000001;
pub const MSG_PEEK: ::c_int = 0x00000002;
pub const MSG_DONTROUTE: ::c_int = 0x00000004;
pub const MSG_EOR: ::c_int = 0x00000008;
pub const MSG_TRUNC: ::c_int = 0x00000010;
pub const MSG_CTRUNC: ::c_int = 0x00000020;
pub const MSG_WAITALL: ::c_int = 0x00000040;
pub const MSG_DONTWAIT: ::c_int = 0x00000080;
pub const MSG_EOF: ::c_int = 0x00000100;
pub const SCM_TIMESTAMP: ::c_int = 0x02;
pub const SOCK_STREAM: ::c_int = 1;
pub const SOCK_DGRAM: ::c_int = 2;
pub const SOCK_RAW: ::c_int = 3;
pub const SOCK_RDM: ::c_int = 4;
pub const SOCK_SEQPACKET: ::c_int = 5;
pub const SOCK_CLOEXEC: ::c_int = 0x10000000;
pub const SOCK_NONBLOCK: ::c_int = 0x20000000;
pub const SOCK_MAXADDRLEN: ::c_int = 255;
pub const IP_TTL: ::c_int = 4;
pub const IP_HDRINCL: ::c_int = 2;
pub const IP_RECVDSTADDR: ::c_int = 7;
pub const IP_SENDSRCADDR: ::c_int = IP_RECVDSTADDR;
pub const IP_ADD_MEMBERSHIP: ::c_int = 12;
pub const IP_DROP_MEMBERSHIP: ::c_int = 13;
pub const IP_RECVIF: ::c_int = 20;
pub const IPV6_JOIN_GROUP: ::c_int = 12;
pub const IPV6_LEAVE_GROUP: ::c_int = 13;
pub const IPV6_RECVPKTINFO: ::c_int = 36;
pub const IPV6_PKTINFO: ::c_int = 46;
pub const IPV6_RECVTCLASS: ::c_int = 57;
pub const IPV6_TCLASS: ::c_int = 61;
pub const TCP_NOPUSH: ::c_int = 4;
pub const TCP_NOOPT: ::c_int = 8;
pub const TCP_KEEPIDLE: ::c_int = 256;
pub const TCP_KEEPINTVL: ::c_int = 512;
pub const TCP_KEEPCNT: ::c_int = 1024;
pub const SOL_SOCKET: ::c_int = 0xffff;
pub const SO_DEBUG: ::c_int = 0x01;
pub const SO_ACCEPTCONN: ::c_int = 0x0002;
pub const SO_REUSEADDR: ::c_int = 0x0004;
pub const SO_KEEPALIVE: ::c_int = 0x0008;
pub const SO_DONTROUTE: ::c_int = 0x0010;
pub const SO_BROADCAST: ::c_int = 0x0020;
pub const SO_USELOOPBACK: ::c_int = 0x0040;
pub const SO_LINGER: ::c_int = 0x0080;
pub const SO_OOBINLINE: ::c_int = 0x0100;
pub const SO_REUSEPORT: ::c_int = 0x0200;
pub const SO_TIMESTAMP: ::c_int = 0x0400;
pub const SO_NOSIGPIPE: ::c_int = 0x0800;
pub const SO_ACCEPTFILTER: ::c_int = 0x1000;
pub const SO_SNDBUF: ::c_int = 0x1001;
pub const SO_RCVBUF: ::c_int = 0x1002;
pub const SO_SNDLOWAT: ::c_int = 0x1003;
pub const SO_RCVLOWAT: ::c_int = 0x1004;
pub const SO_SNDTIMEO: ::c_int = 0x1005;
pub const SO_RCVTIMEO: ::c_int = 0x1006;
pub const SO_ERROR: ::c_int = 0x1007;
pub const SO_TYPE: ::c_int = 0x1008;
pub const SHUT_RD: ::c_int = 0;
pub const SHUT_WR: ::c_int = 1;
pub const SHUT_RDWR: ::c_int = 2;
pub const LOCK_SH: ::c_int = 1;
pub const LOCK_EX: ::c_int = 2;
pub const LOCK_NB: ::c_int = 4;
pub const LOCK_UN: ::c_int = 8;
pub const MAP_COPY: ::c_int = 0x0002;
#[doc(hidden)]
#[deprecated(since="0.2.54",note="Removed in FreeBSD 11")]
pub const MAP_RENAME: ::c_int = 0x0020;
#[doc(hidden)]
#[deprecated(since="0.2.54",note="Removed in FreeBSD 11")]
pub const MAP_NORESERVE: ::c_int = 0x0040;
pub const MAP_HASSEMAPHORE: ::c_int = 0x0200;
pub const MAP_STACK: ::c_int = 0x0400;
pub const MAP_NOSYNC: ::c_int = 0x0800;
pub const MAP_NOCORE: ::c_int = 0x020000;
pub const IPPROTO_RAW: ::c_int = 255;
pub const _PC_LINK_MAX: ::c_int = 1;
pub const _PC_MAX_CANON: ::c_int = 2;
pub const _PC_MAX_INPUT: ::c_int = 3;
pub const _PC_NAME_MAX: ::c_int = 4;
pub const _PC_PATH_MAX: ::c_int = 5;
pub const _PC_PIPE_BUF: ::c_int = 6;
pub const _PC_CHOWN_RESTRICTED: ::c_int = 7;
pub const _PC_NO_TRUNC: ::c_int = 8;
pub const _PC_VDISABLE: ::c_int = 9;
pub const _PC_ALLOC_SIZE_MIN: ::c_int = 10;
pub const _PC_FILESIZEBITS: ::c_int = 12;
pub const _PC_REC_INCR_XFER_SIZE: ::c_int = 14;
pub const _PC_REC_MAX_XFER_SIZE: ::c_int = 15;
pub const _PC_REC_MIN_XFER_SIZE: ::c_int = 16;
pub const _PC_REC_XFER_ALIGN: ::c_int = 17;
pub const _PC_SYMLINK_MAX: ::c_int = 18;
pub const _PC_MIN_HOLE_SIZE: ::c_int = 21;
pub const _PC_ASYNC_IO: ::c_int = 53;
pub const _PC_PRIO_IO: ::c_int = 54;
pub const _PC_SYNC_IO: ::c_int = 55;
pub const _PC_ACL_EXTENDED: ::c_int = 59;
pub const _PC_ACL_PATH_MAX: ::c_int = 60;
pub const _PC_CAP_PRESENT: ::c_int = 61;
pub const _PC_INF_PRESENT: ::c_int = 62;
pub const _PC_MAC_PRESENT: ::c_int = 63;
pub const _SC_ARG_MAX: ::c_int = 1;
pub const _SC_CHILD_MAX: ::c_int = 2;
pub const _SC_CLK_TCK: ::c_int = 3;
pub const _SC_NGROUPS_MAX: ::c_int = 4;
pub const _SC_OPEN_MAX: ::c_int = 5;
pub const _SC_JOB_CONTROL: ::c_int = 6;
pub const _SC_SAVED_IDS: ::c_int = 7;
pub const _SC_VERSION: ::c_int = 8;
pub const _SC_BC_BASE_MAX: ::c_int = 9;
pub const _SC_BC_DIM_MAX: ::c_int = 10;
pub const _SC_BC_SCALE_MAX: ::c_int = 11;
pub const _SC_BC_STRING_MAX: ::c_int = 12;
pub const _SC_COLL_WEIGHTS_MAX: ::c_int = 13;
pub const _SC_EXPR_NEST_MAX: ::c_int = 14;
pub const _SC_LINE_MAX: ::c_int = 15;
pub const _SC_RE_DUP_MAX: ::c_int = 16;
pub const _SC_2_VERSION: ::c_int = 17;
pub const _SC_2_C_BIND: ::c_int = 18;
pub const _SC_2_C_DEV: ::c_int = 19;
pub const _SC_2_CHAR_TERM: ::c_int = 20;
pub const _SC_2_FORT_DEV: ::c_int = 21;
pub const _SC_2_FORT_RUN: ::c_int = 22;
pub const _SC_2_LOCALEDEF: ::c_int = 23;
pub const _SC_2_SW_DEV: ::c_int = 24;
pub const _SC_2_UPE: ::c_int = 25;
pub const _SC_STREAM_MAX: ::c_int = 26;
pub const _SC_TZNAME_MAX: ::c_int = 27;
pub const _SC_ASYNCHRONOUS_IO: ::c_int = 28;
pub const _SC_MAPPED_FILES: ::c_int = 29;
pub const _SC_MEMLOCK: ::c_int = 30;
pub const _SC_MEMLOCK_RANGE: ::c_int = 31;
pub const _SC_MEMORY_PROTECTION: ::c_int = 32;
pub const _SC_MESSAGE_PASSING: ::c_int = 33;
pub const _SC_PRIORITIZED_IO: ::c_int = 34;
pub const _SC_PRIORITY_SCHEDULING: ::c_int = 35;
pub const _SC_REALTIME_SIGNALS: ::c_int = 36;
pub const _SC_SEMAPHORES: ::c_int = 37;
pub const _SC_FSYNC: ::c_int = 38;
pub const _SC_SHARED_MEMORY_OBJECTS: ::c_int = 39;
pub const _SC_SYNCHRONIZED_IO: ::c_int = 40;
pub const _SC_TIMERS: ::c_int = 41;
pub const _SC_AIO_LISTIO_MAX: ::c_int = 42;
pub const _SC_AIO_MAX: ::c_int = 43;
pub const _SC_AIO_PRIO_DELTA_MAX: ::c_int = 44;
pub const _SC_DELAYTIMER_MAX: ::c_int = 45;
pub const _SC_MQ_OPEN_MAX: ::c_int = 46;
pub const _SC_PAGESIZE: ::c_int = 47;
pub const _SC_PAGE_SIZE: ::c_int = _SC_PAGESIZE;
pub const _SC_RTSIG_MAX: ::c_int = 48;
pub const _SC_SEM_NSEMS_MAX: ::c_int = 49;
pub const _SC_SEM_VALUE_MAX: ::c_int = 50;
pub const _SC_SIGQUEUE_MAX: ::c_int = 51;
pub const _SC_TIMER_MAX: ::c_int = 52;
pub const _SC_IOV_MAX: ::c_int = 56;
pub const _SC_NPROCESSORS_CONF: ::c_int = 57;
pub const _SC_2_PBS: ::c_int = 59;
pub const _SC_2_PBS_ACCOUNTING: ::c_int = 60;
pub const _SC_2_PBS_CHECKPOINT: ::c_int = 61;
pub const _SC_2_PBS_LOCATE: ::c_int = 62;
pub const _SC_2_PBS_MESSAGE: ::c_int = 63;
pub const _SC_2_PBS_TRACK: ::c_int = 64;
pub const _SC_ADVISORY_INFO: ::c_int = 65;
pub const _SC_BARRIERS: ::c_int = 66;
pub const _SC_CLOCK_SELECTION: ::c_int = 67;
pub const _SC_CPUTIME: ::c_int = 68;
pub const _SC_FILE_LOCKING: ::c_int = 69;
pub const _SC_NPROCESSORS_ONLN: ::c_int = 58;
pub const _SC_GETGR_R_SIZE_MAX: ::c_int = 70;
pub const _SC_GETPW_R_SIZE_MAX: ::c_int = 71;
pub const _SC_HOST_NAME_MAX: ::c_int = 72;
pub const _SC_LOGIN_NAME_MAX: ::c_int = 73;
pub const _SC_MONOTONIC_CLOCK: ::c_int = 74;
pub const _SC_MQ_PRIO_MAX: ::c_int = 75;
pub const _SC_READER_WRITER_LOCKS: ::c_int = 76;
pub const _SC_REGEXP: ::c_int = 77;
pub const _SC_SHELL: ::c_int = 78;
pub const _SC_SPAWN: ::c_int = 79;
pub const _SC_SPIN_LOCKS: ::c_int = 80;
pub const _SC_SPORADIC_SERVER: ::c_int = 81;
pub const _SC_THREAD_ATTR_STACKADDR: ::c_int = 82;
pub const _SC_THREAD_ATTR_STACKSIZE: ::c_int = 83;
pub const _SC_THREAD_DESTRUCTOR_ITERATIONS: ::c_int = 85;
pub const _SC_THREAD_KEYS_MAX: ::c_int = 86;
pub const _SC_THREAD_PRIO_INHERIT: ::c_int = 87;
pub const _SC_THREAD_PRIO_PROTECT: ::c_int = 88;
pub const _SC_THREAD_PRIORITY_SCHEDULING: ::c_int = 89;
pub const _SC_THREAD_PROCESS_SHARED: ::c_int = 90;
pub const _SC_THREAD_SAFE_FUNCTIONS: ::c_int = 91;
pub const _SC_THREAD_SPORADIC_SERVER: ::c_int = 92;
pub const _SC_THREAD_STACK_MIN: ::c_int = 93;
pub const _SC_THREAD_THREADS_MAX: ::c_int = 94;
pub const _SC_TIMEOUTS: ::c_int = 95;
pub const _SC_THREADS: ::c_int = 96;
pub const _SC_TRACE: ::c_int = 97;
pub const _SC_TRACE_EVENT_FILTER: ::c_int = 98;
pub const _SC_TRACE_INHERIT: ::c_int = 99;
pub const _SC_TRACE_LOG: ::c_int = 100;
pub const _SC_TTY_NAME_MAX: ::c_int = 101;
pub const _SC_TYPED_MEMORY_OBJECTS: ::c_int = 102;
pub const _SC_V6_ILP32_OFF32: ::c_int = 103;
pub const _SC_V6_ILP32_OFFBIG: ::c_int = 104;
pub const _SC_V6_LP64_OFF64: ::c_int = 105;
pub const _SC_V6_LPBIG_OFFBIG: ::c_int = 106;
pub const _SC_ATEXIT_MAX: ::c_int = 107;
pub const _SC_XOPEN_CRYPT: ::c_int = 108;
pub const _SC_XOPEN_ENH_I18N: ::c_int = 109;
pub const _SC_XOPEN_LEGACY: ::c_int = 110;
pub const _SC_XOPEN_REALTIME: ::c_int = 111;
pub const _SC_XOPEN_REALTIME_THREADS: ::c_int = 112;
pub const _SC_XOPEN_SHM: ::c_int = 113;
pub const _SC_XOPEN_STREAMS: ::c_int = 114;
pub const _SC_XOPEN_UNIX: ::c_int = 115;
pub const _SC_XOPEN_VERSION: ::c_int = 116;
pub const _SC_XOPEN_XCU_VERSION: ::c_int = 117;
pub const _SC_IPV6: ::c_int = 118;
pub const _SC_RAW_SOCKETS: ::c_int = 119;
pub const _SC_SYMLOOP_MAX: ::c_int = 120;
pub const _SC_PHYS_PAGES: ::c_int = 121;
pub const PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = 0 as *mut _;
pub const PTHREAD_COND_INITIALIZER: pthread_cond_t = 0 as *mut _;
pub const PTHREAD_RWLOCK_INITIALIZER: pthread_rwlock_t = 0 as *mut _;
pub const PTHREAD_MUTEX_ERRORCHECK: ::c_int = 1;
pub const PTHREAD_MUTEX_RECURSIVE: ::c_int = 2;
pub const PTHREAD_MUTEX_NORMAL: ::c_int = 3;
pub const PTHREAD_MUTEX_DEFAULT: ::c_int = PTHREAD_MUTEX_ERRORCHECK;
pub const SCHED_FIFO: ::c_int = 1;
pub const SCHED_OTHER: ::c_int = 2;
pub const SCHED_RR: ::c_int = 3;
pub const FD_SETSIZE: usize = 1024;
pub const ST_NOSUID: ::c_ulong = 2;
pub const NI_MAXHOST: ::size_t = 1025;
pub const RTLD_LOCAL: ::c_int = 0;
pub const RTLD_NODELETE: ::c_int = 0x1000;
pub const RTLD_NOLOAD: ::c_int = 0x2000;
pub const RTLD_GLOBAL: ::c_int = 0x100;
pub const LOG_NTP: ::c_int = 12 << 3;
pub const LOG_SECURITY: ::c_int = 13 << 3;
pub const LOG_CONSOLE: ::c_int = 14 << 3;
pub const LOG_NFACILITIES: ::c_int = 24;
pub const TIOCEXCL: ::c_uint = 0x2000740d;
pub const TIOCNXCL: ::c_uint = 0x2000740e;
pub const TIOCFLUSH: ::c_ulong = 0x80047410;
pub const TIOCGETA: ::c_uint = 0x402c7413;
pub const TIOCSETA: ::c_ulong = 0x802c7414;
pub const TIOCSETAW: ::c_ulong = 0x802c7415;
pub const TIOCSETAF: ::c_ulong = 0x802c7416;
pub const TIOCGETD: ::c_uint = 0x4004741a;
pub const TIOCSETD: ::c_ulong = 0x8004741b;
pub const TIOCGDRAINWAIT: ::c_uint = 0x40047456;
pub const TIOCSDRAINWAIT: ::c_ulong = 0x80047457;
pub const TIOCTIMESTAMP: ::c_uint = 0x40107459;
pub const TIOCMGDTRWAIT: ::c_uint = 0x4004745a;
pub const TIOCMSDTRWAIT: ::c_ulong = 0x8004745b;
pub const TIOCDRAIN: ::c_uint = 0x2000745e;
pub const TIOCEXT: ::c_ulong = 0x80047460;
pub const TIOCSCTTY: ::c_uint = 0x20007461;
pub const TIOCCONS: ::c_ulong = 0x80047462;
pub const TIOCGSID: ::c_uint = 0x40047463;
pub const TIOCSTAT: ::c_uint = 0x20007465;
pub const TIOCUCNTL: ::c_ulong = 0x80047466;
pub const TIOCSWINSZ: ::c_ulong = 0x80087467;
pub const TIOCGWINSZ: ::c_uint = 0x40087468;
pub const TIOCMGET: ::c_uint = 0x4004746a;
pub const TIOCM_LE: ::c_int = 0x1;
pub const TIOCM_DTR: ::c_int = 0x2;
pub const TIOCM_RTS: ::c_int = 0x4;
pub const TIOCM_ST: ::c_int = 0x8;
pub const TIOCM_SR: ::c_int = 0x10;
pub const TIOCM_CTS: ::c_int = 0x20;
pub const TIOCM_RI: ::c_int = 0x80;
pub const TIOCM_DSR: ::c_int = 0x100;
pub const TIOCM_CD: ::c_int = 0x40;
pub const TIOCM_CAR: ::c_int = 0x40;
pub const TIOCM_RNG: ::c_int = 0x80;
pub const TIOCMBIC: ::c_ulong = 0x8004746b;
pub const TIOCMBIS: ::c_ulong = 0x8004746c;
pub const TIOCMSET: ::c_ulong = 0x8004746d;
pub const TIOCSTART: ::c_uint = 0x2000746e;
pub const TIOCSTOP: ::c_uint = 0x2000746f;
pub const TIOCPKT: ::c_ulong = 0x80047470;
pub const TIOCPKT_DATA: ::c_int = 0x0;
pub const TIOCPKT_FLUSHREAD: ::c_int = 0x1;
pub const TIOCPKT_FLUSHWRITE: ::c_int = 0x2;
pub const TIOCPKT_STOP: ::c_int = 0x4;
pub const TIOCPKT_START: ::c_int = 0x8;
pub const TIOCPKT_NOSTOP: ::c_int = 0x10;
pub const TIOCPKT_DOSTOP: ::c_int = 0x20;
pub const TIOCPKT_IOCTL: ::c_int = 0x40;
pub const TIOCNOTTY: ::c_uint = 0x20007471;
pub const TIOCSTI: ::c_ulong = 0x80017472;
pub const TIOCOUTQ: ::c_uint = 0x40047473;
pub const TIOCSPGRP: ::c_ulong = 0x80047476;
pub const TIOCGPGRP: ::c_uint = 0x40047477;
pub const TIOCCDTR: ::c_uint = 0x20007478;
pub const TIOCSDTR: ::c_uint = 0x20007479;
pub const TIOCCBRK: ::c_uint = 0x2000747a;
pub const TIOCSBRK: ::c_uint = 0x2000747b;
pub const TTYDISC: ::c_int = 0x0;
pub const SLIPDISC: ::c_int = 0x4;
pub const PPPDISC: ::c_int = 0x5;
pub const NETGRAPHDISC: ::c_int = 0x6;
pub const FIODTYPE: ::c_ulong = 0x4004667a;
pub const FIOGETLBA: ::c_ulong = 0x40046679;
pub const FIODGNAME: ::c_ulong = 0x80106678;
pub const B0: speed_t = 0;
pub const B50: speed_t = 50;
pub const B75: speed_t = 75;
pub const B110: speed_t = 110;
pub const B134: speed_t = 134;
pub const B150: speed_t = 150;
pub const B200: speed_t = 200;
pub const B300: speed_t = 300;
pub const B600: speed_t = 600;
pub const B1200: speed_t = 1200;
pub const B1800: speed_t = 1800;
pub const B2400: speed_t = 2400;
pub const B4800: speed_t = 4800;
pub const B9600: speed_t = 9600;
pub const B19200: speed_t = 19200;
pub const B38400: speed_t = 38400;
pub const B7200: speed_t = 7200;
pub const B14400: speed_t = 14400;
pub const B28800: speed_t = 28800;
pub const B57600: speed_t = 57600;
pub const B76800: speed_t = 76800;
pub const B115200: speed_t = 115200;
pub const B230400: speed_t = 230400;
pub const EXTA: speed_t = 19200;
pub const EXTB: speed_t = 38400;
pub const SEM_FAILED: *mut sem_t = 0 as *mut sem_t;
// Hardware flow-control bits for the termios `c_cflag` field (RTS/CTS and
// DTR/DSR/CD handshaking). CRTSCTS is the union of the CTS and RTS bits.
pub const CRTSCTS: ::tcflag_t = 0x00030000;
pub const CCTS_OFLOW: ::tcflag_t = 0x00010000;
pub const CRTS_IFLOW: ::tcflag_t = 0x00020000;
pub const CDTR_IFLOW: ::tcflag_t = 0x00040000;
pub const CDSR_OFLOW: ::tcflag_t = 0x00080000;
pub const CCAR_OFLOW: ::tcflag_t = 0x00100000;
// Index of the alternate ERASE control character in the termios `c_cc` array.
pub const VERASE2: usize = 7;
// Output post-processing flags for the termios `c_oflag` field.
pub const OCRNL: ::tcflag_t = 0x10;
pub const ONOCR: ::tcflag_t = 0x20;
pub const ONLRET: ::tcflag_t = 0x40;
// Maximum number of groups carried in a cmsgcred control message.
pub const CMGROUP_MAX: usize = 16;
// https://github.com/freebsd/freebsd/blob/master/sys/net/bpf.h
// sizeof(long)
pub const BPF_ALIGNMENT: ::c_int = 8;
// Values for rtprio struct (prio field) and syscall (function argument)
pub const RTP_PRIO_MIN: ::c_ushort = 0;
pub const RTP_PRIO_MAX: ::c_ushort = 31;
pub const RTP_LOOKUP: ::c_int = 0;
pub const RTP_SET: ::c_int = 1;
// Flags for chflags(2)
// UF_* flags occupy the low 16 bits and may be set by the file's owner;
// SF_* flags occupy the high 16 bits and may only be set by the superuser.
pub const UF_SETTABLE: ::c_ulong = 0x0000ffff;
pub const UF_NODUMP: ::c_ulong = 0x00000001;
pub const UF_IMMUTABLE: ::c_ulong = 0x00000002;
pub const UF_APPEND: ::c_ulong = 0x00000004;
pub const UF_OPAQUE: ::c_ulong = 0x00000008;
pub const UF_NOUNLINK: ::c_ulong = 0x00000010;
pub const SF_SETTABLE: ::c_ulong = 0xffff0000;
pub const SF_ARCHIVED: ::c_ulong = 0x00010000;
pub const SF_IMMUTABLE: ::c_ulong = 0x00020000;
pub const SF_APPEND: ::c_ulong = 0x00040000;
pub const SF_NOUNLINK: ::c_ulong = 0x00100000;
// Flag for timer_settime(2): the supplied expiration is an absolute time.
pub const TIMER_ABSTIME: ::c_int = 1;
// Wait-status decoding helpers (the C WIF* macros from <sys/wait.h>),
// exported as inline-able functions by the `f!` macro.
f! {
    // "Continued" after SIGCONT is reported as the fixed status value 0x13.
    pub fn WIFCONTINUED(status: ::c_int) -> bool {
        status == 0x13
    }
    // For a stopped process, the stop signal lives in the second byte
    // of the status word.
    pub fn WSTOPSIG(status: ::c_int) -> ::c_int {
        status >> 8
    }
    // Terminated by a signal: the low 7 bits are neither 0o177 (stopped)
    // nor 0 (normal exit).
    pub fn WIFSIGNALED(status: ::c_int) -> bool {
        (status & 0o177) != 0o177 && (status & 0o177) != 0
    }
    // Stopped: the low 7 bits are all ones (0o177).
    pub fn WIFSTOPPED(status: ::c_int) -> bool {
        (status & 0o177) == 0o177
    }
}
extern {
pub fn sem_destroy(sem: *mut sem_t) -> ::c_int;
pub fn sem_init(sem: *mut sem_t,
pshared: ::c_int,
value: ::c_uint)
-> ::c_int;
pub fn daemon(nochdir: ::c_int, noclose: ::c_int) -> ::c_int;
pub fn gettimeofday(tp: *mut ::timeval,
tz: *mut ::timezone) -> ::c_int;
pub fn accept4(s: ::c_int, addr: *mut ::sockaddr,
addrlen: *mut ::socklen_t, flags: ::c_int) -> ::c_int;
pub fn aio_read(aiocbp: *mut aiocb) -> ::c_int;
pub fn aio_write(aiocbp: *mut aiocb) -> ::c_int;
pub fn aio_fsync(op: ::c_int, aiocbp: *mut aiocb) -> ::c_int;
pub fn aio_error(aiocbp: *const aiocb) -> ::c_int;
pub fn aio_return(aiocbp: *mut aiocb) -> ::ssize_t;
pub fn aio_suspend(aiocb_list: *const *const aiocb, nitems: ::c_int,
timeout: *const ::timespec) -> ::c_int;
pub fn aio_cancel(fd: ::c_int, aiocbp: *mut aiocb) -> ::c_int;
pub fn chflags(path: *const ::c_char, flags: ::c_ulong) -> ::c_int;
pub fn chflagsat(fd: ::c_int, path: *const ::c_char, flags: ::c_ulong,
atflag: ::c_int) -> ::c_int;
pub fn dirfd(dirp: *mut ::DIR) -> ::c_int;
pub fn duplocale(base: ::locale_t) -> ::locale_t;
pub fn endutxent();
pub fn fchflags(fd: ::c_int, flags: ::c_ulong) -> ::c_int;
pub fn futimens(fd: ::c_int, times: *const ::timespec) -> ::c_int;
pub fn getdomainname(name: *mut ::c_char, len: ::c_int) -> ::c_int;
pub fn getgrent_r(grp: *mut ::group,
buf: *mut ::c_char,
buflen: ::size_t,
result: *mut *mut ::group) -> ::c_int;
#[cfg_attr(target_os = "netbsd", link_name = "__getpwent_r50")]
pub fn getpwent_r(pwd: *mut ::passwd,
buf: *mut ::c_char,
buflen: ::size_t,
result: *mut *mut ::passwd) -> ::c_int;
pub fn getgrouplist(name: *const ::c_char,
basegid: ::gid_t,
groups: *mut ::gid_t,
ngroups: *mut ::c_int) -> ::c_int;
pub fn getnameinfo(sa: *const ::sockaddr,
salen: ::socklen_t,
host: *mut ::c_char,
hostlen: ::size_t,
serv: *mut ::c_char,
servlen: ::size_t,
flags: ::c_int) -> ::c_int;
pub fn getpriority(which: ::c_int, who: ::c_int) -> ::c_int;
pub fn getutxent() -> *mut utmpx;
pub fn getutxid(ut: *const utmpx) -> *mut utmpx;
pub fn getutxline(ut: *const utmpx) -> *mut utmpx;
pub fn initgroups(name: *const ::c_char, basegid: ::gid_t) -> ::c_int;
#[cfg_attr(
all(target_os = "freebsd", not(freebsd12)),
link_name = "kevent@FBSD_1.0"
)]
pub fn kevent(kq: ::c_int,
changelist: *const ::kevent,
nchanges: ::c_int,
eventlist: *mut ::kevent,
nevents: ::c_int,
timeout: *const ::timespec) -> ::c_int;
pub fn lchflags(path: *const ::c_char, flags: ::c_ulong) -> ::c_int;
pub fn lio_listio(mode: ::c_int, aiocb_list: *const *mut aiocb,
nitems: ::c_int, sevp: *mut sigevent) -> ::c_int;
pub fn lutimes(file: *const ::c_char, times: *const ::timeval) -> ::c_int;
pub fn memrchr(cx: *const ::c_void,
c: ::c_int,
n: ::size_t) -> *mut ::c_void;
pub fn mkfifoat(dirfd: ::c_int, pathname: *const ::c_char,
mode: ::mode_t) -> ::c_int;
#[cfg_attr(
all(target_os = "freebsd", not(freebsd12)),
link_name = "mknodat@FBSD_1.1"
)]
pub fn mknodat(dirfd: ::c_int, pathname: *const ::c_char,
mode: ::mode_t, dev: dev_t) -> ::c_int;
pub fn mq_close(mqd: ::mqd_t) -> ::c_int;
pub fn mq_getattr(mqd: ::mqd_t, attr: *mut ::mq_attr) -> ::c_int;
pub fn mq_notify(mqd: ::mqd_t, notification: *const ::sigevent) -> ::c_int;
pub fn mq_open(name: *const ::c_char, oflag: ::c_int, ...) -> ::mqd_t;
pub fn mq_receive(mqd: ::mqd_t,
msg_ptr: *mut ::c_char,
msg_len: ::size_t,
msq_prio: *mut ::c_uint) -> ::ssize_t;
pub fn mq_send(mqd: ::mqd_t,
msg_ptr: *const ::c_char,
msg_len: ::size_t,
msq_prio: ::c_uint) -> ::c_int;
pub fn mq_setattr(mqd: ::mqd_t,
newattr: *const ::mq_attr,
oldattr: *mut ::mq_attr) -> ::c_int;
pub fn mq_timedreceive(mqd: ::mqd_t,
msg_ptr: *mut ::c_char,
msg_len: ::size_t,
msq_prio: *mut ::c_uint,
abs_timeout: *const ::timespec) -> ::ssize_t;
pub fn mq_timedsend(mqd: ::mqd_t,
msg_ptr: *const ::c_char,
msg_len: ::size_t,
msq_prio: ::c_uint,
abs_timeout: *const ::timespec) -> ::c_int;
pub fn mq_unlink(name: *const ::c_char) -> ::c_int;
pub fn mincore(addr: *const ::c_void, len: ::size_t,
vec: *mut ::c_char) -> ::c_int;
pub fn newlocale(mask: ::c_int,
locale: *const ::c_char,
base: ::locale_t) -> ::locale_t;
pub fn nl_langinfo_l(item: ::nl_item, locale: ::locale_t) -> *mut ::c_char;
pub fn pipe2(fds: *mut ::c_int, flags: ::c_int) -> ::c_int;
pub fn ppoll(fds: *mut ::pollfd,
nfds: ::nfds_t,
timeout: *const ::timespec,
sigmask: *const sigset_t) -> ::c_int;
pub fn preadv(fd: ::c_int,
iov: *const ::iovec,
iovcnt: ::c_int,
offset: ::off_t) -> ::ssize_t;
pub fn pthread_attr_get_np(tid: ::pthread_t,
attr: *mut ::pthread_attr_t) -> ::c_int;
pub fn pthread_attr_getguardsize(attr: *const ::pthread_attr_t,
guardsize: *mut ::size_t) -> ::c_int;
pub fn pthread_attr_getstack(attr: *const ::pthread_attr_t,
stackaddr: *mut *mut ::c_void,
stacksize: *mut ::size_t) -> ::c_int;
pub fn pthread_condattr_getclock(attr: *const pthread_condattr_t,
clock_id: *mut clockid_t) -> ::c_int;
pub fn pthread_condattr_getpshared(attr: *const pthread_condattr_t,
pshared: *mut ::c_int) -> ::c_int;
pub fn pthread_condattr_setclock(attr: *mut pthread_condattr_t,
clock_id: ::clockid_t) -> ::c_int;
pub fn pthread_condattr_setpshared(attr: *mut pthread_condattr_t,
pshared: ::c_int) -> ::c_int;
pub fn pthread_mutex_timedlock(lock: *mut pthread_mutex_t,
abstime: *const ::timespec) -> ::c_int;
pub fn pthread_mutexattr_getpshared(attr: *const pthread_mutexattr_t,
pshared: *mut ::c_int) -> ::c_int;
pub fn pthread_mutexattr_setpshared(attr: *mut pthread_mutexattr_t,
pshared: ::c_int) -> ::c_int;
pub fn pthread_rwlockattr_getpshared(attr: *const pthread_rwlockattr_t,
val: *mut ::c_int) -> ::c_int;
pub fn pthread_rwlockattr_setpshared(attr: *mut pthread_rwlockattr_t,
val: ::c_int) -> ::c_int;
pub fn pthread_set_name_np(tid: ::pthread_t, name: *const ::c_char);
pub fn ptrace(request: ::c_int,
pid: ::pid_t,
addr: *mut ::c_char,
data: ::c_int) -> ::c_int;
pub fn pututxline(ut: *const utmpx) -> *mut utmpx;
pub fn pwritev(fd: ::c_int,
iov: *const ::iovec,
iovcnt: ::c_int,
offset: ::off_t) -> ::ssize_t;
pub fn querylocale(mask: ::c_int, loc: ::locale_t) -> *const ::c_char;
pub fn rtprio(function: ::c_int, pid: ::pid_t, rtp: *mut rtprio) -> ::c_int;
pub fn sched_getscheduler(pid: ::pid_t) -> ::c_int;
pub fn sched_setscheduler(pid: ::pid_t,
policy: ::c_int,
param: *const ::sched_param) -> ::c_int;
pub fn sem_getvalue(sem: *mut sem_t,
sval: *mut ::c_int) -> ::c_int;
pub fn sem_timedwait(sem: *mut sem_t,
abstime: *const ::timespec) -> ::c_int;
pub fn sendfile(fd: ::c_int,
s: ::c_int,
offset: ::off_t,
nbytes: ::size_t,
hdtr: *mut ::sf_hdtr,
sbytes: *mut ::off_t,
flags: ::c_int) -> ::c_int;
pub fn setdomainname(name: *const ::c_char, len: ::c_int) -> ::c_int;
pub fn sethostname(name: *const ::c_char, len: ::c_int) -> ::c_int;
pub fn setpriority(which: ::c_int, who: ::c_int, prio: ::c_int) -> ::c_int;
pub fn setresgid(rgid: ::gid_t, egid: ::gid_t, sgid: ::gid_t) -> ::c_int;
pub fn setresuid(ruid: ::uid_t, euid: ::uid_t, suid: ::uid_t) -> ::c_int;
pub fn settimeofday(tv: *const ::timeval, tz: *const ::timezone) -> ::c_int;
pub fn setutxent();
pub fn shm_open(name: *const ::c_char, oflag: ::c_int, mode: ::mode_t)
-> ::c_int;
pub fn sigtimedwait(set: *const sigset_t,
info: *mut siginfo_t,
timeout: *const ::timespec) -> ::c_int;
pub fn sigwaitinfo(set: *const sigset_t,
info: *mut siginfo_t) -> ::c_int;
pub fn sysctl(name: *const ::c_int,
namelen: ::c_uint,
oldp: *mut ::c_void,
oldlenp: *mut ::size_t,
newp: *const ::c_void,
newlen: ::size_t)
-> ::c_int;
pub fn sysctlbyname(name: *const ::c_char,
oldp: *mut ::c_void,
oldlenp: *mut ::size_t,
newp: *const ::c_void,
newlen: ::size_t)
-> ::c_int;
pub fn sysctlnametomib(name: *const ::c_char,
mibp: *mut ::c_int,
sizep: *mut ::size_t)
-> ::c_int;
pub fn uselocale(loc: ::locale_t) -> ::locale_t;
pub fn utimensat(dirfd: ::c_int, path: *const ::c_char,
times: *const ::timespec, flag: ::c_int) -> ::c_int;
}
// Pseudo-terminal helpers that live in libutil rather than libc.
#[link(name = "util")]
extern {
    // Allocate a pty pair, returning the master and slave descriptors;
    // `name`, `termp` and `winp` may be null or receive the slave device
    // name, initial termios settings and window size respectively.
    pub fn openpty(amaster: *mut ::c_int,
                   aslave: *mut ::c_int,
                   name: *mut ::c_char,
                   termp: *mut termios,
                   winp: *mut ::winsize) -> ::c_int;
    // Combination of openpty(3) and fork(2): the child gets the slave as
    // its controlling terminal; returns the child pid in the parent.
    pub fn forkpty(amaster: *mut ::c_int,
                   name: *mut ::c_char,
                   termp: *mut termios,
                   winp: *mut ::winsize) -> ::pid_t;
    // Make `fd` the controlling terminal of the current process.
    pub fn login_tty(fd: ::c_int) -> ::c_int;
}
// Pull in the OS-specific submodule on top of the shared definitions above.
cfg_if! {
    if #[cfg(target_os = "freebsd")] {
        // FreeBSD-only constants, types and functions.
        mod freebsd;
        pub use self::freebsd::*;
    } else if #[cfg(target_os = "dragonfly")] {
        // DragonFly-only constants, types and functions.
        mod dragonfly;
        pub use self::dragonfly::*;
    } else {
        // ...
    }
}
| 36.446631 | 80 | 0.633905 |
0e37b3c5cabcd6fecd50faccf273d09796f2889b | 6,235 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use super::*;
/// sqrt(f32) is slightly different than sqrt(CAST(f32 AS double))): the
/// f32 computation loses precision relative to the widened f64 one.
#[tokio::test]
async fn sqrt_f32_vs_f64() -> Result<()> {
    let mut ctx = create_ctx()?;
    register_aggregate_csv(&mut ctx).await?;

    // sqrt over the raw f32 column: stable enough to compare textually.
    let f32_results =
        execute(&mut ctx, "SELECT avg(sqrt(c11)) FROM aggregate_test_100").await;
    assert_eq!(f32_results, vec![vec!["0.6584407806396484"]]);

    // The same expression after widening to double yields a slightly
    // different average; compare with a floating-point tolerance.
    let f64_results =
        execute(&mut ctx, "SELECT avg(sqrt(CAST(c11 AS double))) FROM aggregate_test_100").await;
    let f64_expected = vec![vec!["0.6584408483418833"]];
    assert_float_eq(&f64_expected, &f64_results);
    Ok(())
}
/// Casting a Float64 column to `float` yields Float32 values in the output.
#[tokio::test]
async fn csv_query_cast() -> Result<()> {
    let mut ctx = SessionContext::new();
    register_aggregate_csv(&mut ctx).await?;

    // Note the Float32 result type shown in the pretty-printed header.
    let expected = vec![
        "+-----------------------------------------+",
        "| CAST(aggregate_test_100.c12 AS Float32) |",
        "+-----------------------------------------+",
        "| 0.39144436                              |",
        "| 0.3887028                               |",
        "+-----------------------------------------+",
    ];
    let results = execute_to_batches(
        &ctx,
        "SELECT CAST(c12 AS float) FROM aggregate_test_100 WHERE c12 > 0.376 AND c12 < 0.4",
    )
    .await;
    assert_batches_eq!(expected, &results);
    Ok(())
}
/// Casting literals: `CAST(1 AS float)` projects as a Float32 column and a
/// casted `0` literal can be compared against the Float64 column `c12`.
#[tokio::test]
async fn csv_query_cast_literal() -> Result<()> {
    let mut ctx = SessionContext::new();
    register_aggregate_csv(&mut ctx).await?;

    let expected = vec![
        "+--------------------+---------------------------+",
        "| c12                | CAST(Int64(1) AS Float32) |",
        "+--------------------+---------------------------+",
        "| 0.9294097332465232 | 1                         |",
        "| 0.3114712539863804 | 1                         |",
        "+--------------------+---------------------------+",
    ];
    let results = execute_to_batches(
        &ctx,
        "SELECT c12, CAST(1 AS float) FROM aggregate_test_100 WHERE c12 > CAST(0 AS float) LIMIT 2",
    )
    .await;
    assert_batches_eq!(expected, &results);
    Ok(())
}
/// `concat` joins columns, literals and casted values; NULL arguments are
/// treated as empty strings rather than nulling the whole result.
#[tokio::test]
async fn query_concat() -> Result<()> {
    // Two-column in-memory table: a non-nullable Utf8 and a nullable Int32.
    let schema = Arc::new(Schema::new(vec![
        Field::new("c1", DataType::Utf8, false),
        Field::new("c2", DataType::Int32, true),
    ]));
    let batch = RecordBatch::try_new(
        schema.clone(),
        vec![
            Arc::new(StringArray::from_slice(&["", "a", "aa", "aaa"])),
            Arc::new(Int32Array::from(vec![Some(0), Some(1), None, Some(3)])),
        ],
    )?;
    let provider = MemTable::try_new(schema, vec![vec![batch]])?;
    let mut ctx = SessionContext::new();
    ctx.register_table("test", Arc::new(provider))?;

    let results =
        execute_to_batches(&ctx, "SELECT concat(c1, '-hi-', cast(c2 as varchar)) FROM test").await;
    // Row 3 has a NULL c2, so only the first two arguments contribute.
    let expected = vec![
        "+----------------------------------------------------+",
        "| concat(test.c1,Utf8(\"-hi-\"),CAST(test.c2 AS Utf8)) |",
        "+----------------------------------------------------+",
        "| -hi-0                                              |",
        "| a-hi-1                                             |",
        "| aa-hi-                                             |",
        "| aaa-hi-3                                           |",
        "+----------------------------------------------------+",
    ];
    assert_batches_eq!(expected, &results);
    Ok(())
}
// Revisit after implementing https://github.com/apache/arrow-rs/issues/925
/// `array(...)` builds a list value from its arguments; NULL elements are
/// rendered as `NULL` inside the printed list.
#[tokio::test]
async fn query_array() -> Result<()> {
    // Two-column in-memory table: a non-nullable Utf8 and a nullable Int32.
    let schema = Arc::new(Schema::new(vec![
        Field::new("c1", DataType::Utf8, false),
        Field::new("c2", DataType::Int32, true),
    ]));
    let batch = RecordBatch::try_new(
        schema.clone(),
        vec![
            Arc::new(StringArray::from_slice(&["", "a", "aa", "aaa"])),
            Arc::new(Int32Array::from(vec![Some(0), Some(1), None, Some(3)])),
        ],
    )?;
    let provider = MemTable::try_new(schema, vec![vec![batch]])?;
    let mut ctx = SessionContext::new();
    ctx.register_table("test", Arc::new(provider))?;

    let results = execute(&mut ctx, "SELECT array(c1, cast(c2 as varchar)) FROM test").await;
    let expected = vec![
        vec!["[,0]"],
        vec!["[a,1]"],
        vec!["[aa,NULL]"],
        vec!["[aaa,3]"],
    ];
    assert_eq!(expected, results);
    Ok(())
}
#[tokio::test]
async fn query_count_distinct() -> Result<()> {
    // Single nullable int column containing a duplicate (3) and a NULL.
    let schema = Arc::new(Schema::new(vec![Field::new("c1", DataType::Int32, true)]));
    let values = Int32Array::from(vec![Some(0), Some(1), None, Some(3), Some(3)]);
    let batch = RecordBatch::try_new(schema.clone(), vec![Arc::new(values)])?;

    let mut ctx = SessionContext::new();
    ctx.register_table(
        "test",
        Arc::new(MemTable::try_new(schema, vec![vec![batch]])?),
    )?;

    let actual = execute_to_batches(&ctx, "SELECT COUNT(DISTINCT c1) FROM test").await;
    // NULL is ignored and the duplicate 3 counted once: {0, 1, 3} -> 3.
    let expected = vec![
        "+-------------------------+",
        "| COUNT(DISTINCT test.c1) |",
        "+-------------------------+",
        "| 3                       |",
        "+-------------------------+",
    ];
    assert_batches_eq!(expected, &actual);
    Ok(())
}
| 35.225989 | 100 | 0.500401 |
2fcfa31f5165bbc9eab878cd4b3a62cb91773cab | 2,595 | use crate::config::ConfigData;
use crate::model::collection::Collection;
use crate::neo::NeoStore;
use crate::storage::persist::Persist;
use juniper::FieldResult;
use rocket_contrib::databases::rusted_cypher::Statement;
use uuid::Uuid;
// Shared state handed to every GraphQL resolver: a Neo4j connection
// plus the application configuration loaded at startup.
pub struct Context {
    pub connection: NeoStore,
    pub config: ConfigData,
}
// Marker impl required by juniper so `Context` can be used as the
// executor context type.
impl juniper::Context for Context {}
// GraphQL object mapping for `Collection`: three read-only fields
// mirroring the struct. (Comments are stripped before macro expansion,
// so they are safe inside the macro body.)
graphql_object!(Collection: () |&self| {
    description: "Collection"
    field uuid() -> &str as "The unique ID of the collection" {
        &self.uuid
    }
    field name() -> &str as "Name of the collection" {
        &self.name
    }
    // Cloned because the field resolver must return an owned Option<String>.
    field description() -> Option<String> as "Description of the collection" {
        self.description.to_owned()
    }
});
// Unit struct serving as the GraphQL query root; all state lives in `Context`.
pub struct QueryRoot;
graphql_object!(QueryRoot: Context as "Query" |&self| {
    description: "The root query object of the schema"
    // Lists every Collection node in the graph. Note this is a full scan
    // (unbounded MATCH); Neo4j and row-decoding errors propagate via `?`
    // into the juniper FieldResult.
    field collections(&executor) -> FieldResult<Vec<Collection>> {
        let mut results = Vec::new();
        let statement = Statement::new("MATCH (c:Collection) RETURN c.uuid, c.name, c.description");
        let query = executor.context().connection.exec(statement)?;
        for result in query.rows() {
            let uuid = result.get("c.uuid")?;
            let name = result.get("c.name")?;
            let description = result.get("c.description")?;
            results.push(Collection { uuid: uuid, name: name, description: description})
        }
        Ok(results)
    }
});
// Unit struct serving as the GraphQL mutation root.
pub struct MutationRoot;
graphql_object!(MutationRoot: Context | &self | {
    description: "The root mutation object of the schema"
    // Creates a new Collection: generates a UUID, creates a backing folder
    // on the configured filesystem adapter, then inserts the node in Neo4j.
    // NOTE(review): the folder is created before the DB insert — if the
    // CREATE fails, an orphan folder is left behind; confirm whether
    // cleanup/rollback is intended.
    field collection(
        &executor,
        name: String as "id of the collection",
        description: Option<String> as "description of the collection"
    ) -> FieldResult<Collection> {
        let uuid = Uuid::new_v4().to_hyphenated().to_string();
        let fs_adapter = executor.context().config.fs_adapter.as_str();
        Persist::create_folder(fs_adapter, uuid.as_str())?;
        let statement = Statement::new(
            "CREATE (c:Collection {uuid: {uuid}, name: {name}, description: {description}}) RETURN c.uuid, c.name, c.description"
        )
        .with_param("uuid", &uuid).unwrap()
        .with_param("name", &name).unwrap()
        .with_param("description", &description).unwrap();
        let query = executor.context().connection.exec(statement)?;
        // NOTE(review): `.nth(0).unwrap()` panics if CREATE returned no row;
        // consider converting to a FieldError instead.
        let result = query.rows().nth(0).unwrap();
        let uuid: String = result.get("c.uuid")?;
        let name: String = result.get("c.name")?;
        // A missing description column is treated as "no description"
        // rather than an error.
        let description = match result.get("c.description") {
            Ok(description) => Some(description),
            Err(_e) => None,
        };
        Ok(Collection { uuid: uuid, name: name, description: description})
    }
});
| 30.174419 | 125 | 0.667823 |
1163ce31ddcb44610c84da712fe3ca445a35d7a2 | 29,596 | #[doc = "Reader of register SCFGR1"]
pub type R = crate::R<u32, super::SCFGR1>;
#[doc = "Writer for register SCFGR1"]
pub type W = crate::W<u32, super::SCFGR1>;
#[doc = "Register SCFGR1 `reset()`'s with value 0"]
// svd2rust-generated: reset value used by `write()` as the starting bits.
impl crate::ResetValue for super::SCFGR1 {
    type Type = u32;
    // All SCFGR1 fields reset to 0 (every enumerated field's `_0` state).
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Address SCL Stall\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
// Single-bit field: the variant suffix matches the raw bit value.
pub enum ADRSTALL_A {
    #[doc = "0: Clock stretching is disabled"]
    ADRSTALL_0,
    #[doc = "1: Clock stretching is enabled"]
    ADRSTALL_1,
}
// Maps the enumerated field value back to its raw single-bit encoding.
impl From<ADRSTALL_A> for bool {
    #[inline(always)]
    fn from(variant: ADRSTALL_A) -> Self {
        // `ADRSTALL_1` is the sole truthy variant of this one-bit field.
        variant == ADRSTALL_A::ADRSTALL_1
    }
}
#[doc = "Reader of field `ADRSTALL`"]
pub type ADRSTALL_R = crate::R<bool, ADRSTALL_A>;
impl ADRSTALL_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ADRSTALL_A {
        // Decode the raw bit into the enumerated value.
        if self.bits {
            ADRSTALL_A::ADRSTALL_1
        } else {
            ADRSTALL_A::ADRSTALL_0
        }
    }
    #[doc = "Checks if the value of the field is `ADRSTALL_0`"]
    #[inline(always)]
    pub fn is_adrstall_0(&self) -> bool {
        !self.bits
    }
    #[doc = "Checks if the value of the field is `ADRSTALL_1`"]
    #[inline(always)]
    pub fn is_adrstall_1(&self) -> bool {
        self.bits
    }
}
#[doc = "Write proxy for field `ADRSTALL`"]
pub struct ADRSTALL_W<'a> {
    // Mutable borrow of the register writer; every setter returns it so
    // field writes can be chained.
    w: &'a mut W,
}
impl<'a> ADRSTALL_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: ADRSTALL_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Clock stretching is disabled"]
    #[inline(always)]
    pub fn adrstall_0(self) -> &'a mut W {
        self.variant(ADRSTALL_A::ADRSTALL_0)
    }
    #[doc = "Clock stretching is enabled"]
    #[inline(always)]
    pub fn adrstall_1(self) -> &'a mut W {
        self.variant(ADRSTALL_A::ADRSTALL_1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // ADRSTALL occupies bit 0: clear it, then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "RX SCL Stall\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RXSTALL_A {
#[doc = "0: Clock stretching is disabled"]
RXSTALL_0,
#[doc = "1: Clock stretching is enabled"]
RXSTALL_1,
}
impl From<RXSTALL_A> for bool {
#[inline(always)]
fn from(variant: RXSTALL_A) -> Self {
match variant {
RXSTALL_A::RXSTALL_0 => false,
RXSTALL_A::RXSTALL_1 => true,
}
}
}
#[doc = "Reader of field `RXSTALL`"]
pub type RXSTALL_R = crate::R<bool, RXSTALL_A>;
impl RXSTALL_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RXSTALL_A {
match self.bits {
false => RXSTALL_A::RXSTALL_0,
true => RXSTALL_A::RXSTALL_1,
}
}
#[doc = "Checks if the value of the field is `RXSTALL_0`"]
#[inline(always)]
pub fn is_rxstall_0(&self) -> bool {
*self == RXSTALL_A::RXSTALL_0
}
#[doc = "Checks if the value of the field is `RXSTALL_1`"]
#[inline(always)]
pub fn is_rxstall_1(&self) -> bool {
*self == RXSTALL_A::RXSTALL_1
}
}
#[doc = "Write proxy for field `RXSTALL`"]
pub struct RXSTALL_W<'a> {
w: &'a mut W,
}
impl<'a> RXSTALL_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RXSTALL_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clock stretching is disabled"]
#[inline(always)]
pub fn rxstall_0(self) -> &'a mut W {
self.variant(RXSTALL_A::RXSTALL_0)
}
#[doc = "Clock stretching is enabled"]
#[inline(always)]
pub fn rxstall_1(self) -> &'a mut W {
self.variant(RXSTALL_A::RXSTALL_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "TX Data SCL Stall\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TXDSTALL_A {
#[doc = "0: Clock stretching is disabled"]
TXDSTALL_0,
#[doc = "1: Clock stretching is enabled"]
TXDSTALL_1,
}
impl From<TXDSTALL_A> for bool {
#[inline(always)]
fn from(variant: TXDSTALL_A) -> Self {
match variant {
TXDSTALL_A::TXDSTALL_0 => false,
TXDSTALL_A::TXDSTALL_1 => true,
}
}
}
#[doc = "Reader of field `TXDSTALL`"]
pub type TXDSTALL_R = crate::R<bool, TXDSTALL_A>;
impl TXDSTALL_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> TXDSTALL_A {
match self.bits {
false => TXDSTALL_A::TXDSTALL_0,
true => TXDSTALL_A::TXDSTALL_1,
}
}
#[doc = "Checks if the value of the field is `TXDSTALL_0`"]
#[inline(always)]
pub fn is_txdstall_0(&self) -> bool {
*self == TXDSTALL_A::TXDSTALL_0
}
#[doc = "Checks if the value of the field is `TXDSTALL_1`"]
#[inline(always)]
pub fn is_txdstall_1(&self) -> bool {
*self == TXDSTALL_A::TXDSTALL_1
}
}
#[doc = "Write proxy for field `TXDSTALL`"]
pub struct TXDSTALL_W<'a> {
w: &'a mut W,
}
impl<'a> TXDSTALL_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: TXDSTALL_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clock stretching is disabled"]
#[inline(always)]
pub fn txdstall_0(self) -> &'a mut W {
self.variant(TXDSTALL_A::TXDSTALL_0)
}
#[doc = "Clock stretching is enabled"]
#[inline(always)]
pub fn txdstall_1(self) -> &'a mut W {
self.variant(TXDSTALL_A::TXDSTALL_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "ACK SCL Stall\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ACKSTALL_A {
#[doc = "0: Clock stretching is disabled"]
ACKSTALL_0,
#[doc = "1: Clock stretching is enabled"]
ACKSTALL_1,
}
impl From<ACKSTALL_A> for bool {
#[inline(always)]
fn from(variant: ACKSTALL_A) -> Self {
match variant {
ACKSTALL_A::ACKSTALL_0 => false,
ACKSTALL_A::ACKSTALL_1 => true,
}
}
}
#[doc = "Reader of field `ACKSTALL`"]
pub type ACKSTALL_R = crate::R<bool, ACKSTALL_A>;
impl ACKSTALL_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ACKSTALL_A {
match self.bits {
false => ACKSTALL_A::ACKSTALL_0,
true => ACKSTALL_A::ACKSTALL_1,
}
}
#[doc = "Checks if the value of the field is `ACKSTALL_0`"]
#[inline(always)]
pub fn is_ackstall_0(&self) -> bool {
*self == ACKSTALL_A::ACKSTALL_0
}
#[doc = "Checks if the value of the field is `ACKSTALL_1`"]
#[inline(always)]
pub fn is_ackstall_1(&self) -> bool {
*self == ACKSTALL_A::ACKSTALL_1
}
}
#[doc = "Write proxy for field `ACKSTALL`"]
pub struct ACKSTALL_W<'a> {
w: &'a mut W,
}
impl<'a> ACKSTALL_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: ACKSTALL_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clock stretching is disabled"]
#[inline(always)]
pub fn ackstall_0(self) -> &'a mut W {
self.variant(ACKSTALL_A::ACKSTALL_0)
}
#[doc = "Clock stretching is enabled"]
#[inline(always)]
pub fn ackstall_1(self) -> &'a mut W {
self.variant(ACKSTALL_A::ACKSTALL_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
#[doc = "General Call Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GCEN_A {
#[doc = "0: General Call address is disabled"]
GCEN_0,
#[doc = "1: General Call address is enabled"]
GCEN_1,
}
impl From<GCEN_A> for bool {
#[inline(always)]
fn from(variant: GCEN_A) -> Self {
match variant {
GCEN_A::GCEN_0 => false,
GCEN_A::GCEN_1 => true,
}
}
}
#[doc = "Reader of field `GCEN`"]
pub type GCEN_R = crate::R<bool, GCEN_A>;
impl GCEN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> GCEN_A {
match self.bits {
false => GCEN_A::GCEN_0,
true => GCEN_A::GCEN_1,
}
}
#[doc = "Checks if the value of the field is `GCEN_0`"]
#[inline(always)]
pub fn is_gcen_0(&self) -> bool {
*self == GCEN_A::GCEN_0
}
#[doc = "Checks if the value of the field is `GCEN_1`"]
#[inline(always)]
pub fn is_gcen_1(&self) -> bool {
*self == GCEN_A::GCEN_1
}
}
#[doc = "Write proxy for field `GCEN`"]
pub struct GCEN_W<'a> {
w: &'a mut W,
}
impl<'a> GCEN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: GCEN_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "General Call address is disabled"]
#[inline(always)]
pub fn gcen_0(self) -> &'a mut W {
self.variant(GCEN_A::GCEN_0)
}
#[doc = "General Call address is enabled"]
#[inline(always)]
pub fn gcen_1(self) -> &'a mut W {
self.variant(GCEN_A::GCEN_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
self.w
}
}
#[doc = "SMBus Alert Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SAEN_A {
#[doc = "0: Disables match on SMBus Alert"]
SAEN_0,
#[doc = "1: Enables match on SMBus Alert"]
SAEN_1,
}
impl From<SAEN_A> for bool {
#[inline(always)]
fn from(variant: SAEN_A) -> Self {
match variant {
SAEN_A::SAEN_0 => false,
SAEN_A::SAEN_1 => true,
}
}
}
#[doc = "Reader of field `SAEN`"]
pub type SAEN_R = crate::R<bool, SAEN_A>;
impl SAEN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SAEN_A {
match self.bits {
false => SAEN_A::SAEN_0,
true => SAEN_A::SAEN_1,
}
}
#[doc = "Checks if the value of the field is `SAEN_0`"]
#[inline(always)]
pub fn is_saen_0(&self) -> bool {
*self == SAEN_A::SAEN_0
}
#[doc = "Checks if the value of the field is `SAEN_1`"]
#[inline(always)]
pub fn is_saen_1(&self) -> bool {
*self == SAEN_A::SAEN_1
}
}
#[doc = "Write proxy for field `SAEN`"]
pub struct SAEN_W<'a> {
w: &'a mut W,
}
impl<'a> SAEN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SAEN_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Disables match on SMBus Alert"]
#[inline(always)]
pub fn saen_0(self) -> &'a mut W {
self.variant(SAEN_A::SAEN_0)
}
#[doc = "Enables match on SMBus Alert"]
#[inline(always)]
pub fn saen_1(self) -> &'a mut W {
self.variant(SAEN_A::SAEN_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
self.w
}
}
#[doc = "Transmit Flag Configuration\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TXCFG_A {
#[doc = "0: Transmit Data Flag will only assert during a slave-transmit transfer when the Transmit Data register is empty"]
TXCFG_0,
#[doc = "1: Transmit Data Flag will assert whenever the Transmit Data register is empty"]
TXCFG_1,
}
impl From<TXCFG_A> for bool {
#[inline(always)]
fn from(variant: TXCFG_A) -> Self {
match variant {
TXCFG_A::TXCFG_0 => false,
TXCFG_A::TXCFG_1 => true,
}
}
}
#[doc = "Reader of field `TXCFG`"]
pub type TXCFG_R = crate::R<bool, TXCFG_A>;
impl TXCFG_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> TXCFG_A {
match self.bits {
false => TXCFG_A::TXCFG_0,
true => TXCFG_A::TXCFG_1,
}
}
#[doc = "Checks if the value of the field is `TXCFG_0`"]
#[inline(always)]
pub fn is_txcfg_0(&self) -> bool {
*self == TXCFG_A::TXCFG_0
}
#[doc = "Checks if the value of the field is `TXCFG_1`"]
#[inline(always)]
pub fn is_txcfg_1(&self) -> bool {
*self == TXCFG_A::TXCFG_1
}
}
#[doc = "Write proxy for field `TXCFG`"]
pub struct TXCFG_W<'a> {
w: &'a mut W,
}
impl<'a> TXCFG_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: TXCFG_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Transmit Data Flag will only assert during a slave-transmit transfer when the Transmit Data register is empty"]
#[inline(always)]
pub fn txcfg_0(self) -> &'a mut W {
self.variant(TXCFG_A::TXCFG_0)
}
#[doc = "Transmit Data Flag will assert whenever the Transmit Data register is empty"]
#[inline(always)]
pub fn txcfg_1(self) -> &'a mut W {
self.variant(TXCFG_A::TXCFG_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
self.w
}
}
#[doc = "Receive Data Configuration\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RXCFG_A {
#[doc = "0: Reading the Receive Data register will return received data and clear the Receive Data flag (MSR\\[RDF\\])."]
RXCFG_0,
#[doc = "1: Reading the Receive Data register when the Address Valid flag (SSR\\[AVF\\])is set, will return the Address Status register and clear the Address Valid flag. Reading the Receive Data register when the Address Valid flag is clear, will return received data and clear the Receive Data flag (MSR\\[RDF\\])."]
RXCFG_1,
}
impl From<RXCFG_A> for bool {
#[inline(always)]
fn from(variant: RXCFG_A) -> Self {
match variant {
RXCFG_A::RXCFG_0 => false,
RXCFG_A::RXCFG_1 => true,
}
}
}
#[doc = "Reader of field `RXCFG`"]
pub type RXCFG_R = crate::R<bool, RXCFG_A>;
impl RXCFG_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RXCFG_A {
match self.bits {
false => RXCFG_A::RXCFG_0,
true => RXCFG_A::RXCFG_1,
}
}
#[doc = "Checks if the value of the field is `RXCFG_0`"]
#[inline(always)]
pub fn is_rxcfg_0(&self) -> bool {
*self == RXCFG_A::RXCFG_0
}
#[doc = "Checks if the value of the field is `RXCFG_1`"]
#[inline(always)]
pub fn is_rxcfg_1(&self) -> bool {
*self == RXCFG_A::RXCFG_1
}
}
#[doc = "Write proxy for field `RXCFG`"]
pub struct RXCFG_W<'a> {
w: &'a mut W,
}
impl<'a> RXCFG_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RXCFG_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Reading the Receive Data register will return received data and clear the Receive Data flag (MSR\\[RDF\\])."]
#[inline(always)]
pub fn rxcfg_0(self) -> &'a mut W {
self.variant(RXCFG_A::RXCFG_0)
}
#[doc = "Reading the Receive Data register when the Address Valid flag (SSR\\[AVF\\])is set, will return the Address Status register and clear the Address Valid flag. Reading the Receive Data register when the Address Valid flag is clear, will return received data and clear the Receive Data flag (MSR\\[RDF\\])."]
#[inline(always)]
pub fn rxcfg_1(self) -> &'a mut W {
self.variant(RXCFG_A::RXCFG_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
self.w
}
}
#[doc = "Ignore NACK\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum IGNACK_A {
#[doc = "0: Slave will end transfer when NACK is detected"]
IGNACK_0,
#[doc = "1: Slave will not end transfer when NACK detected"]
IGNACK_1,
}
impl From<IGNACK_A> for bool {
#[inline(always)]
fn from(variant: IGNACK_A) -> Self {
match variant {
IGNACK_A::IGNACK_0 => false,
IGNACK_A::IGNACK_1 => true,
}
}
}
#[doc = "Reader of field `IGNACK`"]
pub type IGNACK_R = crate::R<bool, IGNACK_A>;
impl IGNACK_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> IGNACK_A {
match self.bits {
false => IGNACK_A::IGNACK_0,
true => IGNACK_A::IGNACK_1,
}
}
#[doc = "Checks if the value of the field is `IGNACK_0`"]
#[inline(always)]
pub fn is_ignack_0(&self) -> bool {
*self == IGNACK_A::IGNACK_0
}
#[doc = "Checks if the value of the field is `IGNACK_1`"]
#[inline(always)]
pub fn is_ignack_1(&self) -> bool {
*self == IGNACK_A::IGNACK_1
}
}
#[doc = "Write proxy for field `IGNACK`"]
pub struct IGNACK_W<'a> {
w: &'a mut W,
}
impl<'a> IGNACK_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: IGNACK_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Slave will end transfer when NACK is detected"]
#[inline(always)]
pub fn ignack_0(self) -> &'a mut W {
self.variant(IGNACK_A::IGNACK_0)
}
#[doc = "Slave will not end transfer when NACK detected"]
#[inline(always)]
pub fn ignack_1(self) -> &'a mut W {
self.variant(IGNACK_A::IGNACK_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
self.w
}
}
#[doc = "High Speed Mode Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum HSMEN_A {
#[doc = "0: Disables detection of HS-mode master code"]
HSMEN_0,
#[doc = "1: Enables detection of HS-mode master code"]
HSMEN_1,
}
impl From<HSMEN_A> for bool {
#[inline(always)]
fn from(variant: HSMEN_A) -> Self {
match variant {
HSMEN_A::HSMEN_0 => false,
HSMEN_A::HSMEN_1 => true,
}
}
}
#[doc = "Reader of field `HSMEN`"]
pub type HSMEN_R = crate::R<bool, HSMEN_A>;
impl HSMEN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> HSMEN_A {
match self.bits {
false => HSMEN_A::HSMEN_0,
true => HSMEN_A::HSMEN_1,
}
}
#[doc = "Checks if the value of the field is `HSMEN_0`"]
#[inline(always)]
pub fn is_hsmen_0(&self) -> bool {
*self == HSMEN_A::HSMEN_0
}
#[doc = "Checks if the value of the field is `HSMEN_1`"]
#[inline(always)]
pub fn is_hsmen_1(&self) -> bool {
*self == HSMEN_A::HSMEN_1
}
}
#[doc = "Write proxy for field `HSMEN`"]
pub struct HSMEN_W<'a> {
w: &'a mut W,
}
impl<'a> HSMEN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: HSMEN_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Disables detection of HS-mode master code"]
#[inline(always)]
pub fn hsmen_0(self) -> &'a mut W {
self.variant(HSMEN_A::HSMEN_0)
}
#[doc = "Enables detection of HS-mode master code"]
#[inline(always)]
pub fn hsmen_1(self) -> &'a mut W {
self.variant(HSMEN_A::HSMEN_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
self.w
}
}
#[doc = "Address Configuration\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ADDRCFG_A {
#[doc = "0: Address match 0 (7-bit)"]
ADDRCFG_0,
#[doc = "1: Address match 0 (10-bit)"]
ADDRCFG_1,
#[doc = "2: Address match 0 (7-bit) or Address match 1 (7-bit)"]
ADDRCFG_2,
#[doc = "3: Address match 0 (10-bit) or Address match 1 (10-bit)"]
ADDRCFG_3,
#[doc = "4: Address match 0 (7-bit) or Address match 1 (10-bit)"]
ADDRCFG_4,
#[doc = "5: Address match 0 (10-bit) or Address match 1 (7-bit)"]
ADDRCFG_5,
#[doc = "6: From Address match 0 (7-bit) to Address match 1 (7-bit)"]
ADDRCFG_6,
#[doc = "7: From Address match 0 (10-bit) to Address match 1 (10-bit)"]
ADDRCFG_7,
}
impl From<ADDRCFG_A> for u8 {
#[inline(always)]
fn from(variant: ADDRCFG_A) -> Self {
match variant {
ADDRCFG_A::ADDRCFG_0 => 0,
ADDRCFG_A::ADDRCFG_1 => 1,
ADDRCFG_A::ADDRCFG_2 => 2,
ADDRCFG_A::ADDRCFG_3 => 3,
ADDRCFG_A::ADDRCFG_4 => 4,
ADDRCFG_A::ADDRCFG_5 => 5,
ADDRCFG_A::ADDRCFG_6 => 6,
ADDRCFG_A::ADDRCFG_7 => 7,
}
}
}
#[doc = "Reader of field `ADDRCFG`"]
pub type ADDRCFG_R = crate::R<u8, ADDRCFG_A>;
impl ADDRCFG_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ADDRCFG_A {
match self.bits {
0 => ADDRCFG_A::ADDRCFG_0,
1 => ADDRCFG_A::ADDRCFG_1,
2 => ADDRCFG_A::ADDRCFG_2,
3 => ADDRCFG_A::ADDRCFG_3,
4 => ADDRCFG_A::ADDRCFG_4,
5 => ADDRCFG_A::ADDRCFG_5,
6 => ADDRCFG_A::ADDRCFG_6,
7 => ADDRCFG_A::ADDRCFG_7,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `ADDRCFG_0`"]
#[inline(always)]
pub fn is_addrcfg_0(&self) -> bool {
*self == ADDRCFG_A::ADDRCFG_0
}
#[doc = "Checks if the value of the field is `ADDRCFG_1`"]
#[inline(always)]
pub fn is_addrcfg_1(&self) -> bool {
*self == ADDRCFG_A::ADDRCFG_1
}
#[doc = "Checks if the value of the field is `ADDRCFG_2`"]
#[inline(always)]
pub fn is_addrcfg_2(&self) -> bool {
*self == ADDRCFG_A::ADDRCFG_2
}
#[doc = "Checks if the value of the field is `ADDRCFG_3`"]
#[inline(always)]
pub fn is_addrcfg_3(&self) -> bool {
*self == ADDRCFG_A::ADDRCFG_3
}
#[doc = "Checks if the value of the field is `ADDRCFG_4`"]
#[inline(always)]
pub fn is_addrcfg_4(&self) -> bool {
*self == ADDRCFG_A::ADDRCFG_4
}
#[doc = "Checks if the value of the field is `ADDRCFG_5`"]
#[inline(always)]
pub fn is_addrcfg_5(&self) -> bool {
*self == ADDRCFG_A::ADDRCFG_5
}
#[doc = "Checks if the value of the field is `ADDRCFG_6`"]
#[inline(always)]
pub fn is_addrcfg_6(&self) -> bool {
*self == ADDRCFG_A::ADDRCFG_6
}
#[doc = "Checks if the value of the field is `ADDRCFG_7`"]
#[inline(always)]
pub fn is_addrcfg_7(&self) -> bool {
*self == ADDRCFG_A::ADDRCFG_7
}
}
#[doc = "Write proxy for field `ADDRCFG`"]
pub struct ADDRCFG_W<'a> {
w: &'a mut W,
}
impl<'a> ADDRCFG_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: ADDRCFG_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Address match 0 (7-bit)"]
#[inline(always)]
pub fn addrcfg_0(self) -> &'a mut W {
self.variant(ADDRCFG_A::ADDRCFG_0)
}
#[doc = "Address match 0 (10-bit)"]
#[inline(always)]
pub fn addrcfg_1(self) -> &'a mut W {
self.variant(ADDRCFG_A::ADDRCFG_1)
}
#[doc = "Address match 0 (7-bit) or Address match 1 (7-bit)"]
#[inline(always)]
pub fn addrcfg_2(self) -> &'a mut W {
self.variant(ADDRCFG_A::ADDRCFG_2)
}
#[doc = "Address match 0 (10-bit) or Address match 1 (10-bit)"]
#[inline(always)]
pub fn addrcfg_3(self) -> &'a mut W {
self.variant(ADDRCFG_A::ADDRCFG_3)
}
#[doc = "Address match 0 (7-bit) or Address match 1 (10-bit)"]
#[inline(always)]
pub fn addrcfg_4(self) -> &'a mut W {
self.variant(ADDRCFG_A::ADDRCFG_4)
}
#[doc = "Address match 0 (10-bit) or Address match 1 (7-bit)"]
#[inline(always)]
pub fn addrcfg_5(self) -> &'a mut W {
self.variant(ADDRCFG_A::ADDRCFG_5)
}
#[doc = "From Address match 0 (7-bit) to Address match 1 (7-bit)"]
#[inline(always)]
pub fn addrcfg_6(self) -> &'a mut W {
self.variant(ADDRCFG_A::ADDRCFG_6)
}
#[doc = "From Address match 0 (10-bit) to Address match 1 (10-bit)"]
#[inline(always)]
pub fn addrcfg_7(self) -> &'a mut W {
self.variant(ADDRCFG_A::ADDRCFG_7)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x07 << 16)) | (((value as u32) & 0x07) << 16);
self.w
}
}
impl R {
#[doc = "Bit 0 - Address SCL Stall"]
#[inline(always)]
pub fn adrstall(&self) -> ADRSTALL_R {
ADRSTALL_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - RX SCL Stall"]
#[inline(always)]
pub fn rxstall(&self) -> RXSTALL_R {
RXSTALL_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - TX Data SCL Stall"]
#[inline(always)]
pub fn txdstall(&self) -> TXDSTALL_R {
TXDSTALL_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - ACK SCL Stall"]
#[inline(always)]
pub fn ackstall(&self) -> ACKSTALL_R {
ACKSTALL_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 8 - General Call Enable"]
#[inline(always)]
pub fn gcen(&self) -> GCEN_R {
GCEN_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 9 - SMBus Alert Enable"]
#[inline(always)]
pub fn saen(&self) -> SAEN_R {
SAEN_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 10 - Transmit Flag Configuration"]
#[inline(always)]
pub fn txcfg(&self) -> TXCFG_R {
TXCFG_R::new(((self.bits >> 10) & 0x01) != 0)
}
#[doc = "Bit 11 - Receive Data Configuration"]
#[inline(always)]
pub fn rxcfg(&self) -> RXCFG_R {
RXCFG_R::new(((self.bits >> 11) & 0x01) != 0)
}
#[doc = "Bit 12 - Ignore NACK"]
#[inline(always)]
pub fn ignack(&self) -> IGNACK_R {
IGNACK_R::new(((self.bits >> 12) & 0x01) != 0)
}
#[doc = "Bit 13 - High Speed Mode Enable"]
#[inline(always)]
pub fn hsmen(&self) -> HSMEN_R {
HSMEN_R::new(((self.bits >> 13) & 0x01) != 0)
}
#[doc = "Bits 16:18 - Address Configuration"]
#[inline(always)]
pub fn addrcfg(&self) -> ADDRCFG_R {
ADDRCFG_R::new(((self.bits >> 16) & 0x07) as u8)
}
}
impl W {
#[doc = "Bit 0 - Address SCL Stall"]
#[inline(always)]
pub fn adrstall(&mut self) -> ADRSTALL_W {
ADRSTALL_W { w: self }
}
#[doc = "Bit 1 - RX SCL Stall"]
#[inline(always)]
pub fn rxstall(&mut self) -> RXSTALL_W {
RXSTALL_W { w: self }
}
#[doc = "Bit 2 - TX Data SCL Stall"]
#[inline(always)]
pub fn txdstall(&mut self) -> TXDSTALL_W {
TXDSTALL_W { w: self }
}
#[doc = "Bit 3 - ACK SCL Stall"]
#[inline(always)]
pub fn ackstall(&mut self) -> ACKSTALL_W {
ACKSTALL_W { w: self }
}
#[doc = "Bit 8 - General Call Enable"]
#[inline(always)]
pub fn gcen(&mut self) -> GCEN_W {
GCEN_W { w: self }
}
#[doc = "Bit 9 - SMBus Alert Enable"]
#[inline(always)]
pub fn saen(&mut self) -> SAEN_W {
SAEN_W { w: self }
}
#[doc = "Bit 10 - Transmit Flag Configuration"]
#[inline(always)]
pub fn txcfg(&mut self) -> TXCFG_W {
TXCFG_W { w: self }
}
#[doc = "Bit 11 - Receive Data Configuration"]
#[inline(always)]
pub fn rxcfg(&mut self) -> RXCFG_W {
RXCFG_W { w: self }
}
#[doc = "Bit 12 - Ignore NACK"]
#[inline(always)]
pub fn ignack(&mut self) -> IGNACK_W {
IGNACK_W { w: self }
}
#[doc = "Bit 13 - High Speed Mode Enable"]
#[inline(always)]
pub fn hsmen(&mut self) -> HSMEN_W {
HSMEN_W { w: self }
}
#[doc = "Bits 16:18 - Address Configuration"]
#[inline(always)]
pub fn addrcfg(&mut self) -> ADDRCFG_W {
ADDRCFG_W { w: self }
}
}
| 27.920755 | 319 | 0.603764 |
48588ecf4f324349a38f90a67c9ee2ed00ded46f | 5,486 | use core::result;
use std::error::Error as StdError;
use std::fmt;
use std::io;
use num_bigint::BigInt;
use serde::de;
use serde::ser;
// The kinds of unexpected values reported in type-mismatch errors
// (constructed via `Error::invalid_type`).
#[derive(Debug)]
pub enum Unexpected {
    Bool(bool),
    Integer(BigInt),
    Float(f64),
    Str(String),
    Unit,
    Array,
    Map,
}
// Human-readable rendering used inside error messages, e.g.
// `invalid type: string "x", expected …`.
impl fmt::Display for Unexpected {
    fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> {
        match self {
            Unexpected::Bool(value) => write!(f, "boolean `{}`", value),
            Unexpected::Integer(value) => write!(f, "integer `{}`", value),
            Unexpected::Float(value) => write!(f, "floating point `{}`", value),
            Unexpected::Str(value) => write!(f, "string {:?}", value),
            Unexpected::Unit => write!(f, "unit value"),
            Unexpected::Array => write!(f, "array"),
            Unexpected::Map => write!(f, "map"),
        }
    }
}
// Public error type for this crate.
pub struct Error {
    // Boxed so `Error` (and thus `Result<T, Error>`) stays one pointer
    // wide regardless of how large the ErrorImpl variants grow.
    inner: Box<ErrorImpl>,
}
// Crate-wide result alias; the error type defaults to `Error`.
pub type Result<T, E = Error> = result::Result<T, E>;
impl From<io::Error> for Error {
fn from(source: io::Error) -> Self {
Error {
inner: Box::new(ErrorImpl::Io(source)),
}
}
}
impl From<std::convert::Infallible> for Error {
    fn from(_: std::convert::Infallible) -> Self {
        // `Infallible` has no values, so this conversion can never run at
        // runtime; the impl exists only to satisfy trait bounds (e.g.
        // `?` on a `Result<_, Infallible>`).
        unreachable!()
    }
}
impl Error {
#[doc(hidden)]
#[cold]
pub(crate) fn io(source: io::Error) -> Self {
Error {
inner: Box::new(ErrorImpl::Io(source)),
}
}
#[doc(hidden)]
#[cold]
pub(crate) fn too_large<T: Into<BigInt>>(index: T) -> Self {
Error {
inner: Box::new(ErrorImpl::Range(index.into())),
}
}
#[doc(hidden)]
#[cold]
pub(crate) fn invalid_type(unexpected: Unexpected, expected: &'static str) -> Self {
Error {
inner: Box::new(ErrorImpl::Type {
unexpected,
expected,
}),
}
}
#[doc(hidden)]
#[cold]
pub(crate) fn path_parse<R: pest::RuleType + Send + Sync + 'static>(
source: pest::error::Error<R>,
path: &str,
) -> Self {
Error {
inner: Box::new(ErrorImpl::PathParse {
path: Box::from(path),
source: Box::new(source),
}),
}
}
#[doc(hidden)]
#[cold]
pub(crate) fn format_parse<E>(origin: &str, source: E) -> Self
where
E: StdError + Sync + Send + 'static,
{
Error {
inner: Box::new(ErrorImpl::FormatParse {
origin: origin.into(),
source: Box::new(source),
}),
}
}
#[doc(hidden)]
#[cold]
pub(crate) fn serde<T: AsRef<str>>(message: T) -> Self {
Error {
inner: Box::new(ErrorImpl::Serde(message.as_ref().into())),
}
}
}
/// Delegates user-facing formatting straight to the inner `ErrorImpl`.
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(self.inner.as_ref(), f)
    }
}
/// Debug output intentionally mirrors `Display`; the boxed internals
/// carry no extra diagnostic value of their own.
impl fmt::Debug for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self)
    }
}
/// Exposes the underlying parse error, when one exists, as the source.
impl StdError for Error {
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        // Only the two parse variants carry a boxed cause.
        match self.inner.as_ref() {
            ErrorImpl::PathParse { source, .. } | ErrorImpl::FormatParse { source, .. } => {
                Some(source.as_ref())
            }
            _ => None,
        }
    }
}
/// Lets serde deserializers surface custom messages as [`Error`]s.
impl de::Error for Error {
    fn custom<T: fmt::Display>(msg: T) -> Self {
        let message = msg.to_string().into_boxed_str();
        Error {
            inner: Box::new(ErrorImpl::Serde(message)),
        }
    }
}
/// Lets serde serializers surface custom messages as [`Error`]s.
impl ser::Error for Error {
    fn custom<T: fmt::Display>(msg: T) -> Self {
        let message = msg.to_string().into_boxed_str();
        Error {
            inner: Box::new(ErrorImpl::Serde(message)),
        }
    }
}
/// Represents all possible errors that can occur when working with
/// configuration.
///
/// Kept private (and boxed inside `Error`) so variants can change
/// without breaking the public API.
enum ErrorImpl {
    /// The value path (origin path) could not be parsed.
    PathParse {
        path: Box<str>,
        source: Box<dyn StdError + Send + Sync>,
    },
    /// An I/O error occurred during a file operation.
    Io(io::Error),
    /// A value could not be converted into the requested type.
    Type {
        /// What we found when parsing the value.
        unexpected: Unexpected,
        /// What was expected when parsing the value.
        expected: &'static str,
    },
    /// A value could not be parsed by the target format.
    FormatParse {
        origin: Box<str>,
        source: Box<dyn StdError + Send + Sync>,
    },
    /// A serde (de)serialization error message.
    Serde(Box<str>),
    /// A path index was out of range.
    Range(BigInt),
}
/// Human-readable rendering for each internal error kind.
impl fmt::Display for ErrorImpl {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ErrorImpl::PathParse { path, source } => {
                write!(f, "{}\n for {}", source, path)
            }
            ErrorImpl::Range(index) => write!(f, "invalid range {}", index),
            ErrorImpl::Type {
                unexpected,
                expected,
            } => write!(f, "invalid type: {}, expected {}", unexpected, expected),
            ErrorImpl::Io(err) => write!(f, "{}", err),
            ErrorImpl::Serde(message) => write!(f, "{}", message),
            ErrorImpl::FormatParse { source, origin } => {
                write!(f, "{}\n in {}", source, origin)
            }
        }
    }
}
| 24.274336 | 88 | 0.496901 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.