Column schema (name, type, min - max):

hexsha               stringlengths   40 - 40
size                 int64           4 - 1.05M
content              stringlengths   4 - 1.05M
avg_line_length      float64         1.33 - 100
max_line_length      int64           1 - 1k
alphanum_fraction    float64         0.25 - 1
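A minimal Rust sketch of how one record of this dump could be represented and filtered on the numeric columns above; the struct, the function, and the thresholds are illustrative and not part of the dataset.

// One row of the dump; field names mirror the column schema above.
struct SourceFile {
    hexsha: String,
    size: u64,
    content: String,
    avg_line_length: f64,
    max_line_length: u64,
    alphanum_fraction: f64,
}

// Keep only files whose stats fall inside the ranges reported above,
// e.g. to drop minified or data-heavy sources before further processing.
fn keep(file: &SourceFile) -> bool {
    file.avg_line_length <= 100.0
        && file.max_line_length <= 1_000
        && file.alphanum_fraction >= 0.25
}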
hexsha: e63dda97cd9a3fea3eb032f97ca33f50953a31f3
size: 207
content:
// Tests that storage class inference fails correctly
// build-fail

use spirv_std::Image2d;

#[spirv(vertex)]
pub fn main(#[spirv(uniform)] error: &Image2d, #[spirv(uniform_constant)] warning: &Image2d) {}
avg_line_length: 25.875
max_line_length: 95
alphanum_fraction: 0.729469

hexsha: fce4cca03dec2a3a1df81910b6159a8653d18770
size: 976
content:
use clap::{Arg, Command};
use std::error::Error;

#[derive(Clone, Default)]
pub struct Argument {
    pub config_file: String,
    pub version_info: String,
}

impl Argument {
    pub fn parse(&mut self) -> Result<(), Box<dyn Error>> {
        self.version_info =
            concat!(env!("CARGO_PKG_VERSION"), "-build-", env!("build")).to_string();
        let matches = Command::new("worker")
            .version(&*self.version_info)
            .arg(
                Arg::new("config_file")
                    .short('c')
                    .long("config-file")
                    .value_name("NAME")
                    .help("Config file (.yml)")
                    .takes_value(true)
                    .required(true),
            )
            .get_matches();
        match matches.value_of("config_file") {
            Some(name) => self.config_file = name.to_string(),
            None => self.config_file = "".to_string(),
        }
        Ok(())
    }
}
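A hypothetical caller for the `Argument` struct above; the helper name is illustrative, and it assumes the binary is invoked with the required `--config-file` flag.

fn load_args() -> Result<Argument, Box<dyn std::error::Error>> {
    // e.g. invoked as: worker --config-file worker.yml
    let mut args = Argument::default();
    args.parse()?;
    Ok(args)
}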
avg_line_length: 27.111111
max_line_length: 85
alphanum_fraction: 0.485656

hexsha: f8d2aa2ee7a70b117aef69f7b7e40c359cc83049
size: 129
content:
use svm_sdk::template;

#[template]
mod Template {
    #[fundable_hook]
    #[fundable(default)]
    fn get() {}
}

fn main() {}
avg_line_length: 11.727273
max_line_length: 24
alphanum_fraction: 0.589147

hexsha: 08404bea56138638aecf4131aa3b94de2f2fded6
size: 15,463
content:
//! # Minimal Specialization //! //! This module contains the checks for sound specialization used when the //! `min_specialization` feature is enabled. This requires that the impl is //! *always applicable*. //! //! If `impl1` specializes `impl2` then `impl1` is always applicable if we know //! that all the bounds of `impl2` are satisfied, and all of the bounds of //! `impl1` are satisfied for some choice of lifetimes then we know that //! `impl1` applies for any choice of lifetimes. //! //! ## Basic approach //! //! To enforce this requirement on specializations we take the following //! approach: //! //! 1. Match up the substs for `impl2` so that the implemented trait and //! self-type match those for `impl1`. //! 2. Check for any direct use of `'static` in the substs of `impl2`. //! 3. Check that all of the generic parameters of `impl1` occur at most once //! in the *unconstrained* substs for `impl2`. A parameter is constrained if //! its value is completely determined by an associated type projection //! predicate. //! 4. Check that all predicates on `impl1` either exist on `impl2` (after //! matching substs), or are well-formed predicates for the trait's type //! arguments. //! //! ## Example //! //! Suppose we have the following always applicable impl: //! //! ```rust //! impl<T> SpecExtend<T> for std::vec::IntoIter<T> { /* specialized impl */ } //! impl<T, I: Iterator<Item=T>> SpecExtend<T> for I { /* default impl */ } //! ``` //! //! We get that the subst for `impl2` are `[T, std::vec::IntoIter<T>]`. `T` is //! constrained to be `<I as Iterator>::Item`, so we check only //! `std::vec::IntoIter<T>` for repeated parameters, which it doesn't have. The //! predicates of `impl1` are only `T: Sized`, which is also a predicate of //! `impl2`. So this specialization is sound. //! //! ## Extensions //! //! Unfortunately not all specializations in the standard library are allowed //! by this. So there are two extensions to these rules that allow specializing //! on some traits: that is, using them as bounds on the specializing impl, //! even when they don't occur in the base impl. //! //! ### rustc_specialization_trait //! //! If a trait is always applicable, then it's sound to specialize on it. We //! check trait is always applicable in the same way as impls, except that step //! 4 is now "all predicates on `impl1` are always applicable". We require that //! `specialization` or `min_specialization` is enabled to implement these //! traits. //! //! ### rustc_unsafe_specialization_marker //! //! There are also some specialization on traits with no methods, including the //! stable `FusedIterator` trait. We allow marking marker traits with an //! unstable attribute that means we ignore them in point 3 of the checks //! above. This is unsound, in the sense that the specialized impl may be used //! when it doesn't apply, but we allow it in the short term since it can't //! cause use after frees with purely safe code in the same way as specializing //! on traits with methods can. 
use crate::constrained_generic_params as cgp; use rustc_data_structures::fx::FxHashSet; use rustc_hir as hir; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_infer::infer::outlives::env::OutlivesEnvironment; use rustc_infer::infer::{InferCtxt, RegionckMode, TyCtxtInferExt}; use rustc_infer::traits::specialization_graph::Node; use rustc_middle::ty::subst::{GenericArg, InternalSubsts, SubstsRef}; use rustc_middle::ty::trait_def::TraitSpecializationKind; use rustc_middle::ty::{self, InstantiatedPredicates, TyCtxt, TypeFoldable}; use rustc_span::Span; use rustc_trait_selection::traits::{self, translate_substs, wf}; pub(super) fn check_min_specialization(tcx: TyCtxt<'_>, impl_def_id: DefId, span: Span) { if let Some(node) = parent_specialization_node(tcx, impl_def_id) { tcx.infer_ctxt().enter(|infcx| { check_always_applicable(&infcx, impl_def_id, node, span); }); } } fn parent_specialization_node(tcx: TyCtxt<'_>, impl1_def_id: DefId) -> Option<Node> { let trait_ref = tcx.impl_trait_ref(impl1_def_id)?; let trait_def = tcx.trait_def(trait_ref.def_id); let impl2_node = trait_def.ancestors(tcx, impl1_def_id).ok()?.nth(1)?; let always_applicable_trait = matches!(trait_def.specialization_kind, TraitSpecializationKind::AlwaysApplicable); if impl2_node.is_from_trait() && !always_applicable_trait { // Implementing a normal trait isn't a specialization. return None; } Some(impl2_node) } /// Check that `impl1` is a sound specialization fn check_always_applicable( infcx: &InferCtxt<'_, '_>, impl1_def_id: DefId, impl2_node: Node, span: Span, ) { if let Some((impl1_substs, impl2_substs)) = get_impl_substs(infcx, impl1_def_id, impl2_node, span) { let impl2_def_id = impl2_node.def_id(); debug!( "check_always_applicable(\nimpl1_def_id={:?},\nimpl2_def_id={:?},\nimpl2_substs={:?}\n)", impl1_def_id, impl2_def_id, impl2_substs ); let tcx = infcx.tcx; let parent_substs = if impl2_node.is_from_trait() { impl2_substs.to_vec() } else { unconstrained_parent_impl_substs(tcx, impl2_def_id, impl2_substs) }; check_static_lifetimes(tcx, &parent_substs, span); check_duplicate_params(tcx, impl1_substs, &parent_substs, span); check_predicates( infcx, impl1_def_id.expect_local(), impl1_substs, impl2_node, impl2_substs, span, ); } } /// Given a specializing impl `impl1`, and the base impl `impl2`, returns two /// substitutions `(S1, S2)` that equate their trait references. The returned /// types are expressed in terms of the generics of `impl1`. /// /// Example /// /// impl<A, B> Foo<A> for B { /* impl2 */ } /// impl<C> Foo<Vec<C>> for C { /* impl1 */ } /// /// Would return `S1 = [C]` and `S2 = [Vec<C>, C]`. fn get_impl_substs<'tcx>( infcx: &InferCtxt<'_, 'tcx>, impl1_def_id: DefId, impl2_node: Node, span: Span, ) -> Option<(SubstsRef<'tcx>, SubstsRef<'tcx>)> { let tcx = infcx.tcx; let param_env = tcx.param_env(impl1_def_id); let impl1_substs = InternalSubsts::identity_for_item(tcx, impl1_def_id); let impl2_substs = translate_substs(infcx, param_env, impl1_def_id, impl1_substs, impl2_node); // Conservatively use an empty `ParamEnv`. let outlives_env = OutlivesEnvironment::new(ty::ParamEnv::empty()); infcx.resolve_regions_and_report_errors(impl1_def_id, &outlives_env, RegionckMode::default()); let impl2_substs = match infcx.fully_resolve(&impl2_substs) { Ok(s) => s, Err(_) => { tcx.sess.struct_span_err(span, "could not resolve substs on overridden impl").emit(); return None; } }; Some((impl1_substs, impl2_substs)) } /// Returns a list of all of the unconstrained subst of the given impl. 
/// /// For example given the impl: /// /// impl<'a, T, I> ... where &'a I: IntoIterator<Item=&'a T> /// /// This would return the substs corresponding to `['a, I]`, because knowing /// `'a` and `I` determines the value of `T`. fn unconstrained_parent_impl_substs<'tcx>( tcx: TyCtxt<'tcx>, impl_def_id: DefId, impl_substs: SubstsRef<'tcx>, ) -> Vec<GenericArg<'tcx>> { let impl_generic_predicates = tcx.predicates_of(impl_def_id); let mut unconstrained_parameters = FxHashSet::default(); let mut constrained_params = FxHashSet::default(); let impl_trait_ref = tcx.impl_trait_ref(impl_def_id); // Unfortunately the functions in `constrained_generic_parameters` don't do // what we want here. We want only a list of constrained parameters while // the functions in `cgp` add the constrained parameters to a list of // unconstrained parameters. for (predicate, _) in impl_generic_predicates.predicates.iter() { if let ty::PredicateKind::Projection(proj) = predicate.kind() { let projection_ty = proj.skip_binder().projection_ty; let projected_ty = proj.skip_binder().ty; let unbound_trait_ref = projection_ty.trait_ref(tcx); if Some(unbound_trait_ref) == impl_trait_ref { continue; } unconstrained_parameters.extend(cgp::parameters_for(&projection_ty, true)); for param in cgp::parameters_for(&projected_ty, false) { if !unconstrained_parameters.contains(&param) { constrained_params.insert(param.0); } } unconstrained_parameters.extend(cgp::parameters_for(&projected_ty, true)); } } impl_substs .iter() .enumerate() .filter(|&(idx, _)| !constrained_params.contains(&(idx as u32))) .map(|(_, arg)| *arg) .collect() } /// Check that parameters of the derived impl don't occur more than once in the /// equated substs of the base impl. /// /// For example forbid the following: /// /// impl<A> Tr for A { } /// impl<B> Tr for (B, B) { } /// /// Note that only consider the unconstrained parameters of the base impl: /// /// impl<S, I: IntoIterator<Item = S>> Tr<S> for I { } /// impl<T> Tr<T> for Vec<T> { } /// /// The substs for the parent impl here are `[T, Vec<T>]`, which repeats `T`, /// but `S` is constrained in the parent impl, so `parent_substs` is only /// `[Vec<T>]`. This means we allow this impl. fn check_duplicate_params<'tcx>( tcx: TyCtxt<'tcx>, impl1_substs: SubstsRef<'tcx>, parent_substs: &Vec<GenericArg<'tcx>>, span: Span, ) { let mut base_params = cgp::parameters_for(parent_substs, true); base_params.sort_by_key(|param| param.0); if let (_, [duplicate, ..]) = base_params.partition_dedup() { let param = impl1_substs[duplicate.0 as usize]; tcx.sess .struct_span_err(span, &format!("specializing impl repeats parameter `{}`", param)) .emit(); } } /// Check that `'static` lifetimes are not introduced by the specializing impl. /// /// For example forbid the following: /// /// impl<A> Tr for A { } /// impl Tr for &'static i32 { } fn check_static_lifetimes<'tcx>( tcx: TyCtxt<'tcx>, parent_substs: &Vec<GenericArg<'tcx>>, span: Span, ) { if tcx.any_free_region_meets(parent_substs, |r| *r == ty::ReStatic) { tcx.sess.struct_span_err(span, "cannot specialize on `'static` lifetime").emit(); } } /// Check whether predicates on the specializing impl (`impl1`) are allowed. /// /// Each predicate `P` must be: /// /// * global (not reference any parameters) /// * `T: Tr` predicate where `Tr` is an always-applicable trait /// * on the base `impl impl2` /// * Currently this check is done using syntactic equality, which is /// conservative but generally sufficient. 
/// * a well-formed predicate of a type argument of the trait being implemented, /// including the `Self`-type. fn check_predicates<'tcx>( infcx: &InferCtxt<'_, 'tcx>, impl1_def_id: LocalDefId, impl1_substs: SubstsRef<'tcx>, impl2_node: Node, impl2_substs: SubstsRef<'tcx>, span: Span, ) { let tcx = infcx.tcx; let impl1_predicates = tcx.predicates_of(impl1_def_id).instantiate(tcx, impl1_substs); let mut impl2_predicates = if impl2_node.is_from_trait() { // Always applicable traits have to be always applicable without any // assumptions. InstantiatedPredicates::empty() } else { tcx.predicates_of(impl2_node.def_id()).instantiate(tcx, impl2_substs) }; debug!( "check_always_applicable(\nimpl1_predicates={:?},\nimpl2_predicates={:?}\n)", impl1_predicates, impl2_predicates, ); // Since impls of always applicable traits don't get to assume anything, we // can also assume their supertraits apply. // // For example, we allow: // // #[rustc_specialization_trait] // trait AlwaysApplicable: Debug { } // // impl<T> Tr for T { } // impl<T: AlwaysApplicable> Tr for T { } // // Specializing on `AlwaysApplicable` allows also specializing on `Debug` // which is sound because we forbid impls like the following // // impl<D: Debug> AlwaysApplicable for D { } let always_applicable_traits = impl1_predicates .predicates .iter() .filter(|predicate| { matches!( trait_predicate_kind(tcx, predicate), Some(TraitSpecializationKind::AlwaysApplicable) ) }) .copied(); // Include the well-formed predicates of the type parameters of the impl. for ty in tcx.impl_trait_ref(impl1_def_id).unwrap().substs.types() { if let Some(obligations) = wf::obligations( infcx, tcx.param_env(impl1_def_id), tcx.hir().as_local_hir_id(impl1_def_id), ty, span, ) { impl2_predicates .predicates .extend(obligations.into_iter().map(|obligation| obligation.predicate)) } } impl2_predicates.predicates.extend( traits::elaborate_predicates(tcx, always_applicable_traits) .map(|obligation| obligation.predicate), ); for predicate in impl1_predicates.predicates { if !impl2_predicates.predicates.contains(&predicate) { check_specialization_on(tcx, &predicate, span) } } } fn check_specialization_on<'tcx>(tcx: TyCtxt<'tcx>, predicate: &ty::Predicate<'tcx>, span: Span) { debug!("can_specialize_on(predicate = {:?})", predicate); match predicate.kind() { // Global predicates are either always true or always false, so we // are fine to specialize on. _ if predicate.is_global() => (), // We allow specializing on explicitly marked traits with no associated // items. ty::PredicateKind::Trait(pred, hir::Constness::NotConst) => { if !matches!( trait_predicate_kind(tcx, predicate), Some(TraitSpecializationKind::Marker) ) { tcx.sess .struct_span_err( span, &format!( "cannot specialize on trait `{}`", tcx.def_path_str(pred.def_id()), ), ) .emit() } } _ => tcx .sess .struct_span_err(span, &format!("cannot specialize on `{:?}`", predicate)) .emit(), } } fn trait_predicate_kind<'tcx>( tcx: TyCtxt<'tcx>, predicate: &ty::Predicate<'tcx>, ) -> Option<TraitSpecializationKind> { match predicate.kind() { ty::PredicateKind::Trait(pred, hir::Constness::NotConst) => { Some(tcx.trait_def(pred.def_id()).specialization_kind) } ty::PredicateKind::Trait(_, hir::Constness::Const) | ty::PredicateKind::RegionOutlives(_) | ty::PredicateKind::TypeOutlives(_) | ty::PredicateKind::Projection(_) | ty::PredicateKind::WellFormed(_) | ty::PredicateKind::Subtype(_) | ty::PredicateKind::ObjectSafe(_) | ty::PredicateKind::ClosureKind(..) | ty::PredicateKind::ConstEvaluatable(..) 
| ty::PredicateKind::ConstEquate(..) => None, } }
avg_line_length: 37.350242
max_line_length: 101
alphanum_fraction: 0.643407

hexsha: d6f9d1db7df8830220d1b321f8366b2c63638709
size: 6,479
content:
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /*! * Computes moves. */ use mc = middle::mem_categorization; use middle::borrowck::*; use middle::borrowck::move_data::*; use middle::moves; use middle::ty; use syntax::ast; use syntax::ast_util; use syntax::codemap::span; use util::ppaux::{UserString}; pub fn gather_decl(bccx: @BorrowckCtxt, move_data: &mut MoveData, decl_id: ast::NodeId, _decl_span: span, var_id: ast::NodeId) { let loan_path = @LpVar(var_id); move_data.add_move(bccx.tcx, loan_path, decl_id, Declared); } pub fn gather_move_from_expr(bccx: @BorrowckCtxt, move_data: &mut MoveData, move_expr: @ast::expr, cmt: mc::cmt) { gather_move_from_expr_or_pat(bccx, move_data, move_expr.id, MoveExpr(move_expr), cmt); } pub fn gather_move_from_pat(bccx: @BorrowckCtxt, move_data: &mut MoveData, move_pat: @ast::pat, cmt: mc::cmt) { gather_move_from_expr_or_pat(bccx, move_data, move_pat.id, MovePat(move_pat), cmt); } fn gather_move_from_expr_or_pat(bccx: @BorrowckCtxt, move_data: &mut MoveData, move_id: ast::NodeId, move_kind: MoveKind, cmt: mc::cmt) { if !check_is_legal_to_move_from(bccx, cmt, cmt) { return; } match opt_loan_path(cmt) { Some(loan_path) => { move_data.add_move(bccx.tcx, loan_path, move_id, move_kind); } None => { // move from rvalue or unsafe pointer, hence ok } } } pub fn gather_captures(bccx: @BorrowckCtxt, move_data: &mut MoveData, closure_expr: @ast::expr) { let captured_vars = bccx.capture_map.get(&closure_expr.id); for captured_var in captured_vars.iter() { match captured_var.mode { moves::CapMove => { let fvar_id = ast_util::def_id_of_def(captured_var.def).node; let loan_path = @LpVar(fvar_id); move_data.add_move(bccx.tcx, loan_path, closure_expr.id, Captured(closure_expr)); } moves::CapCopy | moves::CapRef => {} } } } pub fn gather_assignment(bccx: @BorrowckCtxt, move_data: &mut MoveData, assignment_id: ast::NodeId, assignment_span: span, assignee_loan_path: @LoanPath, assignee_id: ast::NodeId) { move_data.add_assignment(bccx.tcx, assignee_loan_path, assignment_id, assignment_span, assignee_id); } fn check_is_legal_to_move_from(bccx: @BorrowckCtxt, cmt0: mc::cmt, cmt: mc::cmt) -> bool { match cmt.cat { mc::cat_deref(_, _, mc::region_ptr(*)) | mc::cat_deref(_, _, mc::gc_ptr(*)) | mc::cat_deref(_, _, mc::unsafe_ptr(*)) => { bccx.span_err( cmt0.span, fmt!("cannot move out of %s", bccx.cmt_to_str(cmt))); false } // These are separate from the above cases for a better error message. mc::cat_stack_upvar(*) | mc::cat_copied_upvar(mc::CopiedUpvar { onceness: ast::Many, _ }) => { let once_hint = if bccx.tcx.sess.once_fns() { " (unless the destination closure type is `once fn')" } else { "" }; bccx.span_err( cmt0.span, fmt!("cannot move out of %s%s", bccx.cmt_to_str(cmt), once_hint)); false } // Can move out of captured upvars only if the destination closure // type is 'once'. 1-shot stack closures emit the copied_upvar form // (see mem_categorization.rs). 
mc::cat_copied_upvar(mc::CopiedUpvar { onceness: ast::Once, _ }) => { true } // It seems strange to allow a move out of a static item, // but what happens in practice is that you have a // reference to a constant with a type that should be // moved, like `None::<~int>`. The type of this constant // is technically `Option<~int>`, which moves, but we know // that the content of static items will never actually // contain allocated pointers, so we can just memcpy it. // Since static items can never have allocated memory, // this is ok. For now anyhow. mc::cat_static_item => { true } mc::cat_rvalue(*) | mc::cat_local(*) | mc::cat_arg(*) | mc::cat_self(*) => { true } mc::cat_downcast(b) | mc::cat_interior(b, _) => { match ty::get(b.ty).sty { ty::ty_struct(did, _) | ty::ty_enum(did, _) => { if ty::has_dtor(bccx.tcx, did) { bccx.span_err( cmt0.span, fmt!("cannot move out of type `%s`, \ which defines the `Drop` trait", b.ty.user_string(bccx.tcx))); false } else { check_is_legal_to_move_from(bccx, cmt0, b) } } _ => { check_is_legal_to_move_from(bccx, cmt0, b) } } } mc::cat_deref(b, _, mc::uniq_ptr) | mc::cat_discr(b, _) => { check_is_legal_to_move_from(bccx, cmt0, b) } } }
avg_line_length: 35.79558
max_line_length: 82
alphanum_fraction: 0.505634

hexsha: eb086e19f52c0b1a1b7f4b8741bb15f31c05aeaa
size: 532
content:
use super::get_path::concatenate_or_return_absolute;
use std::{fs, io, path::Path};

/// *Unsandboxed* function similar to `hard_link`, but which does not perform sandboxing.
pub(crate) fn hard_link_unchecked(
    old_start: &fs::File,
    old_path: &Path,
    new_start: &fs::File,
    new_path: &Path,
) -> io::Result<()> {
    let old_full_path = concatenate_or_return_absolute(old_start, old_path)?;
    let new_full_path = concatenate_or_return_absolute(new_start, new_path)?;
    fs::hard_link(old_full_path, new_full_path)
}
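A small in-crate usage sketch; the function is `pub(crate)`, so only callers inside the same crate can reach it, and the directory handle and file names here are illustrative.

// Creates `new.txt` as a hard link to `old.txt`, both resolved relative to `dir`.
pub(crate) fn link_in_dir(dir: &std::fs::File) -> std::io::Result<()> {
    hard_link_unchecked(
        dir,
        std::path::Path::new("old.txt"),
        dir,
        std::path::Path::new("new.txt"),
    )
}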
avg_line_length: 35.466667
max_line_length: 89
alphanum_fraction: 0.727444

hexsha: 16df1438464653d5178a87e3e9dc7e5bd4404cc6
size: 437
content:
use ink_lang as ink;

#[ink::contract]
mod contract {
    #[ink(storage)]
    pub struct Contract {}

    impl Contract {
        #[ink(constructor, selector = 0, payable)]
        pub fn constructor() -> Self {
            Self {}
        }

        #[ink(message)]
        pub fn message(&self) {}
    }
}

use contract::Contract;

fn main() {
    assert!(<Contract as ::ink_lang::reflect::DispatchableConstructorInfo<0>>::PAYABLE);
}
avg_line_length: 18.208333
max_line_length: 88
alphanum_fraction: 0.567506

hexsha: 71b4d3370a7d08728e8cbc2e5c3e6ce610cb3cf8
size: 3,399
content:
use crate::{functions::sum, *}; use super::sum_axes_to_desire; pub fn mul(a: &Computed, b: &Computed) -> Computed { let y = Computed::new((&**a * &**b).into_ndarray()); chain( &[a.clone(), b.clone()], &[y.clone()], false, "mul", |xs, _ys, gys| { let mut gx0 = &gys[0] * &xs[1]; let mut gx1 = &gys[0] * &xs[0]; // fit shape if xs[0].shape() != gx0.shape() { gx0 = gx0.sum(sum_axes_to_desire(gx0.shape(), xs[0].shape()), false); } if xs[1].shape() != gx1.shape() { gx1 = gx1.sum(sum_axes_to_desire(gx1.shape(), xs[1].shape()), false); } vec![gx0, gx1] }, ); y } pub fn multi_mul(xs: &[Computed]) -> Computed { assert!(xs.len() >= 1); // NOTE: This assert is unnecessary? if broadcasted_shape(&xs).is_none() { panic!( "cannot broadcast on shapes: {:?}", xs.iter() .map(|x| (**x).shape().to_vec()) .collect::<Vec<_>>() ); }; let mut y = (*xs[0]).to_owned(); for x in xs.iter().skip(1) { y = y * &**x; } let y = Computed::new(y.into_ndarray()); chain(xs, &[y.clone()], false, "multi_mul", |xs, _ys, gys| { xs.iter() .enumerate() .map(|(i, x)| { let mut g = multi_mul( &(0..xs.len()) .filter(|j| *j != i) .map(|j| xs[j].clone()) .chain(gys.iter().cloned()) .collect::<Vec<_>>(), ); // fit shape if x.shape() != g.shape() { g = sum(&g, sum_axes_to_desire(g.shape(), x.shape()), false); // TODO: https://github.com/oreilly-japan/deep-learning-from-scratch-3/blob/06419d7fb2e7ea19aa3719efc27795edbdc41a1f/dezero/utils.py#L125 } g }) .collect() }); y } pub fn broadcasted_shape(xs: &[impl std::ops::Deref<Target = NDArray>]) -> Option<Vec<usize>> { let mut shape = xs[0].shape().to_vec(); for x in xs.iter().skip(1) { let x_shape = x.shape(); // scalar is broadcasted to any shape if shape.len() == 0 { shape = x_shape.to_vec(); continue; } if x_shape.len() == 0 { continue; } if shape.len() != x_shape.len() { return None; } for i in 0..shape.len() { match (shape[i], x_shape[i]) { (1, _) => shape[i] = x_shape[i], (_, 1) => (), (s1, s2) if s1 == s2 => (), (_, _) => return None, } } } Some(shape) } #[test] fn test() { let s = broadcasted_shape(&[ &ndarray::Array::zeros([1, 1, 1]).into_ndarray(), &ndarray::Array::zeros([1, 1, 2]).into_ndarray(), &ndarray::Array::zeros([3, 1, 1]).into_ndarray(), ]); assert_eq!(s, Some(vec![3, 1, 2])); let s = broadcasted_shape(&[ &ndarray::Array::zeros([1, 4, 1]).into_ndarray(), &ndarray::Array::zeros([3, 4, 2]).into_ndarray(), &ndarray::Array::zeros([1, 4, 2]).into_ndarray(), ]); assert_eq!(s, Some(vec![3, 4, 2])); }
avg_line_length: 27.41129
max_line_length: 157
alphanum_fraction: 0.43454

hexsha: fcd5c929063c9c08be4c9090ad3978fe76b5bf0f
size: 3,767
content:
pub mod actor; pub mod crypto; //#[cfg(feature = "debug")] pub mod debug; pub mod gas; pub mod ipld; pub mod message; pub mod network; pub mod rand; pub mod send; pub mod sself; pub mod vm; /// Generate a set of FVM syscall shims. /// /// ```ignore /// fvm_sdk::sys::fvm_syscalls! { /// module = "my_wasm_module"; /// /// /// This method will translate to a syscall with the signature: /// /// /// /// fn(arg: u64) -> u32; /// /// /// /// Where the returned u32 is the status code. /// pub fn returns_nothing(arg: u64) -> Result<()>; /// /// /// This method will translate to a syscall with the signature: /// /// /// /// fn(out: u32, arg: u32) -> u32; /// /// /// /// Where `out` is a pointer to where the return value will be written and the returned u32 /// /// is the status code. /// pub fn returns_value(arg: u64) -> Result<u64>; /// /// /// This method will translate to a syscall with the signature: /// /// /// /// fn(arg: u32) -> u32; /// /// /// /// But it will panic if this function returns. /// pub fn aborts(arg: u32) -> !; /// } /// ``` macro_rules! fvm_syscalls { // Returns no values. (module = $module:literal; $(#[$attrs:meta])* $v:vis fn $name:ident($($args:ident : $args_ty:ty),*$(,)?) -> Result<()>; $($rest:tt)*) => { $(#[$attrs])* $v unsafe fn $name($($args:$args_ty),*) -> Result<(), fvm_shared::error::ErrorNumber> { #[link(wasm_import_module = $module)] extern "C" { #[link_name = stringify!($name)] fn syscall($($args:$args_ty),*) -> u32; } let code = syscall($($args),*); if code == 0 { Ok(()) } else { Err(num_traits::FromPrimitive::from_u32(code) .expect("syscall returned unrecognized exit code")) } } $crate::sys::fvm_syscalls! { module = $module; $($rest)* } }; // Returns a value. (module = $module:literal; $(#[$attrs:meta])* $v:vis fn $name:ident($($args:ident : $args_ty:ty),*$(,)?) -> Result<$ret:ty>; $($rest:tt)*) => { $(#[$attrs])* $v unsafe fn $name($($args:$args_ty),*) -> Result<$ret, fvm_shared::error::ErrorNumber> { #[link(wasm_import_module = $module)] extern "C" { #[link_name = stringify!($name)] fn syscall(ret: *mut $ret $(, $args : $args_ty)*) -> u32; } let mut ret = std::mem::MaybeUninit::<$ret>::uninit(); let code = syscall(ret.as_mut_ptr(), $($args),*); if code == 0 { Ok(ret.assume_init()) } else { Err(num_traits::FromPrimitive::from_u32(code) .expect("syscall returned unrecognized exit code")) } } $crate::sys::fvm_syscalls! { module = $module; $($rest)* } }; // Does not return. (module = $module:literal; $(#[$attrs:meta])* $v:vis fn $name:ident($($args:ident : $args_ty:ty),*$(,)?) -> !; $($rest:tt)*) => { $(#[$attrs])* $v unsafe fn $name($($args:$args_ty),*) -> ! { #[link(wasm_import_module = $module)] extern "C" { #[link_name = stringify!($name)] fn syscall($($args : $args_ty),*) -> u32; } syscall($($args),*); panic!(concat!("syscall ", stringify!($name), " should not have returned")) } $crate::sys::fvm_syscalls! { module = $module; $($rest)* } }; // Base case. (module = $module:literal;) => {}; } pub(crate) use fvm_syscalls;
avg_line_length: 32.756522
max_line_length: 147
alphanum_fraction: 0.482878

hexsha: 56979b54305631afcab137a626e6f10ebff79cd8
size: 3,600
content:
// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0. use std::io::{Read, Result, Write}; use std::sync::Arc; /// An I/O rate limiter /// /// Throttles the maximum bytes per second written or read. pub trait IOLimiterExt { type IOLimiter: IOLimiter; } pub trait IOLimiter: Send + Sync { /// # Arguments /// /// - `bytes_per_sec`: controls the total write rate of compaction and flush in bytes per second. fn new(bytes_per_sec: i64) -> Self; /// Sets the rate limit in bytes per second fn set_bytes_per_second(&self, bytes_per_sec: i64); /// Requests an access token to read or write bytes. If this request can not be satisfied, the call is blocked. fn request(&self, bytes: i64); /// Gets the max bytes that can be granted in a single burst. fn get_max_bytes_per_time(&self) -> i64; /// Gets the total bytes that have gone through the rate limiter. fn get_total_bytes_through(&self) -> i64; /// Gets the rate limit in bytes per second. fn get_bytes_per_second(&self) -> i64; /// Gets the total number of requests that have gone through rate limiter fn get_total_requests(&self) -> i64; } pub struct LimitWriter<'a, T: Write, L: IOLimiter> { limiter: Option<Arc<L>>, writer: &'a mut T, } impl<'a, T: Write + 'a, L: IOLimiter> LimitWriter<'a, T, L> { pub fn new(limiter: Option<Arc<L>>, writer: &'a mut T) -> LimitWriter<'a, T, L> { LimitWriter { limiter, writer } } } impl<'a, T: Write + 'a, L: IOLimiter> Write for LimitWriter<'a, T, L> { fn write(&mut self, buf: &[u8]) -> Result<usize> { let total = buf.len(); if let Some(ref limiter) = self.limiter { let single = limiter.get_max_bytes_per_time() as usize; let mut curr = 0; let mut end; while curr < total { if curr + single >= total { end = total; } else { end = curr + single; } limiter.request((end - curr) as i64); self.writer.write_all(&buf[curr..end])?; curr = end; } } else { self.writer.write_all(buf)?; } Ok(total) } fn flush(&mut self) -> Result<()> { self.writer.flush()?; Ok(()) } } /// A limited reader. /// /// The read limits the bytes per second read from an underlying reader. pub struct LimitReader<'a, T: Read, L: IOLimiter> { limiter: Option<Arc<L>>, reader: &'a mut T, } impl<'a, T: Read + 'a, L: IOLimiter> LimitReader<'a, T, L> { /// Create a new `LimitReader`. pub fn new(limiter: Option<Arc<L>>, reader: &'a mut T) -> LimitReader<'a, T, L> { LimitReader { limiter, reader } } } impl<'a, T: Read + 'a, L: IOLimiter> Read for LimitReader<'a, T, L> { fn read(&mut self, buf: &mut [u8]) -> Result<usize> { if let Some(ref limiter) = self.limiter { let total = buf.len(); let single = limiter.get_max_bytes_per_time() as usize; let mut count = 0; let mut curr = 0; let mut end; while curr < total { if curr + single >= total { end = total; } else { end = curr + single; } limiter.request((end - curr) as i64); count += self.reader.read(&mut buf[curr..end])?; curr = end; } Ok(count) } else { self.reader.read(buf) } } }
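A brief usage sketch for the `LimitWriter` defined above, assuming some concrete type implementing `IOLimiter` exists; it reuses the module's `Arc`, `Write`, and `Result` imports, and the function name is illustrative.

// Writes `data` through the limiter; `LimitWriter::write` splits the buffer into
// bursts of at most `get_max_bytes_per_time()` bytes and blocks on `request`.
fn write_throttled<L: IOLimiter>(
    limiter: Arc<L>,
    file: &mut std::fs::File,
    data: &[u8],
) -> Result<()> {
    let mut writer = LimitWriter::new(Some(limiter), file);
    writer.write_all(data)?;
    writer.flush()
}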
avg_line_length: 30.769231
max_line_length: 115
alphanum_fraction: 0.545

hexsha: 1ad69a582955be910166e31567f225d76ea548d9
size: 975
content:
//! Types for use in FFXIV-related projects. #[cfg(feature = "with_serde")] #[macro_use] extern crate serde_derive; #[cfg(feature = "clans")] pub mod clans; #[cfg(feature = "data_centers")] pub mod data_centers; pub mod errors; #[cfg(feature = "guardians")] pub mod guardians; pub mod jobs; #[cfg(feature = "races")] pub mod races; #[cfg(feature = "roles")] pub mod roles; #[cfg(feature = "worlds")] pub mod worlds; #[cfg(feature = "clans")] pub use self::clans::Clan; #[cfg(feature = "data_centers")] pub use self::data_centers::DataCenter; #[cfg(feature = "guardians")] pub use self::guardians::Guardian; #[cfg(feature = "combat_jobs")] pub use self::jobs::Job; #[cfg(feature = "non_combat_jobs")] pub use self::jobs::NonCombatJob; #[cfg(feature = "job_classifications")] pub use self::jobs::Classification; #[cfg(feature = "races")] pub use self::races::Race; #[cfg(feature = "roles")] pub use self::roles::Role; #[cfg(feature = "worlds")] pub use self::worlds::World;
avg_line_length: 24.375
max_line_length: 44
alphanum_fraction: 0.689231

hexsha: 2fa2703119aefa5ae6d61db97961c57e30fac20a
size: 617
content:
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License in the LICENSE-APACHE file or at:
//     https://www.apache.org/licenses/LICENSE-2.0

//! KAS prelude
//!
//! This module allows convenient importation of common unambiguous items:
//! ```
//! use kas::prelude::*;
//! ```
//!
//! This prelude may be more useful when implementing widgets than when simply
//! using widgets in a GUI.

#[doc(no_inline)]
pub use kas_core::prelude::*;
#[doc(no_inline)]
pub use kas_widgets::adapter::AdaptWidget;
avg_line_length: 30.85
max_line_length: 78
alphanum_fraction: 0.701783

hexsha: 64936c12ec9bef0368251be4ea7dfefb7afe7ce3
size: 368
content:
//! A module which provides some functionalities to work with the terminal screen,
//! like allowing you to switch between main and alternate screen or putting the terminal into raw mode.

mod alternate;
mod raw;
mod screen;

use super::{commands, TerminalOutput};

pub use self::alternate::AlternateScreen;
pub use self::raw::RawScreen;
pub use self::screen::Screen;
avg_line_length: 28.307692
max_line_length: 104
alphanum_fraction: 0.771739

hexsha: 67c12d96d8c81ec958775e2877f68ef76105fc74
size: 7,288
content:
use bevy_ecs::{prelude::*, system::EntityCommands}; use bevy_math::prelude::*; use bevy_render::prelude::*; use bevy_transform::prelude::*; use bevy_ui::prelude::*; use bevy_utils::*; use crate::utils::*; use super::*; pub struct FrameWidgetBuilder<'a, 'w, 's> { root: WidgetBuilderEntity<'a, 'w, 's, Option<NodeBundle>>, title_bar: WidgetBuilderEntity<'a, 'w, 's, Option<ButtonBundle>>, title_text: WidgetBuilderEntity<'a, 'w, 's, Option<TextBundle>>, close_button: WidgetBuilderEntity<'a, 'w, 's, Option<ButtonBundle>>, content_entity: Option<Entity>, } pub struct FrameWidgetEntities { pub root: Entity, pub title_bar: Entity, pub title_text: Entity, pub close_button: Entity, pub content: Option<Entity>, } impl Default for FrameWidgetBuilder<'_, '_, '_> { fn default() -> Self { Self::new() } } impl<'a, 'w, 's> FrameWidgetBuilder<'a, 'w, 's> { /// Creates a new tooltip builder pub fn new() -> Self { Self { root: WidgetBuilderEntity::new(Some(NodeBundle { style: Style { position_type: PositionType::Absolute, position: Rect::all(Val::Px(0.0)), border: Rect::all(Val::Px(2.0)), align_items: AlignItems::Stretch, flex_direction: FlexDirection::Column, justify_content: JustifyContent::FlexEnd, ..default() }, ..default() })), title_bar: WidgetBuilderEntity::new(Some(ButtonBundle { style: Style { justify_content: JustifyContent::SpaceBetween, align_items: AlignItems::Center, min_size: Size { height: Val::Px(25.0), ..default() }, ..default() }, color: Color::NONE.into(), ..default() })), title_text: WidgetBuilderEntity::new(Some(TextBundle { style: Style { margin: Rect::all(Val::Px(5.0)), ..default() }, ..default() })), close_button: WidgetBuilderEntity::new(Some(ButtonBundle { style: Style { //size: Size::new(Val::Px(35.0), Val::Px(35.0)), aspect_ratio: Some(1.0), ..default() }, ..default() })), content_entity: None, } } /// Allows to run commands on the root entity after it's spawned. pub fn root_commands( &mut self, run_commands: impl for<'b> Fn(&mut EntityCommands<'w, 's, 'b>) + 'a, ) -> &mut Self { self.root.commands_runners.push(Box::new(run_commands)); self } /// Allows to edit the root bundle before it is spawned. /// It is recommended to keep unmodified original values by using the struct extend syntax `..`. pub fn root_bundle(&mut self, extend: impl FnOnce(NodeBundle) -> NodeBundle) -> &mut Self { self.root.bundle = Some(extend(self.root.bundle.take().unwrap())); self } /// Allows to run commands on the title bar entity after it's spawned. pub fn title_bar_commands( &mut self, run_commands: impl for<'b> Fn(&mut EntityCommands<'w, 's, 'b>) + 'a, ) -> &mut Self { self.title_bar.commands_runners.push(Box::new(run_commands)); self } /// Allows to edit the title bar bundle before it is spawned. /// It is recommended to keep unmodified original values by using the struct extend syntax `..`. pub fn title_bar_bundle(&mut self, extend: impl FnOnce(ButtonBundle) -> ButtonBundle) -> &mut Self { self.title_bar.bundle = Some(extend(self.title_bar.bundle.take().unwrap())); self } /// Allows to run commands on the title text entity after it's spawned. pub fn title_text_commands( &mut self, run_commands: impl for<'b> Fn(&mut EntityCommands<'w, 's, 'b>) + 'a, ) -> &mut Self { self.title_text.commands_runners.push(Box::new(run_commands)); self } /// Allows to edit the title text bundle before it is spawned. /// It is recommended to keep unmodified original values by using the struct extend syntax `..`. 
pub fn title_text_bundle(&mut self, extend: impl FnOnce(TextBundle) -> TextBundle) -> &mut Self { self.title_text.bundle = Some(extend(self.title_text.bundle.take().unwrap())); self } /// Allows to run commands on the close button entity after it's spawned. pub fn close_button_commands( &mut self, run_commands: impl for<'b> Fn(&mut EntityCommands<'w, 's, 'b>) + 'a, ) -> &mut Self { self.close_button.commands_runners.push(Box::new(run_commands)); self } /// Allows to edit the title close button bundle before it is spawned. /// It is recommended to keep unmodified original values by using the struct extend syntax `..`. pub fn close_button_bundle(&mut self, extend: impl FnOnce(ButtonBundle) -> ButtonBundle) -> &mut Self { self.close_button.bundle = Some(extend(self.close_button.bundle.take().unwrap())); self } /// Sets the tooltip content. /// The entity should be a valid UI node and will be added to the tooltip's tree when spawn() is called. pub fn with_content(&mut self, entity: Entity) -> &mut Self { self.content_entity = Some(entity); self } /// Spawns the entity and returns the EntityCommands for the root node. /// Using the builder again after calling this will panic. pub fn spawn(&mut self, commands: &'a mut Commands<'w, 's>) -> FrameWidgetEntities { let root = commands .spawn_bundle(self.root.bundle.take().unwrap()) .insert(Frame) .run_entity_commands(&self.root.commands_runners) .id(); let title_bar = commands .spawn_bundle(self.title_bar.bundle.take().unwrap()) .insert(RootEntity(root)) .insert(Grab) .insert(FrameGrabber) .run_entity_commands(&self.title_bar.commands_runners) .id(); let title_text = commands .spawn_bundle(self.title_text.bundle.take().unwrap()) .insert(RootEntity(root)) .run_entity_commands(&self.title_text.commands_runners) .id(); let close_button = commands .spawn_bundle(self.close_button.bundle.take().unwrap()) .insert(RootEntity(root)) .run_entity_commands(&self.close_button.commands_runners) .id(); if let Some(content) = self.content_entity { commands.entity(root).add_child(content); } commands.entity(root).push_children(&[title_bar]); commands .entity(title_bar) .push_children(&[title_text, close_button]); FrameWidgetEntities { root, title_bar, title_text, close_button, content: self.content_entity, } } }
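A hypothetical Bevy system using the builder above with its default bundles; the system name is illustrative.

fn spawn_frame_widget(mut commands: Commands) {
    // Spawns the root node, title bar, title text and close button in one call.
    let widgets = FrameWidgetBuilder::new().spawn(&mut commands);
    // The returned entity ids can be used later, e.g. to attach content.
    let _ = (widgets.root, widgets.title_bar, widgets.title_text, widgets.close_button);
}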
avg_line_length: 36.44
max_line_length: 108
alphanum_fraction: 0.573957

hexsha: d97e5a7028210b32300067b69d114e7ab51b0a7f
size: 5,506
content:
mod common; use actix_web::{test, web, App}; use drogue_cloud_authentication_service::{endpoints, service, WebData}; use drogue_cloud_service_api::auth::device::authn::{AuthenticationRequest, Credential}; use drogue_cloud_test_common::{client, db}; use serde_json::{json, Value}; use serial_test::serial; fn device1_json() -> Value { json!({"pass":{ "application": { "metadata": { "name": "app1", "uid": "4e185ea6-7c26-11eb-a319-d45d6455d210", "creationTimestamp": "2020-01-01T00:00:00Z", "resourceVersion": "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11", "generation": 0, }, }, "device": { "metadata": { "application": "app1", "name": "device1", "uid": "4e185ea6-7c26-11eb-a319-d45d6455d211", "creationTimestamp": "2020-01-01T00:00:00Z", "resourceVersion": "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11", "generation": 0, }, } }}) } fn device3_json() -> Value { json!({"pass":{ "application": { "metadata": { "name": "app1", "uid": "4e185ea6-7c26-11eb-a319-d45d6455d210", "creationTimestamp": "2020-01-01T00:00:00Z", "resourceVersion": "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11", "generation": 0, }, }, "device": { "metadata": { "application": "app1", "name": "device3", "uid": "4e185ea6-7c26-11eb-a319-d45d6455d212", "creationTimestamp": "2020-01-01T00:00:00Z", "resourceVersion": "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11", "generation": 0, }, } }}) } /// Authorize a device using a password. #[actix_rt::test] #[serial] async fn test_auth_passes_password() { test_auth!(AuthenticationRequest{ application: "app1".into(), device: "device1".into(), credential: Credential::Password("foo".into()), r#as: None, } => device1_json()); } /// Authorize a device using a password. #[actix_rt::test] #[serial] async fn test_auth_passes_password_alias() { test_auth!(AuthenticationRequest{ application: "app1".into(), device: "foo".into(), credential: Credential::Password("bar".into()), r#as: None, } => device3_json()); } /// Authorize a device using a username/password combination for a password-only credential /// that has a username matching the device ID. #[actix_rt::test] #[serial] async fn test_auth_passes_password_with_device_username() { test_auth!(AuthenticationRequest{ application: "app1".into(), device: "device1".into(), credential: Credential::UsernamePassword{username: "device1".into(), password: "foo".into()}, r#as: None, } => device1_json()); } /// Authorize a device using a username/password combination for a password-only credential /// that has a username matching the device ID. 
#[actix_rt::test] #[serial] async fn test_auth_fails_password_with_non_matching_device_username() { test_auth!(AuthenticationRequest{ application: "app1".into(), device: "device1".into(), credential: Credential::UsernamePassword{username: "device2".into(), password: "foo".into()}, r#as: None, } => json!("fail")); } #[actix_rt::test] #[serial] async fn test_auth_fails_wrong_password() { test_auth!(AuthenticationRequest{ application: "app1".into(), device: "device1".into(), credential: Credential::Password("foo1".into()), r#as: None, } => json!("fail")); } #[actix_rt::test] #[serial] async fn test_auth_fails_missing_tenant() { test_auth!(AuthenticationRequest{ application: "app2".into(), device: "device1".into(), credential: Credential::Password("foo".into()), r#as: None, } => json!("fail")); } #[actix_rt::test] #[serial] async fn test_auth_fails_missing_device() { test_auth!(AuthenticationRequest{ application: "app1".into(), device: "device2".into(), credential: Credential::Password("foo".into()), r#as: None, } => json!("fail")); } #[actix_rt::test] #[serial] async fn test_auth_passes_username_password() { test_auth!(AuthenticationRequest{ application: "app1".into(), device: "device3".into(), credential: Credential::UsernamePassword{username: "foo".into(), password: "bar".into()}, r#as: None, } => device3_json()); } /// The password only variant must fail, as the username is not the device id. #[actix_rt::test] #[serial] async fn test_auth_fails_password_only() { test_auth!(AuthenticationRequest{ application: "app1".into(), device: "device3".into(), credential: Credential::Password("bar".into()), r#as: None, } => json!("fail")); } /// The password only variant must success, as the username is equal to the device id. #[actix_rt::test] #[serial] async fn test_auth_passes_password_only() { test_auth!(AuthenticationRequest{ application: "app1".into(), device: "device3".into(), credential: Credential::Password("baz".into()), r#as: None, } => device3_json()); }
avg_line_length: 31.462857
max_line_length: 101
alphanum_fraction: 0.583

hexsha: f710cb94ca72f748a705a63d4b6b813da6ea3f8f
size: 1,310
content:
/*
 * Nomad
 *
 * Nomad OpenApi specification
 *
 * The version of the OpenAPI document: 0.11.0
 *
 * Generated by: https://openapi-generator.tech
 */

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TaskState {
    #[serde(rename = "State", skip_serializing_if = "Option::is_none")]
    pub state: Option<String>,
    #[serde(rename = "Failed", skip_serializing_if = "Option::is_none")]
    pub failed: Option<bool>,
    #[serde(rename = "Restarts", skip_serializing_if = "Option::is_none")]
    pub restarts: Option<i32>,
    #[serde(rename = "LastRestart", skip_serializing_if = "Option::is_none")]
    pub last_restart: Option<String>,
    #[serde(rename = "StartedAt", skip_serializing_if = "Option::is_none")]
    pub started_at: Option<String>,
    #[serde(rename = "FinishedAt", skip_serializing_if = "Option::is_none")]
    pub finished_at: Option<String>,
    #[serde(rename = "Events", skip_serializing_if = "Option::is_none")]
    pub events: Option<Vec<crate::models::TaskEvent>>,
}

impl TaskState {
    pub fn new() -> TaskState {
        TaskState {
            state: None,
            failed: None,
            restarts: None,
            last_restart: None,
            started_at: None,
            finished_at: None,
            events: None,
        }
    }
}
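A hedged round-trip sketch, assuming `serde_json` is available to the crate; because of the `rename` and `skip_serializing_if` attributes, only populated fields appear in the JSON, under their PascalCase names.

#[cfg(test)]
mod task_state_example {
    use super::TaskState;

    #[test]
    fn serializes_only_set_fields() {
        let state = TaskState { state: Some("running".to_string()), ..TaskState::new() };
        let json = serde_json::to_string(&state).unwrap();
        assert_eq!(json, r#"{"State":"running"}"#);
        let back: TaskState = serde_json::from_str(&json).unwrap();
        assert_eq!(back, state);
    }
}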
avg_line_length: 27.87234
max_line_length: 77
alphanum_fraction: 0.625191

hexsha: f55fb0d391a81bc2ac275eee51c5c18286ed3184
size: 2,202
content:
#[doc = "Slope definition A0."] pub struct A0 { register: ::vcell::VolatileCell<u32>, } #[doc = "Slope definition A0."] pub mod a0; #[doc = "Slope definition A1."] pub struct A1 { register: ::vcell::VolatileCell<u32>, } #[doc = "Slope definition A1."] pub mod a1; #[doc = "Slope definition A2."] pub struct A2 { register: ::vcell::VolatileCell<u32>, } #[doc = "Slope definition A2."] pub mod a2; #[doc = "Slope definition A3."] pub struct A3 { register: ::vcell::VolatileCell<u32>, } #[doc = "Slope definition A3."] pub mod a3; #[doc = "Slope definition A4."] pub struct A4 { register: ::vcell::VolatileCell<u32>, } #[doc = "Slope definition A4."] pub mod a4; #[doc = "Slope definition A5."] pub struct A5 { register: ::vcell::VolatileCell<u32>, } #[doc = "Slope definition A5."] pub mod a5; #[doc = "y-intercept B0."] pub struct B0 { register: ::vcell::VolatileCell<u32>, } #[doc = "y-intercept B0."] pub mod b0; #[doc = "y-intercept B1."] pub struct B1 { register: ::vcell::VolatileCell<u32>, } #[doc = "y-intercept B1."] pub mod b1; #[doc = "y-intercept B2."] pub struct B2 { register: ::vcell::VolatileCell<u32>, } #[doc = "y-intercept B2."] pub mod b2; #[doc = "y-intercept B3."] pub struct B3 { register: ::vcell::VolatileCell<u32>, } #[doc = "y-intercept B3."] pub mod b3; #[doc = "y-intercept B4."] pub struct B4 { register: ::vcell::VolatileCell<u32>, } #[doc = "y-intercept B4."] pub mod b4; #[doc = "y-intercept B5."] pub struct B5 { register: ::vcell::VolatileCell<u32>, } #[doc = "y-intercept B5."] pub mod b5; #[doc = "Segment end T0."] pub struct T0 { register: ::vcell::VolatileCell<u32>, } #[doc = "Segment end T0."] pub mod t0; #[doc = "Segment end T1."] pub struct T1 { register: ::vcell::VolatileCell<u32>, } #[doc = "Segment end T1."] pub mod t1; #[doc = "Segment end T2."] pub struct T2 { register: ::vcell::VolatileCell<u32>, } #[doc = "Segment end T2."] pub mod t2; #[doc = "Segment end T3."] pub struct T3 { register: ::vcell::VolatileCell<u32>, } #[doc = "Segment end T3."] pub mod t3; #[doc = "Segment end T4."] pub struct T4 { register: ::vcell::VolatileCell<u32>, } #[doc = "Segment end T4."] pub mod t4;
avg_line_length: 21.378641
max_line_length: 41
alphanum_fraction: 0.621708

hexsha: 222b39b32ac22646749e2bb61a06fd3a8ce7b7f1
size: 10,164
content:
///! Input will listens to user input, modify the query string, send special ///! keystrokes(such as Enter, Ctrl-p, Ctrl-n, etc) to the controller. use crate::event::{parse_event, Event}; use regex::Regex; use std::collections::HashMap; use tuikit::event::Event as TermEvent; use tuikit::key::{from_keyname, Key}; pub type ActionChain = Vec<Event>; pub struct Input { keymap: HashMap<Key, ActionChain>, } impl Input { pub fn new() -> Self { Input { keymap: get_default_key_map(), } } pub fn translate_event(&self, event: TermEvent) -> (Key, ActionChain) { match event { // search event from keymap TermEvent::Key(key) => ( key, self.keymap.get(&key).cloned().unwrap_or_else(|| { if let Key::Char(ch) = key { vec![Event::EvActAddChar(ch)] } else { vec![Event::EvInputKey(key)] } }), ), TermEvent::Resize { .. } => (Key::Null, vec![Event::EvActRedraw]), _ => (Key::Null, vec![Event::EvInputInvalid]), } } pub fn bind(&mut self, key: &str, action_chain: ActionChain) { let key = from_keyname(key); if key == None || action_chain.is_empty() { return; } let key = key.unwrap(); // remove the key for existing keymap; let _ = self.keymap.remove(&key); self.keymap.entry(key).or_insert(action_chain); } pub fn parse_keymaps(&mut self, maps: &[&str]) { for &map in maps { self.parse_keymap(map); } } // key_action is comma separated: 'ctrl-j:accept,ctrl-k:kill-line' pub fn parse_keymap(&mut self, key_action: &str) { debug!("got key_action: {:?}", key_action); for (key, action_chain) in parse_key_action(key_action).into_iter() { debug!("parsed key_action: {:?}: {:?}", key, action_chain); let action_chain = action_chain .into_iter() .filter_map(|(action, arg)| parse_event(action, arg)) .collect(); self.bind(key, action_chain); } } pub fn parse_expect_keys(&mut self, keys: Option<&str>) { if let Some(keys) = keys { for key in keys.split(',') { self.bind(key, vec![Event::EvActAccept(Some(key.to_string()))]); } } } } type KeyActions<'a> = (&'a str, Vec<(&'a str, Option<String>)>); /// parse key action string to `(key, action, argument)` tuple /// key_action is comma separated: 'ctrl-j:accept,ctrl-k:kill-line' pub fn parse_key_action(key_action: &str) -> Vec<KeyActions> { lazy_static! { // match `key:action` or `key:action:arg` or `key:action(arg)` etc. static ref RE: Regex = Regex::new(r#"(?si)([^:]+?):((?:\+?[a-z-]+?(?:"[^"]*?"|'[^']*?'|\([^\)]*?\)|\[[^\]]*?\]|:[^:]*?)?\s*)+)(?:,|$)"#) .unwrap(); // grab key, action and arg out. static ref RE_BIND: Regex = Regex::new(r#"(?si)([a-z-]+)("[^"]+?"|'[^']+?'|\([^\)]+?\)|\[[^\]]+?\]|:[^:]+?)?(?:\+|$)"#).unwrap(); } RE.captures_iter(key_action) .map(|caps| { debug!("RE: caps: {:?}", caps); let key = caps.get(1).unwrap().as_str(); let actions = RE_BIND .captures_iter(caps.get(2).unwrap().as_str()) .map(|caps| { debug!("RE_BIND: caps: {:?}", caps); ( caps.get(1).unwrap().as_str(), caps.get(2).map(|s| { // (arg) => arg, :end_arg => arg let action = s.as_str(); if action.starts_with(':') { action[1..].to_string() } else { action[1..action.len() - 1].to_string() } }), ) }) .collect(); (key, actions) }) .collect() } /// e.g. execute(...) => Some(Event::EvActExecute, Box::new(Option("..."))) pub fn parse_action_arg(action_arg: &str) -> Option<Event> { // construct a fake key_action: `fake_key:action(arg)` let fake_key_action = format!("fake_key:{}", action_arg); // get keys: [(key, [(action, arg), (action, arg)]), ...] 
let keys = parse_key_action(&fake_key_action); // only get the first key(since it is faked), and get the first action if keys.is_empty() || keys[0].1.is_empty() { None } else { // first action pair of key(keys[0].1) and first action (keys[0].1[0]) let (action, new_arg) = keys[0].1[0].clone(); parse_event(action, new_arg) } } #[rustfmt::skip] fn get_default_key_map() -> HashMap<Key, ActionChain> { let mut ret = HashMap::new(); ret.insert(Key::ESC, vec![Event::EvActAbort]); ret.insert(Key::Ctrl('c'), vec![Event::EvActAbort]); ret.insert(Key::Ctrl('g'), vec![Event::EvActAbort]); ret.insert(Key::Enter, vec![Event::EvActAccept(None)]); ret.insert(Key::Left, vec![Event::EvActBackwardChar]); ret.insert(Key::Ctrl('b'), vec![Event::EvActBackwardChar]); ret.insert(Key::Ctrl('h'), vec![Event::EvActBackwardDeleteChar]); ret.insert(Key::Backspace, vec![Event::EvActBackwardDeleteChar]); ret.insert(Key::AltBackspace, vec![Event::EvActBackwardKillWord]); ret.insert(Key::Alt('b'), vec![Event::EvActBackwardWord]); ret.insert(Key::ShiftLeft, vec![Event::EvActBackwardWord]); ret.insert(Key::CtrlLeft, vec![Event::EvActBackwardWord]); ret.insert(Key::Ctrl('a'), vec![Event::EvActBeginningOfLine]); ret.insert(Key::Home, vec![Event::EvActBeginningOfLine]); ret.insert(Key::Ctrl('l'), vec![Event::EvActClearScreen]); ret.insert(Key::Delete, vec![Event::EvActDeleteChar]); ret.insert(Key::Ctrl('d'), vec![Event::EvActDeleteCharEOF]); ret.insert(Key::Ctrl('j'), vec![Event::EvActDown(1)]); ret.insert(Key::Ctrl('n'), vec![Event::EvActDown(1)]); ret.insert(Key::Down, vec![Event::EvActDown(1)]); ret.insert(Key::Ctrl('e'), vec![Event::EvActEndOfLine]); ret.insert(Key::End, vec![Event::EvActEndOfLine]); ret.insert(Key::Ctrl('f'), vec![Event::EvActForwardChar]); ret.insert(Key::Right, vec![Event::EvActForwardChar]); ret.insert(Key::Alt('f'), vec![Event::EvActForwardWord]); ret.insert(Key::CtrlRight, vec![Event::EvActForwardWord]); ret.insert(Key::ShiftRight, vec![Event::EvActForwardWord]); ret.insert(Key::Alt('d'), vec![Event::EvActKillWord]); ret.insert(Key::ShiftUp, vec![Event::EvActPreviewPageUp(1)]); ret.insert(Key::ShiftDown, vec![Event::EvActPreviewPageDown(1)]); ret.insert(Key::PageDown, vec![Event::EvActPageDown(1)]); ret.insert(Key::PageUp, vec![Event::EvActPageUp(1)]); ret.insert(Key::Ctrl('r'), vec![Event::EvActRotateMode]); ret.insert(Key::Alt('h'), vec![Event::EvActScrollLeft(1)]); ret.insert(Key::Alt('l'), vec![Event::EvActScrollRight(1)]); ret.insert(Key::Tab, vec![Event::EvActToggle, Event::EvActDown(1)]); ret.insert(Key::Ctrl('q'), vec![Event::EvActToggleInteractive]); ret.insert(Key::BackTab, vec![Event::EvActToggle, Event::EvActUp(1)]); ret.insert(Key::Ctrl('u'), vec![Event::EvActUnixLineDiscard]); ret.insert(Key::Ctrl('w'), vec![Event::EvActUnixWordRubout]); ret.insert(Key::Ctrl('p'), vec![Event::EvActUp(1)]); ret.insert(Key::Ctrl('k'), vec![Event::EvActUp(1)]); ret.insert(Key::Up, vec![Event::EvActUp(1)]); ret.insert(Key::Ctrl('y'), vec![Event::EvActYank]); ret.insert(Key::Null, vec![Event::EvActAbort]); ret } #[cfg(test)] mod test { use super::*; #[test] fn execute_should_be_parsed_correctly() { // example from https://github.com/lotabout/skim/issues/73 let cmd = " (grep -o '[a-f0-9]\\{7\\}' | head -1 | xargs -I % sh -c 'git show --color=always % | less -R') << 'FZF-EOF' {} FZF-EOF"; let key_action_str = format!("ctrl-s:toggle-sort,ctrl-m:execute:{},ctrl-t:toggle", cmd); let key_action = parse_key_action(&key_action_str); assert_eq!(("ctrl-s", vec![("toggle-sort", None)]), key_action[0]); 
assert_eq!(("ctrl-m", vec![("execute", Some(cmd.to_string()))]), key_action[1]); assert_eq!(("ctrl-t", vec![("toggle", None)]), key_action[2]); let key_action_str = "f1:execute(less -f {}),ctrl-y:execute-silent(echo {} | pbcopy)"; let key_action = parse_key_action(key_action_str); assert_eq!(("f1", vec![("execute", Some("less -f {}".to_string()))]), key_action[0]); assert_eq!( ("ctrl-y", vec![("execute-silent", Some("echo {} | pbcopy".to_string()))]), key_action[1] ); // #196 let key_action_str = "enter:execute($EDITOR +{2} {1})"; let key_action = parse_key_action(key_action_str); assert_eq!( ("enter", vec![("execute", Some("$EDITOR +{2} {1}".to_string()))]), key_action[0] ); } #[test] fn action_chain_should_be_parsed() { let key_action = parse_key_action("ctrl-t:toggle+up"); assert_eq!(("ctrl-t", vec![("toggle", None), ("up", None)]), key_action[0]); let key_action_str = "f1:execute(less -f {}),ctrl-y:execute-silent(echo {} | pbcopy)+abort"; let key_action = parse_key_action(key_action_str); assert_eq!(("f1", vec![("execute", Some("less -f {}".to_string()))]), key_action[0]); assert_eq!( ( "ctrl-y", vec![ ("execute-silent", Some("echo {} | pbcopy".to_string())), ("abort", None) ] ), key_action[1] ); } }
avg_line_length: 41.317073
max_line_length: 137
alphanum_fraction: 0.533747

hexsha: b9a256c6eb8fb805f3f25a74bda09bc6063a36c4
size: 5,071
content:
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. use crate::alloc::{vec, vec::Vec}; use core::any::{type_name, TypeId}; use core::ptr::NonNull; use core::{fmt, mem}; use crate::archetype::TypeInfo; use crate::Component; /// A dynamically typed collection of components pub unsafe trait DynamicBundle { /// Returns a `TypeId` uniquely identifying the set of components, if known #[doc(hidden)] fn key(&self) -> Option<TypeId> { None } /// Invoke a callback on the fields' type IDs, sorted by descending alignment then id #[doc(hidden)] fn with_ids<T>(&self, f: impl FnOnce(&[TypeId]) -> T) -> T; /// Obtain the fields' TypeInfos, sorted by descending alignment then id #[doc(hidden)] fn type_info(&self) -> Vec<TypeInfo>; /// Allow a callback to move all components out of the bundle /// /// Must invoke `f` only with a valid pointer and the pointee's type and size. `put` may only be /// called at most once on any given value. #[doc(hidden)] unsafe fn put(self, f: impl FnMut(*mut u8, TypeInfo)); } /// A statically typed collection of components pub unsafe trait Bundle: DynamicBundle { #[doc(hidden)] fn with_static_ids<T>(f: impl FnOnce(&[TypeId]) -> T) -> T; /// Obtain the fields' TypeInfos, sorted by descending alignment then id #[doc(hidden)] fn static_type_info() -> Vec<TypeInfo>; /// Construct `Self` by moving components out of pointers fetched by `f` /// /// # Safety /// /// `f` must produce pointers to the expected fields. The implementation must not read from any /// pointers if any call to `f` returns `None`. #[doc(hidden)] unsafe fn get(f: impl FnMut(TypeInfo) -> Option<NonNull<u8>>) -> Result<Self, MissingComponent> where Self: Sized; } /// Error indicating that an entity did not have a required component #[derive(Debug, Clone, Eq, PartialEq, Hash)] pub struct MissingComponent(&'static str); impl MissingComponent { /// Construct an error representing a missing `T` pub fn new<T: Component>() -> Self { Self(type_name::<T>()) } } impl fmt::Display for MissingComponent { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "missing {} component", self.0) } } #[cfg(feature = "std")] impl std::error::Error for MissingComponent {} macro_rules! 
tuple_impl { ($($name: ident),*) => { unsafe impl<$($name: Component),*> DynamicBundle for ($($name,)*) { fn key(&self) -> Option<TypeId> { Some(TypeId::of::<Self>()) } fn with_ids<T>(&self, f: impl FnOnce(&[TypeId]) -> T) -> T { Self::with_static_ids(f) } fn type_info(&self) -> Vec<TypeInfo> { Self::static_type_info() } #[allow(unused_variables, unused_mut)] unsafe fn put(self, mut f: impl FnMut(*mut u8, TypeInfo)) { #[allow(non_snake_case)] let ($(mut $name,)*) = self; $( f( (&mut $name as *mut $name).cast::<u8>(), TypeInfo::of::<$name>() ); mem::forget($name); )* } } unsafe impl<$($name: Component),*> Bundle for ($($name,)*) { fn with_static_ids<T>(f: impl FnOnce(&[TypeId]) -> T) -> T { const N: usize = count!($($name),*); let mut xs: [(usize, TypeId); N] = [$((mem::align_of::<$name>(), TypeId::of::<$name>())),*]; xs.sort_unstable_by(|x, y| x.0.cmp(&y.0).reverse().then(x.1.cmp(&y.1))); let mut ids = [TypeId::of::<()>(); N]; for (slot, &(_, id)) in ids.iter_mut().zip(xs.iter()) { *slot = id; } f(&ids) } fn static_type_info() -> Vec<TypeInfo> { let mut xs = vec![$(TypeInfo::of::<$name>()),*]; xs.sort_unstable(); xs } #[allow(unused_variables, unused_mut)] unsafe fn get(mut f: impl FnMut(TypeInfo) -> Option<NonNull<u8>>) -> Result<Self, MissingComponent> { #[allow(non_snake_case)] let ($(mut $name,)*) = ($( f(TypeInfo::of::<$name>()).ok_or_else(MissingComponent::new::<$name>)? .as_ptr() .cast::<$name>(),)* ); Ok(($($name.read(),)*)) } } } } macro_rules! count { () => { 0 }; ($x: ident $(, $rest: ident)*) => { 1 + count!($($rest),*) }; } smaller_tuples_too!(tuple_impl, O, N, M, L, K, J, I, H, G, F, E, D, C, B, A);
34.496599
113
0.534806
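The `with_static_ids` body generated by `tuple_impl` above derives a stable ordering for a tuple of components by sorting their `TypeId`s by descending alignment and then by id, which is what lets differently ordered tuples map to the same archetype key. A minimal standalone sketch of that ordering, using only `std` and two hypothetical component types (`Position` and `Flag` are illustrative, not part of the crate):

use std::any::TypeId;
use std::mem;

// Hypothetical component types used only for illustration.
#[allow(dead_code)]
struct Position([f32; 3]);
#[allow(dead_code)]
struct Flag(bool);

fn sorted_ids<A: 'static, B: 'static>() -> [TypeId; 2] {
    // Mirror of with_static_ids: sort by descending alignment, then by TypeId.
    let mut xs = [
        (mem::align_of::<A>(), TypeId::of::<A>()),
        (mem::align_of::<B>(), TypeId::of::<B>()),
    ];
    xs.sort_unstable_by(|x, y| x.0.cmp(&y.0).reverse().then(x.1.cmp(&y.1)));
    [xs[0].1, xs[1].1]
}

fn main() {
    // The ordering is the same no matter how the tuple is written, so
    // (Position, Flag) and (Flag, Position) produce the same id sequence.
    assert_eq!(sorted_ids::<Position, Flag>(), sorted_ids::<Flag, Position>());
    println!("ids agree for both tuple orders");
}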
083def4d74afb6832002a662146604a7d06ac233
105
mod hello;
mod hogehoge;

pub fn mod_hello() {
    hello::hello_hoge();
    hogehoge::hogehoge_hello()
}
13.125
30
0.666667
e21be1a757cac9704cad112ecf9efbf3a76f243a
23841
use crate::error::{Error, Result}; use crate::join::{Join, JoinKind, JoinOp, QualifiedJoin}; use crate::op::{Op, OpMutVisitor}; use crate::query::QuerySet; use crate::rule::expr_simplify::{update_simplify_nested, NullCoalesce}; use crate::rule::RuleEffect; use crate::setop::{Setop, SetopKind}; use std::collections::HashSet; use std::mem; use xngin_expr::controlflow::{Branch, ControlFlow, Unbranch}; use xngin_expr::{Col, Const, Expr, ExprKind, QueryID, Setq}; /// Eliminate redundant operators. /// 1. Filter with true predicate can be removed. /// 2. Join with some empty child can be rewritten. /// 3. Setop with some empty child can be rewritten. /// 4. Aggr with empty child can be directly evaluated. (todo) /// 5. Any other operators except Join, Setop, Aggr with empty child can be replaced with Empty. /// 6. LIMIT 0 can eliminiate entire tree. /// 7. ORDER BY in subquery without LIMIT can be removed. /// 8. join with false/null condition can be removed. #[inline] pub fn op_eliminate(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<RuleEffect> { eliminate_op(qry_set, qry_id, false) } #[inline] fn eliminate_op(qry_set: &mut QuerySet, qry_id: QueryID, is_subq: bool) -> Result<RuleEffect> { qry_set.transform_op(qry_id, |qry_set, _, op| { let mut eo = EliminateOp::new(qry_set, is_subq); op.walk_mut(&mut eo).unbranch() })? } struct EliminateOp<'a> { qry_set: &'a mut QuerySet, is_subq: bool, has_limit: bool, empty_qs: HashSet<QueryID>, } impl<'a> EliminateOp<'a> { #[inline] fn new(qry_set: &'a mut QuerySet, is_subq: bool) -> Self { EliminateOp { qry_set, is_subq, has_limit: false, empty_qs: HashSet::new(), } } #[inline] fn bottom_up(&mut self, op: &mut Op) -> ControlFlow<Error, RuleEffect> { let mut eff = RuleEffect::NONE; match op { Op::Join(j) => match j.as_mut() { Join::Cross(tbls) => { if tbls.iter().any(|t| t.as_ref().is_empty()) { // any child in cross join results in empty rows *op = Op::Empty; eff |= RuleEffect::OP; } } Join::Qualified(QualifiedJoin { kind, left: JoinOp(Op::Empty), right: JoinOp(right_op), .. }) => match kind { JoinKind::Inner | JoinKind::Left | JoinKind::Semi | JoinKind::AntiSemi | JoinKind::Mark | JoinKind::Single => *op = Op::Empty, JoinKind::Full => { if right_op.is_empty() { *op = Op::Empty; eff |= RuleEffect::OP; } else { // As left table is empty, we can convert full join to single table, // when bottom up, rewrite all columns derived from right table to null let new = mem::take(right_op); *op = new; eff |= RuleEffect::OP; return ControlFlow::Continue(eff); // skip the cleansing because left table won't contain right columns. } } }, Join::Qualified(QualifiedJoin { kind, left: JoinOp(left_op), right: JoinOp(Op::Empty), .. }) => match kind { JoinKind::Inner | JoinKind::Semi | JoinKind::AntiSemi | JoinKind::Mark | JoinKind::Single => *op = Op::Empty, JoinKind::Left | JoinKind::Full => { // similar to left table case, we replace current op with the other child, // and clean up when bottom up. 
let new = mem::take(left_op); *op = new; eff |= RuleEffect::OP; return ControlFlow::Continue(eff); } }, _ => (), }, Op::Setop(so) => { let Setop { kind, q, left, right, } = so.as_mut(); match (left.as_mut(), right.as_mut()) { (Op::Empty, Op::Empty) => { *op = Op::Empty; eff |= RuleEffect::OP; } (Op::Empty, right) => match (kind, q) { (SetopKind::Union, Setq::All) => { *op = mem::take(right); eff |= RuleEffect::OP; } (SetopKind::Except | SetopKind::Intersect, Setq::All) => { *op = Op::Empty; eff |= RuleEffect::OP; } _ => (), }, (left, Op::Empty) => { if *q == Setq::All { *op = mem::take(left); eff |= RuleEffect::OP; } } _ => (), } } Op::Limit { input, .. } => { if input.is_empty() { *op = Op::Empty; eff |= RuleEffect::OP; } } Op::Sort { input, .. } => { if input.is_empty() { *op = Op::Empty; eff |= RuleEffect::OP; } } Op::Aggr(_) => (), // todo: leave aggr as is, and optimize later Op::Proj { input, .. } => { if input.is_empty() { *op = Op::Empty; eff |= RuleEffect::OP; } } Op::Filt { input, .. } => { if input.is_empty() { *op = Op::Empty; eff |= RuleEffect::OP; } } Op::Attach(input, _) => { if input.is_empty() { *op = Op::Empty; eff |= RuleEffect::OP; } } Op::Query(_) | Op::Table(..) | Op::Row(_) | Op::JoinGraph(_) => unreachable!(), Op::Empty => (), } if !op.is_empty() && !self.empty_qs.is_empty() { // current operator is not eliminated but we have some child query set to empty, // all column references to it must be set to null let qs = &self.empty_qs; for e in op.exprs_mut() { eff |= update_simplify_nested(e, NullCoalesce::Null, |e| { if let ExprKind::Col(Col::QueryCol(qry_id, _)) = &e.kind { if qs.contains(qry_id) { *e = Expr::const_null(); } } Ok(()) }) .branch()?; } } ControlFlow::Continue(eff) } } impl OpMutVisitor for EliminateOp<'_> { type Cont = RuleEffect; type Break = Error; #[inline] fn enter(&mut self, op: &mut Op) -> ControlFlow<Error, RuleEffect> { let mut eff = RuleEffect::NONE; match op { Op::Filt { pred, input } => { if pred.is_empty() { // no predicates, remove current filter *op = Op::Empty; eff |= RuleEffect::OP; } else { match pair_const_false(pred) { (false, _) => (), (true, true) => { *op = Op::Empty; eff |= RuleEffect::OP; } (true, false) => { let input = mem::take(input.as_mut()); *op = input; eff |= RuleEffect::OP; eff |= self.enter(op)?; return ControlFlow::Continue(eff); } } } } Op::Join(join) => match join.as_mut() { Join::Cross(_) => (), Join::Qualified(QualifiedJoin { kind: JoinKind::Inner, cond, .. }) => match pair_const_false(cond) { (false, _) => (), (true, true) => { *op = Op::Empty; eff |= RuleEffect::OP; } (true, false) => { cond.clear(); } }, Join::Qualified(QualifiedJoin { kind: JoinKind::Left | JoinKind::Full, filt, .. }) => match pair_const_false(filt) { (false, _) => (), (true, true) => { *op = Op::Empty; eff |= RuleEffect::OP; } (true, false) => { filt.clear(); } }, _ => todo!(), }, Op::Limit { start, end, .. } => { if *start == *end { *op = Op::Empty; eff |= RuleEffect::OP; } else { self.has_limit = true; } } Op::Sort { input, .. } => { if self.is_subq && !self.has_limit { // in case subquery that does not have limit upon sort, // sort can be eliminated. let input = mem::take(input.as_mut()); *op = input; eff |= RuleEffect::OP; eff |= self.enter(op)?; return ControlFlow::Continue(eff); } } Op::Query(query_id) => { eff |= eliminate_op(self.qry_set, *query_id, true).branch()?; } _ => (), }; // always returns true, even if current node is updated in-place, we still // need to traverse back to eliminate entire tree if possible. 
ControlFlow::Continue(eff) } #[inline] fn leave(&mut self, op: &mut Op) -> ControlFlow<Error, RuleEffect> { let mut eff = RuleEffect::NONE; match op { Op::Query(qry_id) => { if let Some(subq) = self.qry_set.get(qry_id) { if subq.root.is_empty() { self.empty_qs.insert(*qry_id); *op = Op::Empty; eff |= RuleEffect::OP; } } } Op::Table(..) | Op::Row(_) => (), _ => { eff |= self.bottom_up(op)?; } } ControlFlow::Continue(eff) } } // treat null as false #[inline] fn pair_const_false(es: &[Expr]) -> (bool, bool) { match es { [Expr { kind: ExprKind::Const(Const::Null), .. }] => (true, true), [Expr { kind: ExprKind::Const(c), .. }] => (true, c.is_zero().unwrap_or_default()), _ => (false, false), } } #[cfg(test)] mod tests { use super::*; use crate::builder::tests::{assert_j_plan1, get_lvl_queries, j_catalog, print_plan}; use crate::lgc::LgcPlan; use crate::op::preorder; #[test] fn test_op_eliminate_false_pred() { let cat = j_catalog(); assert_j_plan1(&cat, "select c1 from t1 where null", assert_empty_root); assert_j_plan1(&cat, "select c1 from t1 where false", assert_empty_root); } #[test] fn test_op_eliminate_true_pred() { let cat = j_catalog(); assert_j_plan1(&cat, "select c1 from t1 where true", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); if let Op::Proj { input, .. } = &subq.root { assert!(matches!(input.as_ref(), Op::Query(..))) } else { panic!("fail") } }) } #[test] fn test_op_eliminate_derived_table() { let cat = j_catalog(); assert_j_plan1( &cat, "select c1 from (select c1 as c1 from t1 where null) x1", assert_empty_root, ); assert_j_plan1( &cat, "select c1 from (select c1 as c1 from (select c1 from t2 limit 0) x2) x1", assert_empty_root, ); assert_j_plan1( &cat, "select c1 from (select c1 from t1 where null) x1 where c1 > 0 order by c1 having c1 > 2 limit 1", assert_empty_root, ); } #[test] fn test_op_eliminate_order_by() { let cat = j_catalog(); // remove ORDER BY in subquery assert_j_plan1( &cat, "select c1 from (select c1 from t1 order by c0) x1", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subqs = get_lvl_queries(&q, 1); assert_eq!(subqs.len(), 1); assert!(matches!(subqs[0].root, Op::Proj { .. })) }, ); // do NOT remove ORDER BY because of LIMIT exists assert_j_plan1( &cat, "select c1 from (select c1 from t1 order by c0 limit 1) x1", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subqs = get_lvl_queries(&q, 1); assert_eq!(subqs.len(), 1); if let Op::Limit { input, .. } = &subqs[0].root { assert!(matches!(input.as_ref(), Op::Sort { .. 
})); } else { panic!("fail") } }, ); } #[test] fn test_op_eliminate_limit() { let cat = j_catalog(); // eliminate entire tree if LIMIT 0 assert_j_plan1(&cat, "select c1 from t1 limit 0", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); assert!(matches!(subq.root, Op::Empty)); }); assert_j_plan1(&cat, "select c1 from t1 limit 0 offset 3", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); assert!(matches!(subq.root, Op::Empty)); }); assert_j_plan1( &cat, "select c1 from t1 where null order by c1 limit 10", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); assert!(matches!(subq.root, Op::Empty)); }, ); // do NOT eliminate if LIMIT non-zero assert_j_plan1(&cat, "select c1 from t1 limit 1", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); assert!(matches!(subq.root, Op::Limit { .. })); }); } #[test] fn test_op_eliminate_union_all() { let cat = j_catalog(); assert_j_plan1( &cat, "select c1 from t1 where false union all select c1 from t1 limit 0", assert_empty_root, ); assert_j_plan1( &cat, "select c1 from t1 where false union all select c1 from t1", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); subq.root.walk(&mut preorder(|op| match op { Op::Setop(_) => panic!("fail to eliminate setop"), _ => (), })); }, ); assert_j_plan1( &cat, "select c1 from t1 union all select c1 from t1 limit 0", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); subq.root.walk(&mut preorder(|op| match op { Op::Setop(_) => panic!("fail to eliminate setop"), _ => (), })); }, ); } #[test] fn test_op_eliminate_except_all() { let cat = j_catalog(); assert_j_plan1( &cat, "select c1 from t1 where false except all select c1 from t1", assert_empty_root, ); assert_j_plan1( &cat, "select c1 from t1 except all select c1 from t1 where null", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); subq.root.walk(&mut preorder(|op| match op { Op::Setop(_) => panic!("fail to eliminate setop"), _ => (), })); }, ); } #[test] fn test_op_eliminate_intersect_all() { let cat = j_catalog(); assert_j_plan1( &cat, "select c1 from t1 where false intersect all select c1 from t1", assert_empty_root, ); assert_j_plan1( &cat, "select c1 from t1 intersect all select c1 from t1 where null", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); subq.root.walk(&mut preorder(|op| match op { Op::Setop(_) => panic!("fail to eliminate setop"), _ => (), })); }, ); } // cross join #[test] fn test_op_eliminate_cross_join() { let cat = j_catalog(); assert_j_plan1( &cat, "select c2 from (select c1 from t1 where null) x1, (select * from t2 where false) x2", assert_empty_root, ); assert_j_plan1( &cat, "select c2 from (select c1 from t1) x1, (select * from t2 limit 0) x2", assert_empty_root, ); } // inner join #[test] fn test_op_eliminate_inner_join() { let cat = j_catalog(); assert_j_plan1( &cat, "select c2 from (select c1 from t1 where null) x1 join (select * from t2) x2", assert_empty_root, ); assert_j_plan1( &cat, "select c2 from (select c1 from t1) x1 join (select * from t2 limit 0) x2", assert_empty_root, ); } // left join #[test] fn 
test_op_eliminate_left_join() { let cat = j_catalog(); assert_j_plan1( &cat, "select c2 from (select c1 from t1 where null) x1 left join (select * from t2) x2", assert_empty_root, ); assert_j_plan1( &cat, "select c2 from (select c1 from t1) x1 left join (select * from t2 limit 0) x2", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); subq.root.walk(&mut preorder(|op| match op { Op::Join(_) => panic!("fail to eliminate op"), _ => (), })); if let Op::Proj { cols, .. } = &subq.root { assert_eq!(&cols[0].0, &Expr::const_null()); } else { panic!("fail") } }, ); } // right join #[test] fn test_op_eliminate_right_join() { let cat = j_catalog(); assert_j_plan1( &cat, "select c2 from (select c1 from t1 where null) x1 right join (select * from t2) x2", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); subq.root.walk(&mut preorder(|op| match op { Op::Join(_) => panic!("fail to eliminate op"), _ => (), })); if let Op::Proj { cols, .. } = &subq.root { assert_ne!(&cols[0].0, &Expr::const_null()); } else { panic!("fail") } }, ); assert_j_plan1( &cat, "select c2 from (select c1 from t1) x1 right join (select * from t2 where false) x2", assert_empty_root, ); } // full join #[test] fn test_op_eliminate_full_join() { let cat = j_catalog(); assert_j_plan1( &cat, "select x1.c1, c2 from (select c1 from t1 where null) x1 full join (select * from t2) x2", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); subq.root.walk(&mut preorder(|op| match op { Op::Join(_) => panic!("fail to eliminate op"), _ => (), })); if let Op::Proj{cols, ..} = &subq.root { assert_eq!(&cols[0].0, &Expr::const_null()); assert_ne!(&cols[1].0, &Expr::const_null()); } else { panic!("fail") } }, ); assert_j_plan1( &cat, "select x1.c1, c2 from (select c1 from t1) x1 full join (select * from t2 where false) x2", |s, mut q| { op_eliminate(&mut q.qry_set, q.root).unwrap(); print_plan(s, &q); let subq = q.root_query().unwrap(); subq.root.walk(&mut preorder(|op| match op { Op::Join(_) => panic!("fail to eliminate op"), _ => (), })); if let Op::Proj{cols, ..} = &subq.root { assert_ne!(&cols[0].0, &Expr::const_null()); assert_eq!(&cols[1].0, &Expr::const_null()); } else { panic!("fail") } }, ); assert_j_plan1( &cat, "select c2 from (select c1 from t1 limit 0) x1 full join (select * from t2 where false) x2", assert_empty_root, ); } fn assert_empty_root(s1: &str, mut q1: LgcPlan) { op_eliminate(&mut q1.qry_set, q1.root).unwrap(); print_plan(s1, &q1); let root = &q1.root_query().unwrap().root; assert!(matches!(root, Op::Empty)); } }
35.477679
132
0.426366
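The `enter`/`pair_const_false` logic above treats a single constant null or false predicate as grounds to drop the whole subtree, and a constant true predicate as grounds to drop only the filter. A toy sketch of that rule on made-up `Plan`/`Pred` types (not the crate's `Op`/`Expr` API):

// Minimal sketch of constant-predicate filter elimination on a toy plan type.
#[derive(Debug, PartialEq)]
enum Plan {
    Scan(&'static str),
    Filter { pred: Pred, input: Box<Plan> },
    Empty,
}

#[allow(dead_code)]
#[derive(Debug, PartialEq)]
enum Pred {
    True,
    False,
    Null, // treated as false, as in pair_const_false
    Other,
}

fn eliminate(plan: Plan) -> Plan {
    match plan {
        Plan::Filter { pred, input } => match pred {
            // true predicate: the filter itself is redundant
            Pred::True => eliminate(*input),
            // false/null predicate: the whole subtree produces no rows
            Pred::False | Pred::Null => Plan::Empty,
            other_pred => Plan::Filter {
                pred: other_pred,
                input: Box::new(eliminate(*input)),
            },
        },
        other => other,
    }
}

fn main() {
    let p = Plan::Filter { pred: Pred::Null, input: Box::new(Plan::Scan("t1")) };
    assert_eq!(eliminate(p), Plan::Empty);
}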
89c83ab37f5dac0a802a9bbcfe59d0f606604598
6299
use std::rc::Rc; use std::sync::{Arc, Mutex}; use flux::ast::SourceLocation; use flux::semantic::nodes::*; use flux::semantic::types::MonoType; use flux::semantic::walk::{Node, Visitor}; use lsp_types as lsp; use crate::shared::get_argument_names; use crate::shared::Function; fn defined_after(loc: &SourceLocation, pos: lsp::Position) -> bool { if loc.start.line > pos.line + 1 || (loc.start.line == pos.line + 1 && loc.start.column > pos.character + 1) { return true; } false } pub struct FunctionFinderState { pub functions: Vec<Function>, } pub struct FunctionFinderVisitor { pub pos: lsp::Position, pub state: Arc<Mutex<FunctionFinderState>>, } impl FunctionFinderVisitor { pub fn new(pos: lsp::Position) -> Self { FunctionFinderVisitor { pos, state: Arc::new(Mutex::new(FunctionFinderState { functions: vec![], })), } } } impl<'a> Visitor<'a> for FunctionFinderVisitor { fn visit(&mut self, node: Rc<Node<'a>>) -> bool { if let Ok(mut state) = self.state.lock() { let loc = node.loc(); if defined_after(loc, self.pos) { return true; } if let Node::VariableAssgn(assgn) = node.as_ref() { let name = assgn.id.name.clone(); if let Expression::Function(f) = assgn.init.clone() { if let MonoType::Fun(fun) = f.typ.clone() { let mut params = get_argument_names(fun.req); for opt in get_argument_names(fun.opt) { params.push(opt); } state .functions .push(Function { name, params }) } } } if let Node::OptionStmt(opt) = node.as_ref() { if let flux::semantic::nodes::Assignment::Variable( assgn, ) = &opt.assignment { let name = assgn.id.name.clone(); if let Expression::Function(f) = assgn.init.clone() { if let MonoType::Fun(fun) = f.typ.clone() { let mut params = get_argument_names(fun.req); for opt in get_argument_names(fun.opt) { params.push(opt); } state .functions .push(Function { name, params }) } } } } } true } } #[derive(Clone)] pub struct ObjectFunction { pub object: String, pub function: Function, } #[derive(Default)] pub struct ObjectFunctionFinderState { pub results: Vec<ObjectFunction>, } #[derive(Default)] pub struct ObjectFunctionFinderVisitor { pub state: Arc<Mutex<ObjectFunctionFinderState>>, } impl<'a> Visitor<'a> for ObjectFunctionFinderVisitor { fn visit(&mut self, node: Rc<Node<'a>>) -> bool { match node.as_ref() { Node::VariableAssgn(assignment) => { let object_name = assignment.id.name.clone(); if let Expression::Object(obj) = assignment.init.clone() { for prop in obj.properties.clone() { let func_name = prop.key.name; if let Expression::Function(fun) = prop.value { let params = fun .params .into_iter() .map(|p| p.key.name) .collect::<Vec<String>>(); if let Ok(mut state) = self.state.lock() { state.results.push(ObjectFunction { object: object_name, function: Function { name: func_name, params, }, }); return false; } } } } } Node::OptionStmt(opt) => { if let flux::semantic::nodes::Assignment::Variable( assignment, ) = opt.assignment.clone() { let object_name = assignment.id.name; if let Expression::Object(obj) = assignment.init { for prop in obj.properties.clone() { let func_name = prop.key.name; if let Expression::Function(fun) = prop.value { let params = fun .params .into_iter() .map(|p| p.key.name) .collect::<Vec<String>>(); if let Ok(mut state) = self.state.lock() { state.results.push( ObjectFunction { object: object_name, function: Function { name: func_name, params, }, }, ); return false; } } } } } } _ => {} } true } }
32.469072
70
0.378155
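`defined_after` above compensates for LSP positions being 0-based while flux `SourceLocation`s are 1-based. A small sketch of the same comparison with plain integers standing in for both types (the helper and the numbers below are illustrative only):

// Sketch of the position comparison in defined_after; the `+ 1` converts the
// 0-based LSP position to the 1-based source location before comparing.
fn defined_after(loc_line: u32, loc_col: u32, pos_line: u32, pos_char: u32) -> bool {
    loc_line > pos_line + 1 || (loc_line == pos_line + 1 && loc_col > pos_char + 1)
}

fn main() {
    // A definition starting at line 3, column 5 (1-based) is "after" an LSP
    // cursor at line 1, character 0 (0-based), so the visitor skips it.
    assert!(defined_after(3, 5, 1, 0));
    // ...but not after a cursor that is already past it.
    assert!(!defined_after(3, 5, 4, 0));
}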
392cd956f5b8785a20d15208223bf2b0fd764b66
1678
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; use ::RegType::*; use ::instruction_def::*; use ::Operand::*; use ::Reg::*; use ::RegScale::*; fn movhps_1() { run_test(&Instruction { mnemonic: Mnemonic::MOVHPS, operand1: Some(Direct(XMM2)), operand2: Some(IndirectDisplaced(EBX, 32424642, Some(OperandSize::Qword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 22, 147, 194, 194, 238, 1], OperandSize::Dword) } fn movhps_2() { run_test(&Instruction { mnemonic: Mnemonic::MOVHPS, operand1: Some(Direct(XMM1)), operand2: Some(IndirectScaledDisplaced(RBX, Two, 1050193998, Some(OperandSize::Qword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 22, 12, 93, 78, 176, 152, 62], OperandSize::Qword) } fn movhps_3() { run_test(&Instruction { mnemonic: Mnemonic::MOVHPS, operand1: Some(IndirectScaledIndexed(ECX, EDI, Eight, Some(OperandSize::Qword), None)), operand2: Some(Direct(XMM2)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 23, 20, 249], OperandSize::Dword) } fn movhps_4() { run_test(&Instruction { mnemonic: Mnemonic::MOVHPS, operand1: Some(IndirectDisplaced(RSI, 1872549594, Some(OperandSize::Qword), None)), operand2: Some(Direct(XMM5)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 23, 174, 218, 214, 156, 111], OperandSize::Qword) }
69.916667
364
0.712753
e26ce0db2a4de94e90c176046e1446e71ed2ab7b
2203
use select::document::Document; use select::predicate::{Class, Name}; fn main() { let r = get_data(); for (k, v) in r { println!("{} => {}", k, v); } } fn get_data() -> Vec<(String, String)>{ let site_url = "https://torrentjuju.com/bbs/"; let document = Document::from(include_str!("../bbs.html")); let mut v = vec![]; for node in document.find(Class("media-heading")){ let title = node.text().replace("\n", ""); let bbs_link = node.find(Name("a")) .next() .unwrap() .attr("href") .unwrap(); let bbs_link = format!("{}{}", site_url, bbs_link.replace("./", "")); v.push((title, bbs_link)); } v } fn get_magnet() -> String { let document = Document::from(include_str!("../magnet.html")); let mut magnet = ""; for node in document.find(Class("list-group-item")){ if let Some(m) = node.find(Name("a")).next() { magnet = m.attr("href").unwrap(); } } magnet.to_string() } fn get_magnet_try() -> String { let document = Document::from(include_str!("../magnet_try.html")); let mut magnet = ""; for node in document.find(Class("list-group-item")){ if let Some(m) = node.find(Name("a")).next() { magnet = m.attr("href").unwrap(); } } magnet.to_string() } #[cfg(test)] mod tests { use super::*; #[test] fn get_data_func_test() { let data = get_data(); let sample = &data[0]; assert_eq!( "동상이몽2 너는 내 운명.E235.220228.720p-NEXT.mp4", sample.0, ); assert_eq!( "https://torrentjuju.com/bbs/board.php?bo_table=enter&wr_id=32183", sample.1, ); } #[test] fn get_magnet_func_test(){ let m = get_magnet(); assert_eq!( "magnet:?xt=urn:btih:04a6888916168f67e7f16cafb55fcbcfef7317e2", m ); } #[test] fn get_magnet_try_func_test(){ let m = get_magnet_try(); assert_eq!( "magnet:?xt=urn:btih:c16080948e35c41f7ad39c52d3c6d7defed04a17", m ); } }
25.616279
79
0.515207
fe29c787bc62d0bccd0ada705070712f8f78701c
1981
// //! Copyright 2020 Alibaba Group Holding Limited. //! //! Licensed under the Apache License, Version 2.0 (the "License"); //! you may not use this file except in compliance with the License. //! You may obtain a copy of the License at //! //! http://www.apache.org/licenses/LICENSE-2.0 //! //! Unless required by applicable law or agreed to in writing, software //! distributed under the License is distributed on an "AS IS" BASIS, //! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //! See the License for the specific language governing permissions and //! limitations under the License. use std::collections::HashSet; use super::*; use crossbeam_channel::Sender; pub struct Client<T> { input_send: Sender<(u32, Option<T>)>, running: HashSet<u32>, } impl<T: Data> Client<T> { pub fn new(sx: Sender<(u32, Option<T>)>) -> Self { Client { input_send: sx, running: HashSet::new(), } } /// Create input session with id `id`; pub fn input(&mut self, id: u32) -> Result<InputSession<T>, String> { if !self.running.insert(id) { Err(format!("job created with session id {} is still in running", id)) } else { Ok(InputSession::new(id, &self.input_send)) } } } pub struct InputSession<D> { id: u32, send: Sender<(u32, Option<D>)> } impl<D: Data> InputSession<D> { pub fn new(id: u32, send: &Sender<(u32, Option<D>)>) -> Self { InputSession { id, send: send.clone(), } } pub fn give(&self, record: D) -> Result<(), D> { self.send.send((self.id, Some(record))) .map_err(|e| { let (_i, r) = e.0; r.unwrap() }) } } impl<D> Drop for InputSession<D> { fn drop(&mut self) { loop { if let Ok(()) = self.send.send((self.id, None)) { break } } } }
26.77027
82
0.565876
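A standalone sketch of the session-over-a-channel pattern shown above, using `std::sync::mpsc` instead of crossbeam and a plain `String` instead of the crate's `Data`-bounded payload; the names here are illustrative, not the crate's API. The point is the `Drop` impl, which emits a `None` end-of-input marker for the session id exactly as `InputSession` does:

use std::sync::mpsc::{channel, Sender};

struct Session {
    id: u32,
    send: Sender<(u32, Option<String>)>,
}

impl Session {
    fn give(&self, record: String) {
        // Ignore send errors here for brevity; the real code maps them back
        // to the caller.
        let _ = self.send.send((self.id, Some(record)));
    }
}

impl Drop for Session {
    // Like InputSession, signal end-of-input for this id when dropped.
    fn drop(&mut self) {
        let _ = self.send.send((self.id, None));
    }
}

fn main() {
    let (tx, rx) = channel();
    {
        let s = Session { id: 7, send: tx.clone() };
        s.give("hello".to_string());
    } // dropping the session emits the (7, None) end marker
    drop(tx);
    let received: Vec<_> = rx.iter().collect();
    assert_eq!(received, vec![(7, Some("hello".to_string())), (7, None)]);
}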
ff33cf31af85e6717a85322cf109d0ff4c40c272
1122
// run-pass #![allow(unused_must_use)] // This time we're testing repeatedly going up and down both stacks to // make sure the stack pointers are maintained properly in both // directions // ignore-emscripten no threads support #![feature(rustc_private)] extern crate libc; use std::thread; mod rustrt { extern crate libc; #[link(name = "rust_test_helpers", kind = "static")] extern "C" { pub fn rust_dbg_call( cb: extern "C" fn(libc::uintptr_t) -> libc::uintptr_t, data: libc::uintptr_t, ) -> libc::uintptr_t; } } extern "C" fn cb(data: libc::uintptr_t) -> libc::uintptr_t { if data == 1 { data } else { count(data - 1) + count(data - 1) } } fn count(n: libc::uintptr_t) -> libc::uintptr_t { unsafe { println!("n = {}", n); rustrt::rust_dbg_call(cb, n) } } pub fn main() { // Make sure we're on a thread with small Rust stacks (main currently // has a large stack) thread::spawn(move || { let result = count(12); println!("result = {}", result); assert_eq!(result, 2048); }) .join(); }
24.391304
73
0.596257
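The expected value 2048 follows from the recurrence in `cb`: for `n > 1` it returns `count(n - 1) + count(n - 1)` and `count(1) = 1`, so `count(n) = 2^(n - 1)` and `count(12) = 2^11 = 2048`. A pure-Rust sketch of the same recurrence without the FFI round-trip:

// Same recurrence as the C-callback version above, kept in safe Rust.
fn count(n: u64) -> u64 {
    if n == 1 { 1 } else { count(n - 1) + count(n - 1) }
}

fn main() {
    assert_eq!(count(12), 2048); // 2^(12 - 1)
}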
56f51480e763d6e10e9ccffbd0fe0ef29d4c8ce3
324
mod bisection_method;
use bisection_method::bisection;
mod fpi;
use fpi::fpi;

fn main() {
    let f1 = |x: f64| x.powi(3) + x - 1.0;
    let g1 = |x: f64| (1.0 + 2.0 * x.powi(3)) / (1.0 + 3.0 * x.powi(2));

    println!("root is {}", bisection(&f1, 0.0, 1.0, 0.000000001));
    println!("root is {}", fpi(&g1, 0.0, 10));
}
24.923077
70
0.540123
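The `bisection_method` and `fpi` modules are not included in this row, so the following is only a guess at what they might contain, matching the call sites in `main` (`bisection(&f, a, b, tol)` and `fpi(&g, x0, steps)`); it is not the author's code:

// Hypothetical implementations matching the signatures used in main above.
pub fn bisection(f: &dyn Fn(f64) -> f64, mut a: f64, mut b: f64, tol: f64) -> f64 {
    // assumes f(a) and f(b) have opposite signs
    while (b - a) / 2.0 > tol {
        let c = (a + b) / 2.0;
        if f(a) * f(c) <= 0.0 { b = c } else { a = c }
    }
    (a + b) / 2.0
}

pub fn fpi(g: &dyn Fn(f64) -> f64, x0: f64, steps: u32) -> f64 {
    // fixed-point iteration: x_{k+1} = g(x_k)
    (0..steps).fold(x0, |x, _| g(x))
}

fn main() {
    let f1 = |x: f64| x.powi(3) + x - 1.0;
    let g1 = |x: f64| (1.0 + 2.0 * x.powi(3)) / (1.0 + 3.0 * x.powi(2));
    // the real root of x^3 + x - 1 is about 0.6823
    let r1 = bisection(&f1, 0.0, 1.0, 1e-9);
    let r2 = fpi(&g1, 0.0, 10);
    assert!(f1(r1).abs() < 1e-6);
    assert!(f1(r2).abs() < 1e-6);
}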
e5d761551e9e552a8b0e7c3a96b493cfa44f0362
1512
/*
 * This file is part of the uutils coreutils package.
 *
 * (c) Rolf Morel <[email protected]>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

#[derive(Clone)]
pub struct Searcher<'a> {
    haystack: &'a [u8],
    needle: &'a [u8],
    position: usize,
}

impl<'a> Searcher<'a> {
    pub fn new(haystack: &'a [u8], needle: &'a [u8]) -> Searcher<'a> {
        Searcher {
            haystack: haystack,
            needle: needle,
            position: 0,
        }
    }
}

impl<'a> Iterator for Searcher<'a> {
    type Item = (usize, usize);

    fn next(&mut self) -> Option<(usize, usize)> {
        if self.needle.len() == 1 {
            for offset in self.position..self.haystack.len() {
                if self.haystack[offset] == self.needle[0] {
                    self.position = offset + 1;
                    return Some((offset, offset + 1));
                }
            }
            self.position = self.haystack.len();
            return None;
        }

        while self.position + self.needle.len() <= self.haystack.len() {
            if &self.haystack[self.position..self.position + self.needle.len()] == self.needle {
                let match_pos = self.position;
                self.position += self.needle.len();
                return Some((match_pos, match_pos + self.needle.len()));
            } else {
                self.position += 1;
            }
        }
        None
    }
}
27.490909
96
0.515212
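A test sketch that could sit next to `Searcher` in the same module, showing the `(start, end)` byte ranges it yields for single-byte and multi-byte needles (multi-byte matches are non-overlapping because the position jumps past each whole match):

#[cfg(test)]
mod searcher_tests {
    use super::Searcher;

    #[test]
    fn finds_all_matches() {
        // single-byte needle: every occurrence is reported
        let hits: Vec<_> = Searcher::new(b"a,b,,c", b",").collect();
        assert_eq!(hits, vec![(1, 2), (3, 4), (4, 5)]);

        // multi-byte needle: the cursor advances past each whole match
        let hits: Vec<_> = Searcher::new(b"abab", b"ab").collect();
        assert_eq!(hits, vec![(0, 2), (2, 4)]);
    }
}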
0af90a11ead2be2fa48f84f3bb1bfa0c8776c430
4723
//! Internal raw utilities (don't use unless you know what you're doing!) use std::slice; /// A generic version of the raw imgui-sys ImVector struct types #[repr(C)] pub struct ImVector<T> { size: i32, capacity: i32, pub(crate) data: *mut T, } impl<T> ImVector<T> { #[inline] pub fn as_slice(&self) -> &[T] { unsafe { slice::from_raw_parts(self.data, self.size as usize) } } } #[test] #[cfg(test)] fn test_imvector_memory_layout() { use std::mem; assert_eq!( mem::size_of::<ImVector<u8>>(), mem::size_of::<sys::ImVector_char>() ); assert_eq!( mem::align_of::<ImVector<u8>>(), mem::align_of::<sys::ImVector_char>() ); use sys::ImVector_char; type VectorChar = ImVector<u8>; macro_rules! assert_field_offset { ($l:ident, $r:ident) => { assert_eq!( memoffset::offset_of!(VectorChar, $l), memoffset::offset_of!(ImVector_char, $r) ); }; } assert_field_offset!(size, Size); assert_field_offset!(capacity, Capacity); assert_field_offset!(data, Data); } /// Marks a type as a transparent wrapper over a raw type pub trait RawWrapper { /// Wrapped raw type type Raw; /// Returns an immutable reference to the wrapped raw value /// /// # Safety /// /// It is up to the caller to use the returned raw reference without causing undefined /// behaviour or breaking safety rules. unsafe fn raw(&self) -> &Self::Raw; /// Returns a mutable reference to the wrapped raw value /// /// # Safety /// /// It is up to the caller to use the returned mutable raw reference without causing undefined /// behaviour or breaking safety rules. unsafe fn raw_mut(&mut self) -> &mut Self::Raw; } /// Casting from/to a raw type that has the same layout and alignment as the target type /// /// # Safety /// /// Each function outlines its own safety contract, which generally is /// that the cast from `T` to `Self` is valid. pub unsafe trait RawCast<T>: Sized { /// Casts an immutable reference from the raw type /// /// # Safety /// /// It is up to the caller to guarantee the cast is valid. #[inline] unsafe fn from_raw(raw: &T) -> &Self { &*(raw as *const _ as *const Self) } /// Casts a mutable reference from the raw type /// /// # Safety /// /// It is up to the caller to guarantee the cast is valid. #[inline] unsafe fn from_raw_mut(raw: &mut T) -> &mut Self { &mut *(raw as *mut _ as *mut Self) } /// Casts an immutable reference to the raw type /// /// # Safety /// /// It is up to the caller to guarantee the cast is valid. #[inline] unsafe fn raw(&self) -> &T { &*(self as *const _ as *const T) } /// Casts a mutable reference to the raw type /// /// # Safety /// /// It is up to the caller to guarantee the cast is valid. #[inline] unsafe fn raw_mut(&mut self) -> &mut T { &mut *(self as *mut _ as *mut T) } } /// A primary data type #[repr(u32)] #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum DataType { I8 = sys::ImGuiDataType_S8, U8 = sys::ImGuiDataType_U8, I16 = sys::ImGuiDataType_S16, U16 = sys::ImGuiDataType_U16, I32 = sys::ImGuiDataType_S32, U32 = sys::ImGuiDataType_U32, I64 = sys::ImGuiDataType_S64, U64 = sys::ImGuiDataType_U64, F32 = sys::ImGuiDataType_Float, F64 = sys::ImGuiDataType_Double, } /// Primitive type marker. /// /// If this trait is implemented for a type, it is assumed to have *exactly* the same /// representation in memory as the primitive value described by the associated `KIND` constant. /// /// # Safety /// The `DataType` *must* have the same representation as the primitive value of `KIND`. 
pub unsafe trait DataTypeKind: Copy { const KIND: DataType; } unsafe impl DataTypeKind for i8 { const KIND: DataType = DataType::I8; } unsafe impl DataTypeKind for u8 { const KIND: DataType = DataType::U8; } unsafe impl DataTypeKind for i16 { const KIND: DataType = DataType::I16; } unsafe impl DataTypeKind for u16 { const KIND: DataType = DataType::U16; } unsafe impl DataTypeKind for i32 { const KIND: DataType = DataType::I32; } unsafe impl DataTypeKind for u32 { const KIND: DataType = DataType::U32; } unsafe impl DataTypeKind for i64 { const KIND: DataType = DataType::I64; } unsafe impl DataTypeKind for u64 { const KIND: DataType = DataType::U64; } unsafe impl DataTypeKind for f32 { const KIND: DataType = DataType::F32; } unsafe impl DataTypeKind for f64 { const KIND: DataType = DataType::F64; }
28.113095
98
0.628414
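A sketch of how the `DataTypeKind` marker can be exercised if placed in this same module: a generic function recovers the runtime `DataType` tag of any primitive it accepts. `describe` is an illustrative name, not an imgui-rs API:

#[test]
fn data_type_kind_dispatch() {
    // Generic code can turn the compile-time marker back into a runtime tag.
    fn describe<T: DataTypeKind>(_value: T) -> DataType {
        T::KIND
    }
    assert_eq!(describe(1.0f32), DataType::F32);
    assert_eq!(describe(42u8), DataType::U8);
}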
6434dccdfc75bdbae0b277e6fd2ac0dc214fcf84
42791
//! HTML formatting module //! //! This module contains a large number of `fmt::Display` implementations for //! various types in `rustdoc::clean`. These implementations all currently //! assume that HTML output is desired, although it may be possible to redesign //! them in the future to instead emit any format desired. use std::borrow::Cow; use std::cell::Cell; use std::fmt; use rustc_data_structures::fx::FxHashSet; use rustc_hir as hir; use rustc_hir::def_id::DefId; use rustc_target::spec::abi::Abi; use crate::clean::{self, PrimitiveType}; use crate::html::escape::Escape; use crate::html::item_type::ItemType; use crate::html::render::{self, cache, CURRENT_DEPTH}; pub trait Print { fn print(self, buffer: &mut Buffer); } impl<F> Print for F where F: FnOnce(&mut Buffer), { fn print(self, buffer: &mut Buffer) { (self)(buffer) } } impl Print for String { fn print(self, buffer: &mut Buffer) { buffer.write_str(&self); } } impl Print for &'_ str { fn print(self, buffer: &mut Buffer) { buffer.write_str(self); } } #[derive(Debug, Clone)] pub struct Buffer { for_html: bool, buffer: String, } impl Buffer { crate fn empty_from(v: &Buffer) -> Buffer { Buffer { for_html: v.for_html, buffer: String::new() } } crate fn html() -> Buffer { Buffer { for_html: true, buffer: String::new() } } crate fn new() -> Buffer { Buffer { for_html: false, buffer: String::new() } } crate fn is_empty(&self) -> bool { self.buffer.is_empty() } crate fn into_inner(self) -> String { self.buffer } crate fn insert_str(&mut self, idx: usize, s: &str) { self.buffer.insert_str(idx, s); } crate fn push_str(&mut self, s: &str) { self.buffer.push_str(s); } // Intended for consumption by write! and writeln! (std::fmt) but without // the fmt::Result return type imposed by fmt::Write (and avoiding the trait // import). crate fn write_str(&mut self, s: &str) { self.buffer.push_str(s); } // Intended for consumption by write! and writeln! (std::fmt) but without // the fmt::Result return type imposed by fmt::Write (and avoiding the trait // import). crate fn write_fmt(&mut self, v: fmt::Arguments<'_>) { use fmt::Write; self.buffer.write_fmt(v).unwrap(); } crate fn to_display<T: Print>(mut self, t: T) -> String { t.print(&mut self); self.into_inner() } crate fn from_display<T: std::fmt::Display>(&mut self, t: T) { if self.for_html { write!(self, "{}", t); } else { write!(self, "{:#}", t); } } crate fn is_for_html(&self) -> bool { self.for_html } } /// Wrapper struct for properly emitting a function or method declaration. pub struct Function<'a> { /// The declaration to emit. pub decl: &'a clean::FnDecl, /// The length of the function header and name. In other words, the number of characters in the /// function declaration up to but not including the parentheses. /// /// Used to determine line-wrapping. pub header_len: usize, /// The number of spaces to indent each successive line with, if line-wrapping is necessary. pub indent: usize, /// Whether the function is async or not. pub asyncness: hir::IsAsync, } /// Wrapper struct for emitting a where-clause from Generics. pub struct WhereClause<'a> { /// The Generics from which to emit a where-clause. pub gens: &'a clean::Generics, /// The number of spaces to indent each line with. pub indent: usize, /// Whether the where-clause needs to add a comma and newline after the last bound. 
pub end_newline: bool, } fn comma_sep<T: fmt::Display>(items: impl Iterator<Item = T>) -> impl fmt::Display { display_fn(move |f| { for (i, item) in items.enumerate() { if i != 0 { write!(f, ", ")?; } fmt::Display::fmt(&item, f)?; } Ok(()) }) } crate fn print_generic_bounds(bounds: &[clean::GenericBound]) -> impl fmt::Display + '_ { display_fn(move |f| { let mut bounds_dup = FxHashSet::default(); for (i, bound) in bounds.iter().filter(|b| bounds_dup.insert(b.print().to_string())).enumerate() { if i > 0 { f.write_str(" + ")?; } fmt::Display::fmt(&bound.print(), f)?; } Ok(()) }) } impl clean::GenericParamDef { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| match self.kind { clean::GenericParamDefKind::Lifetime => write!(f, "{}", self.name), clean::GenericParamDefKind::Type { ref bounds, ref default, .. } => { f.write_str(&self.name)?; if !bounds.is_empty() { if f.alternate() { write!(f, ": {:#}", print_generic_bounds(bounds))?; } else { write!(f, ":&nbsp;{}", print_generic_bounds(bounds))?; } } if let Some(ref ty) = default { if f.alternate() { write!(f, " = {:#}", ty.print())?; } else { write!(f, "&nbsp;=&nbsp;{}", ty.print())?; } } Ok(()) } clean::GenericParamDefKind::Const { ref ty, .. } => { f.write_str("const ")?; f.write_str(&self.name)?; if f.alternate() { write!(f, ": {:#}", ty.print()) } else { write!(f, ":&nbsp;{}", ty.print()) } } }) } } impl clean::Generics { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| { let real_params = self.params.iter().filter(|p| !p.is_synthetic_type_param()).collect::<Vec<_>>(); if real_params.is_empty() { return Ok(()); } if f.alternate() { write!(f, "<{:#}>", comma_sep(real_params.iter().map(|g| g.print()))) } else { write!(f, "&lt;{}&gt;", comma_sep(real_params.iter().map(|g| g.print()))) } }) } } impl<'a> fmt::Display for WhereClause<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let &WhereClause { gens, indent, end_newline } = self; if gens.where_predicates.is_empty() { return Ok(()); } let mut clause = String::new(); if f.alternate() { clause.push_str(" where"); } else { if end_newline { clause.push_str(" <span class=\"where fmt-newline\">where"); } else { clause.push_str(" <span class=\"where\">where"); } } for (i, pred) in gens.where_predicates.iter().enumerate() { if f.alternate() { clause.push(' '); } else { clause.push_str("<br>"); } match pred { &clean::WherePredicate::BoundPredicate { ref ty, ref bounds } => { let bounds = bounds; if f.alternate() { clause.push_str(&format!( "{:#}: {:#}", ty.print(), print_generic_bounds(bounds) )); } else { clause.push_str(&format!( "{}: {}", ty.print(), print_generic_bounds(bounds) )); } } &clean::WherePredicate::RegionPredicate { ref lifetime, ref bounds } => { clause.push_str(&format!( "{}: {}", lifetime.print(), bounds .iter() .map(|b| b.print().to_string()) .collect::<Vec<_>>() .join(" + ") )); } &clean::WherePredicate::EqPredicate { ref lhs, ref rhs } => { if f.alternate() { clause.push_str(&format!("{:#} == {:#}", lhs.print(), rhs.print())); } else { clause.push_str(&format!("{} == {}", lhs.print(), rhs.print())); } } } if i < gens.where_predicates.len() - 1 || end_newline { clause.push(','); } } if end_newline { // add a space so stripping <br> tags and breaking spaces still renders properly if f.alternate() { clause.push(' '); } else { clause.push_str("&nbsp;"); } } if !f.alternate() { clause.push_str("</span>"); let padding = "&nbsp;".repeat(indent + 4); clause = clause.replace("<br>", &format!("<br>{}", padding)); 
clause.insert_str(0, &"&nbsp;".repeat(indent.saturating_sub(1))); if !end_newline { clause.insert_str(0, "<br>"); } } write!(f, "{}", clause) } } impl clean::Lifetime { crate fn print(&self) -> &str { self.get_ref() } } impl clean::Constant { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| { if f.alternate() { f.write_str(&self.expr) } else { write!(f, "{}", Escape(&self.expr)) } }) } } impl clean::PolyTrait { fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| { if !self.generic_params.is_empty() { if f.alternate() { write!( f, "for<{:#}> ", comma_sep(self.generic_params.iter().map(|g| g.print())) )?; } else { write!( f, "for&lt;{}&gt; ", comma_sep(self.generic_params.iter().map(|g| g.print())) )?; } } if f.alternate() { write!(f, "{:#}", self.trait_.print()) } else { write!(f, "{}", self.trait_.print()) } }) } } impl clean::GenericBound { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| match self { clean::GenericBound::Outlives(lt) => write!(f, "{}", lt.print()), clean::GenericBound::TraitBound(ty, modifier) => { let modifier_str = match modifier { hir::TraitBoundModifier::None => "", hir::TraitBoundModifier::Maybe => "?", }; if f.alternate() { write!(f, "{}{:#}", modifier_str, ty.print()) } else { write!(f, "{}{}", modifier_str, ty.print()) } } }) } } impl clean::GenericArgs { fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| { match *self { clean::GenericArgs::AngleBracketed { ref args, ref bindings } => { if !args.is_empty() || !bindings.is_empty() { if f.alternate() { f.write_str("<")?; } else { f.write_str("&lt;")?; } let mut comma = false; for arg in args { if comma { f.write_str(", ")?; } comma = true; if f.alternate() { write!(f, "{:#}", arg.print())?; } else { write!(f, "{}", arg.print())?; } } for binding in bindings { if comma { f.write_str(", ")?; } comma = true; if f.alternate() { write!(f, "{:#}", binding.print())?; } else { write!(f, "{}", binding.print())?; } } if f.alternate() { f.write_str(">")?; } else { f.write_str("&gt;")?; } } } clean::GenericArgs::Parenthesized { ref inputs, ref output } => { f.write_str("(")?; let mut comma = false; for ty in inputs { if comma { f.write_str(", ")?; } comma = true; if f.alternate() { write!(f, "{:#}", ty.print())?; } else { write!(f, "{}", ty.print())?; } } f.write_str(")")?; if let Some(ref ty) = *output { if f.alternate() { write!(f, " -> {:#}", ty.print())?; } else { write!(f, " -&gt; {}", ty.print())?; } } } } Ok(()) }) } } impl clean::PathSegment { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| { f.write_str(&self.name)?; if f.alternate() { write!(f, "{:#}", self.args.print()) } else { write!(f, "{}", self.args.print()) } }) } } impl clean::Path { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| { if self.global { f.write_str("::")? } for (i, seg) in self.segments.iter().enumerate() { if i > 0 { f.write_str("::")? 
} if f.alternate() { write!(f, "{:#}", seg.print())?; } else { write!(f, "{}", seg.print())?; } } Ok(()) }) } } pub fn href(did: DefId) -> Option<(String, ItemType, Vec<String>)> { let cache = cache(); if !did.is_local() && !cache.access_levels.is_public(did) { return None; } let depth = CURRENT_DEPTH.with(|l| l.get()); let (fqp, shortty, mut url) = match cache.paths.get(&did) { Some(&(ref fqp, shortty)) => (fqp, shortty, "../".repeat(depth)), None => { let &(ref fqp, shortty) = cache.external_paths.get(&did)?; ( fqp, shortty, match cache.extern_locations[&did.krate] { (.., render::Remote(ref s)) => s.to_string(), (.., render::Local) => "../".repeat(depth), (.., render::Unknown) => return None, }, ) } }; for component in &fqp[..fqp.len() - 1] { url.push_str(component); url.push_str("/"); } match shortty { ItemType::Module => { url.push_str(fqp.last().unwrap()); url.push_str("/index.html"); } _ => { url.push_str(shortty.as_str()); url.push_str("."); url.push_str(fqp.last().unwrap()); url.push_str(".html"); } } Some((url, shortty, fqp.to_vec())) } /// Used when rendering a `ResolvedPath` structure. This invokes the `path` /// rendering function with the necessary arguments for linking to a local path. fn resolved_path( w: &mut fmt::Formatter<'_>, did: DefId, path: &clean::Path, print_all: bool, use_absolute: bool, ) -> fmt::Result { let last = path.segments.last().unwrap(); if print_all { for seg in &path.segments[..path.segments.len() - 1] { write!(w, "{}::", seg.name)?; } } if w.alternate() { write!(w, "{}{:#}", &last.name, last.args.print())?; } else { let path = if use_absolute { if let Some((_, _, fqp)) = href(did) { format!("{}::{}", fqp[..fqp.len() - 1].join("::"), anchor(did, fqp.last().unwrap())) } else { last.name.to_string() } } else { anchor(did, &last.name).to_string() }; write!(w, "{}{}", path, last.args.print())?; } Ok(()) } fn primitive_link( f: &mut fmt::Formatter<'_>, prim: clean::PrimitiveType, name: &str, ) -> fmt::Result { let m = cache(); let mut needs_termination = false; if !f.alternate() { match m.primitive_locations.get(&prim) { Some(&def_id) if def_id.is_local() => { let len = CURRENT_DEPTH.with(|s| s.get()); let len = if len == 0 { 0 } else { len - 1 }; write!( f, "<a class=\"primitive\" href=\"{}primitive.{}.html\">", "../".repeat(len), prim.to_url_str() )?; needs_termination = true; } Some(&def_id) => { let loc = match m.extern_locations[&def_id.krate] { (ref cname, _, render::Remote(ref s)) => Some((cname, s.to_string())), (ref cname, _, render::Local) => { let len = CURRENT_DEPTH.with(|s| s.get()); Some((cname, "../".repeat(len))) } (.., render::Unknown) => None, }; if let Some((cname, root)) = loc { write!( f, "<a class=\"primitive\" href=\"{}{}/primitive.{}.html\">", root, cname, prim.to_url_str() )?; needs_termination = true; } } None => {} } } write!(f, "{}", name)?; if needs_termination { write!(f, "</a>")?; } Ok(()) } /// Helper to render type parameters fn tybounds(param_names: &Option<Vec<clean::GenericBound>>) -> impl fmt::Display + '_ { display_fn(move |f| match *param_names { Some(ref params) => { for param in params { write!(f, " + ")?; fmt::Display::fmt(&param.print(), f)?; } Ok(()) } None => Ok(()), }) } pub fn anchor(did: DefId, text: &str) -> impl fmt::Display + '_ { display_fn(move |f| { if let Some((url, short_ty, fqp)) = href(did) { write!( f, r#"<a class="{}" href="{}" title="{} {}">{}</a>"#, short_ty, url, short_ty, fqp.join("::"), text ) } else { write!(f, "{}", text) } }) } fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter<'_>, 
use_absolute: bool) -> fmt::Result { match *t { clean::Generic(ref name) => f.write_str(name), clean::ResolvedPath { did, ref param_names, ref path, is_generic } => { if param_names.is_some() { f.write_str("dyn ")?; } // Paths like `T::Output` and `Self::Output` should be rendered with all segments. resolved_path(f, did, path, is_generic, use_absolute)?; fmt::Display::fmt(&tybounds(param_names), f) } clean::Infer => write!(f, "_"), clean::Primitive(prim) => primitive_link(f, prim, prim.as_str()), clean::BareFunction(ref decl) => { if f.alternate() { write!( f, "{}{:#}fn{:#}{:#}", decl.unsafety.print_with_space(), print_abi_with_space(decl.abi), decl.print_generic_params(), decl.decl.print() ) } else { write!( f, "{}{}", decl.unsafety.print_with_space(), print_abi_with_space(decl.abi) )?; primitive_link(f, PrimitiveType::Fn, "fn")?; write!(f, "{}{}", decl.print_generic_params(), decl.decl.print()) } } clean::Tuple(ref typs) => { match &typs[..] { &[] => primitive_link(f, PrimitiveType::Unit, "()"), &[ref one] => { primitive_link(f, PrimitiveType::Tuple, "(")?; // Carry `f.alternate()` into this display w/o branching manually. fmt::Display::fmt(&one.print(), f)?; primitive_link(f, PrimitiveType::Tuple, ",)") } many => { primitive_link(f, PrimitiveType::Tuple, "(")?; for (i, item) in many.iter().enumerate() { if i != 0 { write!(f, ", ")?; } fmt::Display::fmt(&item.print(), f)?; } primitive_link(f, PrimitiveType::Tuple, ")") } } } clean::Slice(ref t) => { primitive_link(f, PrimitiveType::Slice, "[")?; fmt::Display::fmt(&t.print(), f)?; primitive_link(f, PrimitiveType::Slice, "]") } clean::Array(ref t, ref n) => { primitive_link(f, PrimitiveType::Array, "[")?; fmt::Display::fmt(&t.print(), f)?; if f.alternate() { primitive_link(f, PrimitiveType::Array, &format!("; {}]", n)) } else { primitive_link(f, PrimitiveType::Array, &format!("; {}]", Escape(n))) } } clean::Never => primitive_link(f, PrimitiveType::Never, "!"), clean::RawPointer(m, ref t) => { let m = match m { hir::Mutability::Mut => "mut", hir::Mutability::Not => "const", }; match **t { clean::Generic(_) | clean::ResolvedPath { is_generic: true, .. } => { if f.alternate() { primitive_link( f, clean::PrimitiveType::RawPointer, &format!("*{} {:#}", m, t.print()), ) } else { primitive_link( f, clean::PrimitiveType::RawPointer, &format!("*{} {}", m, t.print()), ) } } _ => { primitive_link(f, clean::PrimitiveType::RawPointer, &format!("*{} ", m))?; fmt::Display::fmt(&t.print(), f) } } } clean::BorrowedRef { lifetime: ref l, mutability, type_: ref ty } => { let lt = match l { Some(l) => format!("{} ", l.print()), _ => String::new(), }; let m = mutability.print_with_space(); let amp = if f.alternate() { "&".to_string() } else { "&amp;".to_string() }; match **ty { clean::Slice(ref bt) => { // `BorrowedRef{ ... Slice(T) }` is `&[T]` match **bt { clean::Generic(_) => { if f.alternate() { primitive_link( f, PrimitiveType::Slice, &format!("{}{}{}[{:#}]", amp, lt, m, bt.print()), ) } else { primitive_link( f, PrimitiveType::Slice, &format!("{}{}{}[{}]", amp, lt, m, bt.print()), ) } } _ => { primitive_link( f, PrimitiveType::Slice, &format!("{}{}{}[", amp, lt, m), )?; if f.alternate() { write!(f, "{:#}", bt.print())?; } else { write!(f, "{}", bt.print())?; } primitive_link(f, PrimitiveType::Slice, "]") } } } clean::ResolvedPath { param_names: Some(ref v), .. } if !v.is_empty() => { write!(f, "{}{}{}(", amp, lt, m)?; fmt_type(&ty, f, use_absolute)?; write!(f, ")") } clean::Generic(..) 
=> { primitive_link(f, PrimitiveType::Reference, &format!("{}{}{}", amp, lt, m))?; fmt_type(&ty, f, use_absolute) } _ => { write!(f, "{}{}{}", amp, lt, m)?; fmt_type(&ty, f, use_absolute) } } } clean::ImplTrait(ref bounds) => { if f.alternate() { write!(f, "impl {:#}", print_generic_bounds(bounds)) } else { write!(f, "impl {}", print_generic_bounds(bounds)) } } clean::QPath { ref name, ref self_type, ref trait_ } => { let should_show_cast = match *trait_ { box clean::ResolvedPath { ref path, .. } => { !path.segments.is_empty() && !self_type.is_self_type() } _ => true, }; if f.alternate() { if should_show_cast { write!(f, "<{:#} as {:#}>::", self_type.print(), trait_.print())? } else { write!(f, "{:#}::", self_type.print())? } } else { if should_show_cast { write!(f, "&lt;{} as {}&gt;::", self_type.print(), trait_.print())? } else { write!(f, "{}::", self_type.print())? } }; match *trait_ { // It's pretty unsightly to look at `<A as B>::C` in output, and // we've got hyperlinking on our side, so try to avoid longer // notation as much as possible by making `C` a hyperlink to trait // `B` to disambiguate. // // FIXME: this is still a lossy conversion and there should probably // be a better way of representing this in general? Most of // the ugliness comes from inlining across crates where // everything comes in as a fully resolved QPath (hard to // look at). box clean::ResolvedPath { did, ref param_names, .. } => { match href(did) { Some((ref url, _, ref path)) if !f.alternate() => { write!( f, "<a class=\"type\" href=\"{url}#{shortty}.{name}\" \ title=\"type {path}::{name}\">{name}</a>", url = url, shortty = ItemType::AssocType, name = name, path = path.join("::") )?; } _ => write!(f, "{}", name)?, } // FIXME: `param_names` are not rendered, and this seems bad? drop(param_names); Ok(()) } _ => write!(f, "{}", name), } } } } impl clean::Type { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| fmt_type(self, f, false)) } } impl clean::Impl { crate fn print(&self) -> impl fmt::Display + '_ { self.print_inner(true, false) } fn print_inner(&self, link_trait: bool, use_absolute: bool) -> impl fmt::Display + '_ { display_fn(move |f| { if f.alternate() { write!(f, "impl{:#} ", self.generics.print())?; } else { write!(f, "impl{} ", self.generics.print())?; } if let Some(ref ty) = self.trait_ { if self.polarity == Some(clean::ImplPolarity::Negative) { write!(f, "!")?; } if link_trait { fmt::Display::fmt(&ty.print(), f)?; } else { match ty { clean::ResolvedPath { param_names: None, path, is_generic: false, .. } => { let last = path.segments.last().unwrap(); fmt::Display::fmt(&last.name, f)?; fmt::Display::fmt(&last.args.print(), f)?; } _ => unreachable!(), } } write!(f, " for ")?; } if let Some(ref ty) = self.blanket_impl { fmt_type(ty, f, use_absolute)?; } else { fmt_type(&self.for_, f, use_absolute)?; } fmt::Display::fmt( &WhereClause { gens: &self.generics, indent: 0, end_newline: true }, f, )?; Ok(()) }) } } // The difference from above is that trait is not hyperlinked. 
pub fn fmt_impl_for_trait_page(i: &clean::Impl, f: &mut Buffer, use_absolute: bool) { f.from_display(i.print_inner(false, use_absolute)) } impl clean::Arguments { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| { for (i, input) in self.values.iter().enumerate() { if !input.name.is_empty() { write!(f, "{}: ", input.name)?; } if f.alternate() { write!(f, "{:#}", input.type_.print())?; } else { write!(f, "{}", input.type_.print())?; } if i + 1 < self.values.len() { write!(f, ", ")?; } } Ok(()) }) } } impl clean::FunctionRetTy { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| match self { clean::Return(clean::Tuple(tys)) if tys.is_empty() => Ok(()), clean::Return(ty) if f.alternate() => write!(f, " -> {:#}", ty.print()), clean::Return(ty) => write!(f, " -&gt; {}", ty.print()), clean::DefaultReturn => Ok(()), }) } } impl clean::BareFunctionDecl { fn print_generic_params(&self) -> impl fmt::Display + '_ { comma_sep(self.generic_params.iter().map(|g| g.print())) } } impl clean::FnDecl { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| { let ellipsis = if self.c_variadic { ", ..." } else { "" }; if f.alternate() { write!( f, "({args:#}{ellipsis}){arrow:#}", args = self.inputs.print(), ellipsis = ellipsis, arrow = self.output.print() ) } else { write!( f, "({args}{ellipsis}){arrow}", args = self.inputs.print(), ellipsis = ellipsis, arrow = self.output.print() ) } }) } } impl Function<'_> { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| { let &Function { decl, header_len, indent, asyncness } = self; let amp = if f.alternate() { "&" } else { "&amp;" }; let mut args = String::new(); let mut args_plain = String::new(); for (i, input) in decl.inputs.values.iter().enumerate() { if i == 0 { args.push_str("<br>"); } if let Some(selfty) = input.to_self() { match selfty { clean::SelfValue => { args.push_str("self"); args_plain.push_str("self"); } clean::SelfBorrowed(Some(ref lt), mtbl) => { args.push_str(&format!( "{}{} {}self", amp, lt.print(), mtbl.print_with_space() )); args_plain.push_str(&format!( "&{} {}self", lt.print(), mtbl.print_with_space() )); } clean::SelfBorrowed(None, mtbl) => { args.push_str(&format!("{}{}self", amp, mtbl.print_with_space())); args_plain.push_str(&format!("&{}self", mtbl.print_with_space())); } clean::SelfExplicit(ref typ) => { if f.alternate() { args.push_str(&format!("self: {:#}", typ.print())); } else { args.push_str(&format!("self: {}", typ.print())); } args_plain.push_str(&format!("self: {:#}", typ.print())); } } } else { if i > 0 { args.push_str(" <br>"); args_plain.push_str(" "); } if !input.name.is_empty() { args.push_str(&format!("{}: ", input.name)); args_plain.push_str(&format!("{}: ", input.name)); } if f.alternate() { args.push_str(&format!("{:#}", input.type_.print())); } else { args.push_str(&input.type_.print().to_string()); } args_plain.push_str(&format!("{:#}", input.type_.print())); } if i + 1 < decl.inputs.values.len() { args.push(','); args_plain.push(','); } } let mut args_plain = format!("({})", args_plain); if decl.c_variadic { args.push_str(",<br> ..."); args_plain.push_str(", ..."); } let output = if let hir::IsAsync::Async = asyncness { Cow::Owned(decl.sugared_async_return_type()) } else { Cow::Borrowed(&decl.output) }; let arrow_plain = format!("{:#}", &output.print()); let arrow = if f.alternate() { format!("{:#}", &output.print()) } else { output.print().to_string() }; let declaration_len = header_len + args_plain.len() + arrow_plain.len(); let output = if 
declaration_len > 80 { let full_pad = format!("<br>{}", "&nbsp;".repeat(indent + 4)); let close_pad = format!("<br>{}", "&nbsp;".repeat(indent)); format!( "({args}{close}){arrow}", args = args.replace("<br>", &full_pad), close = close_pad, arrow = arrow ) } else { format!("({args}){arrow}", args = args.replace("<br>", ""), arrow = arrow) }; if f.alternate() { write!(f, "{}", output.replace("<br>", "\n")) } else { write!(f, "{}", output) } }) } } impl clean::Visibility { crate fn print_with_space(&self) -> impl fmt::Display + '_ { display_fn(move |f| match *self { clean::Public => f.write_str("pub "), clean::Inherited => Ok(()), clean::Visibility::Crate => write!(f, "pub(crate) "), clean::Visibility::Restricted(did, ref path) => { f.write_str("pub(")?; if path.segments.len() != 1 || (path.segments[0].name != "self" && path.segments[0].name != "super") { f.write_str("in ")?; } resolved_path(f, did, path, true, false)?; f.write_str(") ") } }) } } crate trait PrintWithSpace { fn print_with_space(&self) -> &str; } impl PrintWithSpace for hir::Unsafety { fn print_with_space(&self) -> &str { match self { hir::Unsafety::Unsafe => "unsafe ", hir::Unsafety::Normal => "", } } } impl PrintWithSpace for hir::Constness { fn print_with_space(&self) -> &str { match self { hir::Constness::Const => "const ", hir::Constness::NotConst => "", } } } impl PrintWithSpace for hir::IsAsync { fn print_with_space(&self) -> &str { match self { hir::IsAsync::Async => "async ", hir::IsAsync::NotAsync => "", } } } impl PrintWithSpace for hir::Mutability { fn print_with_space(&self) -> &str { match self { hir::Mutability::Not => "", hir::Mutability::Mut => "mut ", } } } impl clean::Import { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| match *self { clean::Import::Simple(ref name, ref src) => { if *name == src.path.last_name() { write!(f, "use {};", src.print()) } else { write!(f, "use {} as {};", src.print(), *name) } } clean::Import::Glob(ref src) => { if src.path.segments.is_empty() { write!(f, "use *;") } else { write!(f, "use {}::*;", src.print()) } } }) } } impl clean::ImportSource { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| match self.did { Some(did) => resolved_path(f, did, &self.path, true, false), _ => { for (i, seg) in self.path.segments.iter().enumerate() { if i > 0 { write!(f, "::")? 
} write!(f, "{}", seg.name)?; } Ok(()) } }) } } impl clean::TypeBinding { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| { f.write_str(&self.name)?; match self.kind { clean::TypeBindingKind::Equality { ref ty } => { if f.alternate() { write!(f, " = {:#}", ty.print())?; } else { write!(f, " = {}", ty.print())?; } } clean::TypeBindingKind::Constraint { ref bounds } => { if !bounds.is_empty() { if f.alternate() { write!(f, ": {:#}", print_generic_bounds(bounds))?; } else { write!(f, ":&nbsp;{}", print_generic_bounds(bounds))?; } } } } Ok(()) }) } } crate fn print_abi_with_space(abi: Abi) -> impl fmt::Display { display_fn(move |f| { let quot = if f.alternate() { "\"" } else { "&quot;" }; match abi { Abi::Rust => Ok(()), abi => write!(f, "extern {0}{1}{0} ", quot, abi.name()), } }) } crate fn print_default_space<'a>(v: bool) -> &'a str { if v { "default " } else { "" } } impl clean::GenericArg { crate fn print(&self) -> impl fmt::Display + '_ { display_fn(move |f| match self { clean::GenericArg::Lifetime(lt) => fmt::Display::fmt(&lt.print(), f), clean::GenericArg::Type(ty) => fmt::Display::fmt(&ty.print(), f), clean::GenericArg::Const(ct) => fmt::Display::fmt(&ct.print(), f), }) } } crate fn display_fn(f: impl FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result) -> impl fmt::Display { WithFormatter(Cell::new(Some(f))) } struct WithFormatter<F>(Cell<Option<F>>); impl<F> fmt::Display for WithFormatter<F> where F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { (self.0.take()).unwrap()(f) } }
34.2328
100
0.401323
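A small usage sketch of the `display_fn` / `WithFormatter` pattern defined at the end of the formatter file above: a one-shot `FnOnce` closure is stashed in a `Cell<Option<F>>` so it can back a `fmt::Display` implementation. The `main` below is illustrative only and not part of the original crate.

use std::cell::Cell;
use std::fmt;

struct WithFormatter<F>(Cell<Option<F>>);

impl<F> fmt::Display for WithFormatter<F>
where
    F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // take() consumes the closure, so the value can only be displayed once.
        (self.0.take()).unwrap()(f)
    }
}

fn display_fn(f: impl FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result) -> impl fmt::Display {
    WithFormatter(Cell::new(Some(f)))
}

fn main() {
    let once = display_fn(|f| write!(f, "built lazily, displayed once"));
    println!("{}", once);
}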
23b933a0231baf756902e3f01b70d2396d72e395
2,075
use std::sync::Arc;

use liblumen_alloc::erts::exception::system::Alloc;
use liblumen_alloc::erts::process::code::stack::frame::{Frame, Placement};
use liblumen_alloc::erts::process::{code, Process};
use liblumen_alloc::erts::term::{atom_unchecked, Term};
use liblumen_alloc::ModuleFunctionArity;

use super::label_5;

pub fn place_frame_with_arguments(
    process: &Process,
    placement: Placement,
    document: Term,
    parent: Term,
    old_child: Term,
) -> Result<(), Alloc> {
    process.stack_push(old_child)?;
    process.stack_push(parent)?;
    process.stack_push(document)?;
    process.place_frame(frame(), placement);

    Ok(())
}

// Private

// ```elixir
// # label 4
// # pushed to stack: (document, parent, old_child)
// # returned from call: :ok
// # full stack: (:ok, document, parent, old_child)
// # returns: {:ok, new_child}
// {:ok, new_child} = Lumen.Web.Document.create_element(document, "ul");
// {:ok, replaced_child} = Lumen.Web.replace_child(parent, new_child, old_child)
// ```
fn code(arc_process: &Arc<Process>) -> code::Result {
    arc_process.reduce();

    let ok = arc_process.stack_pop().unwrap();
    assert_eq!(ok, atom_unchecked("ok"));
    let document = arc_process.stack_pop().unwrap();
    assert!(document.is_resource_reference());
    let parent = arc_process.stack_pop().unwrap();
    assert!(parent.is_resource_reference());
    let old_child = arc_process.stack_pop().unwrap();
    assert!(old_child.is_resource_reference());

    label_5::place_frame_with_arguments(arc_process, Placement::Replace, parent, old_child)?;

    let new_child_tag = arc_process.binary_from_str("ul")?;
    lumen_web::document::create_element_2::place_frame_with_arguments(
        arc_process,
        Placement::Push,
        document,
        new_child_tag,
    )?;

    Process::call_code(arc_process)
}

fn frame() -> Frame {
    let module_function_arity = Arc::new(ModuleFunctionArity {
        module: super::module(),
        function: super::function(),
        arity: 0,
    });

    Frame::new(module_function_arity, code)
}
28.040541
93
0.68
dd65c5b16cc6e49df7e12895cf6681110574ba9c
3,160
use rocket::response::Redirect;
use rocket::serde::json::{json, Value};
use rocket::{Catcher, Route};

/// # About endpoint
pub mod about;
/// # Balance endpoints
pub mod balances;
/// # Chain endpoints
pub mod chains;
/// # Collectibles endpoint
pub mod collectibles;
/// # Utility endpoints
pub mod contracts;
pub mod delegates;
#[doc(hidden)]
pub mod health;
#[doc(hidden)]
pub mod hooks;
/// # Notification endpoints
pub mod notifications;
/// # SafeApps endpoints
pub mod safe_apps;
/// # Safe endpoints
pub mod safes;
/// # Transactions endpoints
///
/// As presented by the endpoints in these handlers, we take in the types returned by the [transaction handlers](https://github.com/gnosis/safe-transaction-service-example), which to date are the `Multisig`, `Module` and `Ethereum` transaction types.
///
/// The types served by the gateway are `Transfer`, `SettingsChange` and `Custom`. Additionally, we treat the `Creation` transaction as one additional type, as it is meant to be grouped with the rest of the items in the same UI component in the apps.
pub mod transactions;

#[doc(hidden)]
pub fn active_routes() -> Vec<Route> {
    routes![
        root,
        about::routes::backbone,
        about::routes::get_about,
        about::routes::get_chains_about,
        about::routes::redis,
        about::routes::get_master_copies,
        balances::routes::get_balances,
        balances::routes::get_supported_fiat,
        chains::routes::get_chain,
        chains::routes::get_chains,
        collectibles::routes::get_collectibles,
        contracts::routes::post_data_decoder,
        contracts::routes::get_contract,
        delegates::routes::delete_delegate,
        delegates::routes::delete_safe_delegate,
        delegates::routes::get_delegates,
        delegates::routes::post_delegate,
        notifications::routes::post_notification_registration,
        notifications::routes::delete_notification_registration,
        safes::routes::get_safe_info,
        safes::routes::get_owners,
        safes::routes::post_safe_gas_estimation,
        safes::routes::post_safe_gas_estimation_v2,
        safe_apps::routes::get_safe_apps,
        transactions::routes::get_transactions,
        transactions::routes::get_transactions_history,
        transactions::routes::get_transactions_queued,
        transactions::routes::post_transaction,
        transactions::routes::post_confirmation,
        hooks::routes::update,
        hooks::routes::post_hook_update,
        hooks::routes::post_hooks_events,
        hooks::routes::post_flush_events,
        hooks::routes::flush,
        health::routes::health
    ]
}

#[doc(hidden)]
pub fn error_catchers() -> Vec<Catcher> {
    catchers![not_found, panic]
}

#[doc(hidden)]
#[catch(404)]
fn not_found() -> Value {
    json!({
        "status": "error",
        "reason": "Resource was not found."
    })
}

#[doc(hidden)]
#[catch(500)]
fn panic() -> Value {
    json!({
        "status": "error",
        "reason": "Server error occurred."
    })
}

#[doc(hidden)]
#[get("/")]
pub fn root() -> Redirect {
    Redirect::temporary("https://gnosis.github.io/safe-client-gateway/")
}
30.980392
256
0.671835
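A hypothetical sketch of how `active_routes()` and `error_catchers()` from the file above would typically be mounted on a Rocket instance; the launch code is not part of that file, and the `routes` module path is assumed purely for illustration.

#[macro_use]
extern crate rocket;

mod routes; // the handlers module shown above

#[launch]
fn rocket() -> _ {
    rocket::build()
        .mount("/", routes::active_routes())
        .register("/", routes::error_catchers())
}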
4ae479a11c6cbafecd9cfae59741d70b0235619a
1,229
//! Test suite for the Web and headless browsers.

#![cfg(target_arch = "wasm32")]

extern crate wasm_bindgen_test;
use wasm_bindgen_test::*;

wasm_bindgen_test_configure!(run_in_browser);

#[wasm_bindgen_test]
fn pass() {
    assert_eq!(1 + 1, 2);
}

extern crate wasm_game_of_life;
use wasm_game_of_life::Universe;

#[cfg(test)]
pub fn input_spaceship() -> Universe {
    let mut universe = Universe::new();
    universe.set_height_and_width(6, 6);
    universe.set_cells(&[(1,2), (2,3), (3,1), (3,2), (3,3)]);
    universe
}

#[cfg(test)]
pub fn expected_spaceship() -> Universe {
    let mut universe = Universe::new();
    universe.set_height_and_width(6, 6);
    universe.set_cells(&[(2,1), (2,3), (3,2), (3,3), (4,2)]);
    universe
}

#[wasm_bindgen_test]
pub fn test_tick() {
    // Let's create a smaller Universe with a small spaceship to test!
    let mut input_universe = input_spaceship();

    // This is what our spaceship should look like
    // after one tick in our universe.
    let expected_universe = expected_spaceship();

    // Call `tick` and then see if the cells in the `Universe`s are the same.
    input_universe.tick();
    assert_eq!(&input_universe.get_cells(), &expected_universe.get_cells());
}
26.717391
77
0.6786
232ada8f30fa36e7080308db28fd78f70e61167c
681
#![allow(unused_parens)]
#![allow(unused_imports)]
#![allow(clippy::unnecessary_cast)]

use frame_support::weights::{constants::RocksDbWeight as DbWeight, Weight};

pub trait WeightInfo {
    fn draw_card() -> Weight;
    fn synthetic_cards() -> Weight;
}

impl crate::WeightInfo for () {
    fn draw_card() -> Weight {
        (10_000 as Weight)
            .saturating_add(DbWeight::get().reads(3 as Weight))
            .saturating_add(DbWeight::get().writes(3 as Weight))
    }
    fn synthetic_cards() -> Weight {
        (10_000 as Weight)
            .saturating_add(DbWeight::get().reads(3 as Weight))
            .saturating_add(DbWeight::get().writes(3 as Weight))
    }
}
28.375
75
0.632893
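A hypothetical fragment showing how a FRAME pallet extrinsic would typically consume the `WeightInfo` trait above, assuming the pallet's `Config` exposes `type WeightInfo: WeightInfo`; the pallet itself is not part of that file and the body of `draw_card` is a placeholder.

#[pallet::call]
impl<T: Config> Pallet<T> {
    // Charge the benchmarked weight declared by WeightInfo::draw_card().
    #[pallet::weight(T::WeightInfo::draw_card())]
    pub fn draw_card(origin: OriginFor<T>) -> DispatchResult {
        let _who = ensure_signed(origin)?;
        // ... card-drawing logic elided ...
        Ok(())
    }
}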
d61db63e14c3992db452b391797c8a6da209ce36
4,064
extern crate clap; pub mod config; pub mod emails; pub mod pings; pub mod urls; use config::Config; use lettre::sendmail::SendmailTransport; use std::{process, thread, time}; fn verbose_println(log: bool, message: &str) { if log { println!("{}", message); } } pub fn run(matches: clap::ArgMatches) { println!("- Initial setup..."); let config = Config::new(matches).unwrap_or_else(|e| { eprintln!("Configuration error: {}", e.to_string()); process::exit(1); }); let verbose = config.verbose.unwrap_or(false); verbose_println(verbose, " Config parsed."); let mut list = urls::hash_list(&config.urls); { let client = reqwest::Client::new(); for (k, v) in list.clone().iter() { let hash = urls::compare(&client, k, v).unwrap_or_else(|e| { eprintln!("Request error for {}: {}", &k, e.to_string()); None }); if let Some(r) = hash { verbose_println(verbose, &format!(" Initial hash for {} obtained.", &k)); list.insert(k.clone(), r); } } } loop { { let client = reqwest::Client::new(); let mut transport = SendmailTransport::new(); println!("- Checking for updates..."); for (k, v) in list.clone().iter() { let hash = urls::compare(&client, k, v).unwrap_or_else(|e| { eprintln!("Request error for {}: {}", &k, e.to_string()); None }); if let Some(r) = hash { println!(" Page at {} has changed", &k); list.insert(k.clone(), r); if let Some(p) = &config.pings { for ping in p { let response = pings::send( &client, &ping.url, match &ping.content { Some(c) => c, None => "", }, k, ) .unwrap_or_else(|e| { eprintln!("Ping error for {}: {}", &ping.url, e.to_string()); String::new() }); verbose_println( verbose, &format!(" Ping sent to {}. Response is {}", &ping.url, response), ); } } if let Some(e) = &config.emails { for email in e { emails::send( &mut transport, match &config.sender { Some(s) => s, None => "wbmrs@localhost", }, &email.address, match &email.content { Some(c) => c, None => "", }, k, ) .unwrap_or_else(|e| { eprintln!("Email error for {}: {}", &email.address, e.to_string()); }); verbose_println( verbose, &format!(" Email sent to {}.", &email.address), ); } } } else { verbose_println(verbose, &format!(" Page at {} has not changed", &k)); } } } thread::sleep(time::Duration::from_secs((config.timeout * 60) as u64)); } }
34.440678
99
0.353839
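`urls::hash_list` and `urls::compare` are not shown in the watcher file above; a plausible minimal contract, sketched here only to illustrate the polling loop, is to hash the fetched page body and report a new hash only when it differs from the stored one.

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

fn hash_body(body: &str) -> String {
    let mut hasher = DefaultHasher::new();
    body.hash(&mut hasher);
    hasher.finish().to_string()
}

// Returns Some(new_hash) on the first fetch or when the page changed, None otherwise.
fn compare_bodies(stored: &HashMap<String, String>, url: &str, body: &str) -> Option<String> {
    let new_hash = hash_body(body);
    match stored.get(url) {
        Some(old) if *old == new_hash => None,
        _ => Some(new_hash),
    }
}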
d9eb695e8e419f24eae0f736d48493dbc5124872
9,292
// -*- mode: rust; -*- // // This file is part of ed25519-dalek. // Copyright (c) 2017-2019 isis lovecruft // See LICENSE for licensing information. // // Authors: // - isis agora lovecruft <[email protected]> //! A Rust implementation of ed25519 key generation, signing, and verification. //! //! # Example //! //! Creating an ed25519 signature on a message is simple. //! //! First, we need to generate a `Keypair`, which includes both public and //! secret halves of an asymmetric key. To do so, we need a cryptographically //! secure pseudorandom number generator (CSPRNG). For this example, we'll use //! the operating system's builtin PRNG: //! //! ``` //! extern crate rand; //! extern crate ed25519_dalek; //! //! # #[cfg(feature = "std")] //! # fn main() { //! use rand::rngs::OsRng; //! use ed25519_dalek::Keypair; //! use ed25519_dalek::Signature; //! //! let mut csprng = OsRng{}; //! let keypair: Keypair = Keypair::generate(&mut csprng); //! # } //! # //! # #[cfg(not(feature = "std"))] //! # fn main() { } //! ``` //! //! We can now use this `keypair` to sign a message: //! //! ``` //! # extern crate rand; //! # extern crate ed25519_dalek; //! # fn main() { //! # use rand::rngs::OsRng; //! # use ed25519_dalek::Keypair; //! # use ed25519_dalek::Signature; //! # let mut csprng = OsRng{}; //! # let keypair: Keypair = Keypair::generate(&mut csprng); //! let message: &[u8] = b"This is a test of the tsunami alert system."; //! let signature: Signature = keypair.sign(message); //! # } //! ``` //! //! As well as to verify that this is, indeed, a valid signature on //! that `message`: //! //! ``` //! # extern crate rand; //! # extern crate ed25519_dalek; //! # fn main() { //! # use rand::rngs::OsRng; //! # use ed25519_dalek::Keypair; //! # use ed25519_dalek::Signature; //! # let mut csprng = OsRng{}; //! # let keypair: Keypair = Keypair::generate(&mut csprng); //! # let message: &[u8] = b"This is a test of the tsunami alert system."; //! # let signature: Signature = keypair.sign(message); //! assert!(keypair.verify(message, &signature).is_ok()); //! # } //! ``` //! //! Anyone else, given the `public` half of the `keypair` can also easily //! verify this signature: //! //! ``` //! # extern crate rand; //! # extern crate ed25519_dalek; //! # fn main() { //! # use rand::rngs::OsRng; //! # use ed25519_dalek::Keypair; //! # use ed25519_dalek::Signature; //! use ed25519_dalek::PublicKey; //! # let mut csprng = OsRng{}; //! # let keypair: Keypair = Keypair::generate(&mut csprng); //! # let message: &[u8] = b"This is a test of the tsunami alert system."; //! # let signature: Signature = keypair.sign(message); //! //! let public_key: PublicKey = keypair.public; //! assert!(public_key.verify(message, &signature).is_ok()); //! # } //! ``` //! //! ## Serialisation //! //! `PublicKey`s, `SecretKey`s, `Keypair`s, and `Signature`s can be serialised //! into byte-arrays by calling `.to_bytes()`. It's perfectly acceptible and //! safe to transfer and/or store those bytes. (Of course, never transfer your //! secret key to anyone else, since they will only need the public key to //! verify your signatures!) //! //! ``` //! # extern crate rand; //! # extern crate ed25519_dalek; //! # fn main() { //! # use rand::rngs::OsRng; //! # use ed25519_dalek::{Keypair, Signature, PublicKey}; //! use ed25519_dalek::{PUBLIC_KEY_LENGTH, SECRET_KEY_LENGTH, KEYPAIR_LENGTH, SIGNATURE_LENGTH}; //! # let mut csprng = OsRng{}; //! # let keypair: Keypair = Keypair::generate(&mut csprng); //! 
# let message: &[u8] = b"This is a test of the tsunami alert system."; //! # let signature: Signature = keypair.sign(message); //! # let public_key: PublicKey = keypair.public; //! //! let public_key_bytes: [u8; PUBLIC_KEY_LENGTH] = public_key.to_bytes(); //! let secret_key_bytes: [u8; SECRET_KEY_LENGTH] = keypair.secret.to_bytes(); //! let keypair_bytes: [u8; KEYPAIR_LENGTH] = keypair.to_bytes(); //! let signature_bytes: [u8; SIGNATURE_LENGTH] = signature.to_bytes(); //! # } //! ``` //! //! And similarly, decoded from bytes with `::from_bytes()`: //! //! ``` //! # extern crate rand; //! # extern crate ed25519_dalek; //! # use rand::rngs::OsRng; //! # use ed25519_dalek::{Keypair, Signature, PublicKey, SecretKey, SignatureError}; //! # use ed25519_dalek::{PUBLIC_KEY_LENGTH, SECRET_KEY_LENGTH, KEYPAIR_LENGTH, SIGNATURE_LENGTH}; //! # fn do_test() -> Result<(SecretKey, PublicKey, Keypair, Signature), SignatureError> { //! # let mut csprng = OsRng{}; //! # let keypair_orig: Keypair = Keypair::generate(&mut csprng); //! # let message: &[u8] = b"This is a test of the tsunami alert system."; //! # let signature_orig: Signature = keypair_orig.sign(message); //! # let public_key_bytes: [u8; PUBLIC_KEY_LENGTH] = keypair_orig.public.to_bytes(); //! # let secret_key_bytes: [u8; SECRET_KEY_LENGTH] = keypair_orig.secret.to_bytes(); //! # let keypair_bytes: [u8; KEYPAIR_LENGTH] = keypair_orig.to_bytes(); //! # let signature_bytes: [u8; SIGNATURE_LENGTH] = signature_orig.to_bytes(); //! # //! let public_key: PublicKey = PublicKey::from_bytes(&public_key_bytes)?; //! let secret_key: SecretKey = SecretKey::from_bytes(&secret_key_bytes)?; //! let keypair: Keypair = Keypair::from_bytes(&keypair_bytes)?; //! let signature: Signature = Signature::from_bytes(&signature_bytes)?; //! # //! # Ok((secret_key, public_key, keypair, signature)) //! # } //! # fn main() { //! # do_test(); //! # } //! ``` //! //! ### Using Serde //! //! If you prefer the bytes to be wrapped in another serialisation format, all //! types additionally come with built-in [serde](https://serde.rs) support by //! building `ed25519-dalek` via: //! //! ```bash //! $ cargo build --features="serde" //! ``` //! //! They can be then serialised into any of the wire formats which serde supports. //! For example, using [bincode](https://github.com/TyOverby/bincode): //! //! ``` //! # extern crate rand; //! # extern crate ed25519_dalek; //! # #[cfg(feature = "serde")] //! extern crate serde; //! # #[cfg(feature = "serde")] //! extern crate bincode; //! //! # #[cfg(feature = "serde")] //! # fn main() { //! # use rand::rngs::OsRng; //! # use ed25519_dalek::{Keypair, Signature, PublicKey}; //! use bincode::{serialize}; //! # let mut csprng = OsRng{}; //! # let keypair: Keypair = Keypair::generate(&mut csprng); //! # let message: &[u8] = b"This is a test of the tsunami alert system."; //! # let signature: Signature = keypair.sign(message); //! # let public_key: PublicKey = keypair.public; //! # let verified: bool = public_key.verify(message, &signature).is_ok(); //! //! let encoded_public_key: Vec<u8> = serialize(&public_key).unwrap(); //! let encoded_signature: Vec<u8> = serialize(&signature).unwrap(); //! # } //! # #[cfg(not(feature = "serde"))] //! # fn main() {} //! ``` //! //! After sending the `encoded_public_key` and `encoded_signature`, the //! recipient may deserialise them and verify: //! //! ``` //! # extern crate rand; //! # extern crate ed25519_dalek; //! # #[cfg(feature = "serde")] //! # extern crate serde; //! # #[cfg(feature = "serde")] //! 
# extern crate bincode; //! # //! # #[cfg(feature = "serde")] //! # fn main() { //! # use rand::rngs::OsRng; //! # use ed25519_dalek::{Keypair, Signature, PublicKey}; //! # use bincode::{serialize}; //! use bincode::{deserialize}; //! //! # let mut csprng = OsRng{}; //! # let keypair: Keypair = Keypair::generate(&mut csprng); //! let message: &[u8] = b"This is a test of the tsunami alert system."; //! # let signature: Signature = keypair.sign(message); //! # let public_key: PublicKey = keypair.public; //! # let verified: bool = public_key.verify(message, &signature).is_ok(); //! # let encoded_public_key: Vec<u8> = serialize(&public_key).unwrap(); //! # let encoded_signature: Vec<u8> = serialize(&signature).unwrap(); //! let decoded_public_key: PublicKey = deserialize(&encoded_public_key).unwrap(); //! let decoded_signature: Signature = deserialize(&encoded_signature).unwrap(); //! //! # assert_eq!(public_key, decoded_public_key); //! # assert_eq!(signature, decoded_signature); //! # //! let verified: bool = decoded_public_key.verify(&message, &decoded_signature).is_ok(); //! //! assert!(verified); //! # } //! # #[cfg(not(feature = "serde"))] //! # fn main() {} //! ``` #![no_std] #![warn(future_incompatible)] #![deny(missing_docs)] // refuse to compile if documentation is missing #[cfg(any(feature = "std", test))] #[macro_use] extern crate std; #[cfg(all(feature = "alloc", not(feature = "std")))] extern crate alloc; extern crate clear_on_drop; extern crate curve25519_dalek; #[cfg(all(any(feature = "batch", feature = "batch_deterministic"), any(feature = "std", feature = "alloc")))] extern crate merlin; #[cfg(any(feature = "batch", feature = "std", feature = "alloc", test))] extern crate rand; #[cfg(feature = "serde")] extern crate serde; extern crate sha2; #[cfg(all(any(feature = "batch", feature = "batch_deterministic"), any(feature = "std", feature = "alloc")))] mod batch; mod constants; mod ed25519; mod errors; mod public; mod secret; mod signature; // Export everything public in ed25519. pub use crate::ed25519::*; #[cfg(all(any(feature = "batch", feature = "batch_deterministic"), any(feature = "std", feature = "alloc")))] pub use crate::batch::*;
35.064151
109
0.649914
e2f2f9f606e1c1f3089cac3358198e9749057f6a
1,013
use std::fs;
use std::io::prelude::*;
use std::net::TcpListener;
use std::net::TcpStream;

fn main() {
    let listener = TcpListener::bind("127.0.0.1:7878").unwrap();

    for stream in listener.incoming() {
        let stream = stream.unwrap();

        handle_connection(stream);
    }
}

// ANCHOR: here
// --snip--

fn handle_connection(mut stream: TcpStream) {
    // --snip--
    // ANCHOR_END: here
    let mut buffer = [0; 1024];
    stream.read(&mut buffer).unwrap();

    let get = b"GET / HTTP/1.1\r\n";

    // ANCHOR: here
    let (status_line, filename) = if buffer.starts_with(get) {
        ("HTTP/1.1 200 OK", "hello.html")
    } else {
        ("HTTP/1.1 404 NOT FOUND", "404.html")
    };

    let contents = fs::read_to_string(filename).unwrap();

    let response = format!(
        "{}\r\nContent-Length: {}\r\n\r\n{}",
        status_line,
        contents.len(),
        contents
    );

    stream.write(response.as_bytes()).unwrap();
    stream.flush().unwrap();
}
// ANCHOR_END: here
21.104167
64
0.574531
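A minimal client sketch for exercising the single-threaded server above; it sends exactly the request line the server matches on and prints whatever comes back. This client is not part of the original listing.

use std::io::{Read, Write};
use std::net::TcpStream;

fn main() -> std::io::Result<()> {
    // Connect to the address the server binds and issue the "GET /" request it expects.
    let mut stream = TcpStream::connect("127.0.0.1:7878")?;
    stream.write_all(b"GET / HTTP/1.1\r\nHost: 127.0.0.1\r\n\r\n")?;

    // The server drops the connection after responding, so read_to_string terminates.
    let mut response = String::new();
    stream.read_to_string(&mut response)?;
    println!("{}", response);
    Ok(())
}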
873a2c47ddc76279b8e2440194b9bfa3c4be6a08
165,232
use std::ops::{Add, Sub, Mul}; use std::cmp::{Eq, PartialEq,min}; use util::{fixed_time_eq}; use step_by::RangeExt; /* fe means field element. Here the field is \Z/(2^255-19). An element t, entries t[0]...t[9], represents the integer t[0]+2^26 t[1]+2^51 t[2]+2^77 t[3]+2^102 t[4]+...+2^230 t[9]. Bounds on each t[i] vary depending on context. */ #[derive(Clone, Copy)] pub struct Fe(pub [i32; 10]); impl PartialEq for Fe { fn eq(&self, other: &Fe) -> bool { let &Fe(self_elems) = self; let &Fe(other_elems) = other; self_elems.to_vec() == other_elems.to_vec() } } impl Eq for Fe { } static FE_ZERO : Fe = Fe([0,0,0,0,0,0,0,0,0,0]); static FE_ONE : Fe = Fe([1,0,0,0,0,0,0,0,0,0]); static FE_SQRTM1 : Fe = Fe([-32595792,-7943725,9377950,3500415,12389472,-272473,-25146209,-2005654,326686,11406482]); static FE_D : Fe = Fe([-10913610,13857413,-15372611,6949391,114729,-8787816,-6275908,-3247719,-18696448,-12055116]); static FE_D2 : Fe = Fe([-21827239,-5839606,-30745221,13898782,229458,15978800,-12551817,-6495438,29715968,9444199]); fn load_4u(s: &[u8]) -> u64 { (s[0] as u64) | ((s[1] as u64)<<8) | ((s[2] as u64)<<16) | ((s[3] as u64)<<24) } fn load_4i(s: &[u8]) -> i64 { load_4u(s) as i64 } fn load_3u(s: &[u8]) -> u64 { (s[0] as u64) | ((s[1] as u64)<<8) | ((s[2] as u64)<<16) } fn load_3i(s: &[u8]) -> i64 { load_3u(s) as i64 } impl Add for Fe { type Output = Fe; /* h = f + g Can overlap h with f or g. Preconditions: |f| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. |g| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. Postconditions: |h| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. */ fn add(self, _rhs: Fe) -> Fe { let Fe(f) = self; let Fe(g) = _rhs; let f0 = f[0]; let f1 = f[1]; let f2 = f[2]; let f3 = f[3]; let f4 = f[4]; let f5 = f[5]; let f6 = f[6]; let f7 = f[7]; let f8 = f[8]; let f9 = f[9]; let g0 = g[0]; let g1 = g[1]; let g2 = g[2]; let g3 = g[3]; let g4 = g[4]; let g5 = g[5]; let g6 = g[6]; let g7 = g[7]; let g8 = g[8]; let g9 = g[9]; let h0 = f0 + g0; let h1 = f1 + g1; let h2 = f2 + g2; let h3 = f3 + g3; let h4 = f4 + g4; let h5 = f5 + g5; let h6 = f6 + g6; let h7 = f7 + g7; let h8 = f8 + g8; let h9 = f9 + g9; Fe([h0, h1, h2, h3, h4, h5, h6, h7, h8, h9]) } } impl Sub for Fe { type Output = Fe; /* h = f - g Can overlap h with f or g. Preconditions: |f| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. |g| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. Postconditions: |h| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. */ fn sub(self, _rhs: Fe) -> Fe { let Fe(f) = self; let Fe(g) = _rhs; let f0 = f[0]; let f1 = f[1]; let f2 = f[2]; let f3 = f[3]; let f4 = f[4]; let f5 = f[5]; let f6 = f[6]; let f7 = f[7]; let f8 = f[8]; let f9 = f[9]; let g0 = g[0]; let g1 = g[1]; let g2 = g[2]; let g3 = g[3]; let g4 = g[4]; let g5 = g[5]; let g6 = g[6]; let g7 = g[7]; let g8 = g[8]; let g9 = g[9]; let h0 = f0 - g0; let h1 = f1 - g1; let h2 = f2 - g2; let h3 = f3 - g3; let h4 = f4 - g4; let h5 = f5 - g5; let h6 = f6 - g6; let h7 = f7 - g7; let h8 = f8 - g8; let h9 = f9 - g9; Fe([h0, h1, h2, h3, h4, h5, h6, h7, h8, h9]) } } impl Mul for Fe { type Output = Fe; /* h = f * g Can overlap h with f or g. Preconditions: |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. |g| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. Postconditions: |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. */ /* Notes on implementation strategy: Using schoolbook multiplication. Karatsuba would save a little in some cost models. 
Most multiplications by 2 and 19 are 32-bit precomputations; cheaper than 64-bit postcomputations. There is one remaining multiplication by 19 in the carry chain; one *19 precomputation can be merged into this, but the resulting data flow is considerably less clean. There are 12 carries below. 10 of them are 2-way parallelizable and vectorizable. Can get away with 11 carries, but then data flow is much deeper. With tighter constraints on inputs can squeeze carries into int32. */ fn mul(self, _rhs: Fe) -> Fe { let Fe(f) = self; let Fe(g) = _rhs; let f0 = f[0]; let f1 = f[1]; let f2 = f[2]; let f3 = f[3]; let f4 = f[4]; let f5 = f[5]; let f6 = f[6]; let f7 = f[7]; let f8 = f[8]; let f9 = f[9]; let g0 = g[0]; let g1 = g[1]; let g2 = g[2]; let g3 = g[3]; let g4 = g[4]; let g5 = g[5]; let g6 = g[6]; let g7 = g[7]; let g8 = g[8]; let g9 = g[9]; let g1_19 = 19 * g1; /* 1.4*2^29 */ let g2_19 = 19 * g2; /* 1.4*2^30; still ok */ let g3_19 = 19 * g3; let g4_19 = 19 * g4; let g5_19 = 19 * g5; let g6_19 = 19 * g6; let g7_19 = 19 * g7; let g8_19 = 19 * g8; let g9_19 = 19 * g9; let f1_2 = 2 * f1; let f3_2 = 2 * f3; let f5_2 = 2 * f5; let f7_2 = 2 * f7; let f9_2 = 2 * f9; let f0g0 = (f0 as i64) * (g0 as i64); let f0g1 = (f0 as i64) * (g1 as i64); let f0g2 = (f0 as i64) * (g2 as i64); let f0g3 = (f0 as i64) * (g3 as i64); let f0g4 = (f0 as i64) * (g4 as i64); let f0g5 = (f0 as i64) * (g5 as i64); let f0g6 = (f0 as i64) * (g6 as i64); let f0g7 = (f0 as i64) * (g7 as i64); let f0g8 = (f0 as i64) * (g8 as i64); let f0g9 = (f0 as i64) * (g9 as i64); let f1g0 = (f1 as i64) * (g0 as i64); let f1g1_2 = (f1_2 as i64) * (g1 as i64); let f1g2 = (f1 as i64) * (g2 as i64); let f1g3_2 = (f1_2 as i64) * (g3 as i64); let f1g4 = (f1 as i64) * (g4 as i64); let f1g5_2 = (f1_2 as i64) * (g5 as i64); let f1g6 = (f1 as i64) * (g6 as i64); let f1g7_2 = (f1_2 as i64) * (g7 as i64); let f1g8 = (f1 as i64) * (g8 as i64); let f1g9_38 = (f1_2 as i64) * (g9_19 as i64); let f2g0 = (f2 as i64) * (g0 as i64); let f2g1 = (f2 as i64) * (g1 as i64); let f2g2 = (f2 as i64) * (g2 as i64); let f2g3 = (f2 as i64) * (g3 as i64); let f2g4 = (f2 as i64) * (g4 as i64); let f2g5 = (f2 as i64) * (g5 as i64); let f2g6 = (f2 as i64) * (g6 as i64); let f2g7 = (f2 as i64) * (g7 as i64); let f2g8_19 = (f2 as i64) * (g8_19 as i64); let f2g9_19 = (f2 as i64) * (g9_19 as i64); let f3g0 = (f3 as i64) * (g0 as i64); let f3g1_2 = (f3_2 as i64) * (g1 as i64); let f3g2 = (f3 as i64) * (g2 as i64); let f3g3_2 = (f3_2 as i64) * (g3 as i64); let f3g4 = (f3 as i64) * (g4 as i64); let f3g5_2 = (f3_2 as i64) * (g5 as i64); let f3g6 = (f3 as i64) * (g6 as i64); let f3g7_38 = (f3_2 as i64) * (g7_19 as i64); let f3g8_19 = (f3 as i64) * (g8_19 as i64); let f3g9_38 = (f3_2 as i64) * (g9_19 as i64); let f4g0 = (f4 as i64) * (g0 as i64); let f4g1 = (f4 as i64) * (g1 as i64); let f4g2 = (f4 as i64) * (g2 as i64); let f4g3 = (f4 as i64) * (g3 as i64); let f4g4 = (f4 as i64) * (g4 as i64); let f4g5 = (f4 as i64) * (g5 as i64); let f4g6_19 = (f4 as i64) * (g6_19 as i64); let f4g7_19 = (f4 as i64) * (g7_19 as i64); let f4g8_19 = (f4 as i64) * (g8_19 as i64); let f4g9_19 = (f4 as i64) * (g9_19 as i64); let f5g0 = (f5 as i64) * (g0 as i64); let f5g1_2 = (f5_2 as i64) * (g1 as i64); let f5g2 = (f5 as i64) * (g2 as i64); let f5g3_2 = (f5_2 as i64) * (g3 as i64); let f5g4 = (f5 as i64) * (g4 as i64); let f5g5_38 = (f5_2 as i64) * (g5_19 as i64); let f5g6_19 = (f5 as i64) * (g6_19 as i64); let f5g7_38 = (f5_2 as i64) * (g7_19 as i64); let f5g8_19 = (f5 as i64) * (g8_19 as i64); let 
f5g9_38 = (f5_2 as i64) * (g9_19 as i64); let f6g0 = (f6 as i64) * (g0 as i64); let f6g1 = (f6 as i64) * (g1 as i64); let f6g2 = (f6 as i64) * (g2 as i64); let f6g3 = (f6 as i64) * (g3 as i64); let f6g4_19 = (f6 as i64) * (g4_19 as i64); let f6g5_19 = (f6 as i64) * (g5_19 as i64); let f6g6_19 = (f6 as i64) * (g6_19 as i64); let f6g7_19 = (f6 as i64) * (g7_19 as i64); let f6g8_19 = (f6 as i64) * (g8_19 as i64); let f6g9_19 = (f6 as i64) * (g9_19 as i64); let f7g0 = (f7 as i64) * (g0 as i64); let f7g1_2 = (f7_2 as i64) * (g1 as i64); let f7g2 = (f7 as i64) * (g2 as i64); let f7g3_38 = (f7_2 as i64) * (g3_19 as i64); let f7g4_19 = (f7 as i64) * (g4_19 as i64); let f7g5_38 = (f7_2 as i64) * (g5_19 as i64); let f7g6_19 = (f7 as i64) * (g6_19 as i64); let f7g7_38 = (f7_2 as i64) * (g7_19 as i64); let f7g8_19 = (f7 as i64) * (g8_19 as i64); let f7g9_38 = (f7_2 as i64) * (g9_19 as i64); let f8g0 = (f8 as i64) * (g0 as i64); let f8g1 = (f8 as i64) * (g1 as i64); let f8g2_19 = (f8 as i64) * (g2_19 as i64); let f8g3_19 = (f8 as i64) * (g3_19 as i64); let f8g4_19 = (f8 as i64) * (g4_19 as i64); let f8g5_19 = (f8 as i64) * (g5_19 as i64); let f8g6_19 = (f8 as i64) * (g6_19 as i64); let f8g7_19 = (f8 as i64) * (g7_19 as i64); let f8g8_19 = (f8 as i64) * (g8_19 as i64); let f8g9_19 = (f8 as i64) * (g9_19 as i64); let f9g0 = (f9 as i64) * (g0 as i64); let f9g1_38 = (f9_2 as i64) * (g1_19 as i64); let f9g2_19 = (f9 as i64) * (g2_19 as i64); let f9g3_38 = (f9_2 as i64) * (g3_19 as i64); let f9g4_19 = (f9 as i64) * (g4_19 as i64); let f9g5_38 = (f9_2 as i64) * (g5_19 as i64); let f9g6_19 = (f9 as i64) * (g6_19 as i64); let f9g7_38 = (f9_2 as i64) * (g7_19 as i64); let f9g8_19 = (f9 as i64) * (g8_19 as i64); let f9g9_38 = (f9_2 as i64) * (g9_19 as i64); let mut h0 = f0g0+f1g9_38+f2g8_19+f3g7_38+f4g6_19+f5g5_38+f6g4_19+f7g3_38+f8g2_19+f9g1_38; let mut h1 = f0g1+f1g0 +f2g9_19+f3g8_19+f4g7_19+f5g6_19+f6g5_19+f7g4_19+f8g3_19+f9g2_19; let mut h2 = f0g2+f1g1_2 +f2g0 +f3g9_38+f4g8_19+f5g7_38+f6g6_19+f7g5_38+f8g4_19+f9g3_38; let mut h3 = f0g3+f1g2 +f2g1 +f3g0 +f4g9_19+f5g8_19+f6g7_19+f7g6_19+f8g5_19+f9g4_19; let mut h4 = f0g4+f1g3_2 +f2g2 +f3g1_2 +f4g0 +f5g9_38+f6g8_19+f7g7_38+f8g6_19+f9g5_38; let mut h5 = f0g5+f1g4 +f2g3 +f3g2 +f4g1 +f5g0 +f6g9_19+f7g8_19+f8g7_19+f9g6_19; let mut h6 = f0g6+f1g5_2 +f2g4 +f3g3_2 +f4g2 +f5g1_2 +f6g0 +f7g9_38+f8g8_19+f9g7_38; let mut h7 = f0g7+f1g6 +f2g5 +f3g4 +f4g3 +f5g2 +f6g1 +f7g0 +f8g9_19+f9g8_19; let mut h8 = f0g8+f1g7_2 +f2g6 +f3g5_2 +f4g4 +f5g3_2 +f6g2 +f7g1_2 +f8g0 +f9g9_38; let mut h9 = f0g9+f1g8 +f2g7 +f3g6 +f4g5 +f5g4 +f6g3 +f7g2 +f8g1 +f9g0 ; let mut carry0; let carry1; let carry2; let carry3; let mut carry4; let carry5; let carry6; let carry7; let carry8; let carry9; /* |h0| <= (1.1*1.1*2^52*(1+19+19+19+19)+1.1*1.1*2^50*(38+38+38+38+38)) i.e. |h0| <= 1.2*2^59; narrower ranges for h2, h4, h6, h8 |h1| <= (1.1*1.1*2^51*(1+1+19+19+19+19+19+19+19+19)) i.e. 
|h1| <= 1.5*2^58; narrower ranges for h3, h5, h7, h9 */ carry0 = (h0 + (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; carry4 = (h4 + (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; /* |h0| <= 2^25 */ /* |h4| <= 2^25 */ /* |h1| <= 1.51*2^58 */ /* |h5| <= 1.51*2^58 */ carry1 = (h1 + (1<<24)) >> 25; h2 += carry1; h1 -= carry1 << 25; carry5 = (h5 + (1<<24)) >> 25; h6 += carry5; h5 -= carry5 << 25; /* |h1| <= 2^24; from now on fits into int32 */ /* |h5| <= 2^24; from now on fits into int32 */ /* |h2| <= 1.21*2^59 */ /* |h6| <= 1.21*2^59 */ carry2 = (h2 + (1<<25)) >> 26; h3 += carry2; h2 -= carry2 << 26; carry6 = (h6 + (1<<25)) >> 26; h7 += carry6; h6 -= carry6 << 26; /* |h2| <= 2^25; from now on fits into int32 unchanged */ /* |h6| <= 2^25; from now on fits into int32 unchanged */ /* |h3| <= 1.51*2^58 */ /* |h7| <= 1.51*2^58 */ carry3 = (h3 + (1<<24)) >> 25; h4 += carry3; h3 -= carry3 << 25; carry7 = (h7 + (1<<24)) >> 25; h8 += carry7; h7 -= carry7 << 25; /* |h3| <= 2^24; from now on fits into int32 unchanged */ /* |h7| <= 2^24; from now on fits into int32 unchanged */ /* |h4| <= 1.52*2^33 */ /* |h8| <= 1.52*2^33 */ carry4 = (h4 + (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; carry8 = (h8 + (1<<25)) >> 26; h9 += carry8; h8 -= carry8 << 26; /* |h4| <= 2^25; from now on fits into int32 unchanged */ /* |h8| <= 2^25; from now on fits into int32 unchanged */ /* |h5| <= 1.01*2^24 */ /* |h9| <= 1.51*2^58 */ carry9 = (h9 + (1<<24)) >> 25; h0 += carry9 * 19; h9 -= carry9 << 25; /* |h9| <= 2^24; from now on fits into int32 unchanged */ /* |h0| <= 1.8*2^37 */ carry0 = (h0 + (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; /* |h0| <= 2^25; from now on fits into int32 unchanged */ /* |h1| <= 1.01*2^24 */ Fe([h0 as i32, h1 as i32, h2 as i32, h3 as i32, h4 as i32, h5 as i32, h6 as i32, h7 as i32, h8 as i32, h9 as i32]) } } impl Fe { pub fn from_bytes(s: &[u8]) -> Fe { let mut h0 = load_4i(&s[0..4]); let mut h1 = load_3i(&s[4..7]) << 6; let mut h2 = load_3i(&s[7..10]) << 5; let mut h3 = load_3i(&s[10..13]) << 3; let mut h4 = load_3i(&s[13..16]) << 2; let mut h5 = load_4i(&s[16..20]); let mut h6 = load_3i(&s[20..23]) << 7; let mut h7 = load_3i(&s[23..26]) << 5; let mut h8 = load_3i(&s[26..29]) << 4; let mut h9 = (load_3i(&s[29..32]) & 8388607) << 2; let carry9 = (h9 + (1<<24)) >> 25; h0 += carry9 * 19; h9 -= carry9 << 25; let carry1 = (h1 + (1<<24)) >> 25; h2 += carry1; h1 -= carry1 << 25; let carry3 = (h3 + (1<<24)) >> 25; h4 += carry3; h3 -= carry3 << 25; let carry5 = (h5 + (1<<24)) >> 25; h6 += carry5; h5 -= carry5 << 25; let carry7 = (h7 + (1<<24)) >> 25; h8 += carry7; h7 -= carry7 << 25; let carry0 = (h0 + (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; let carry2 = (h2 + (1<<25)) >> 26; h3 += carry2; h2 -= carry2 << 26; let carry4 = (h4 + (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; let carry6 = (h6 + (1<<25)) >> 26; h7 += carry6; h6 -= carry6 << 26; let carry8 = (h8 + (1<<25)) >> 26; h9 += carry8; h8 -= carry8 << 26; Fe([h0 as i32, h1 as i32, h2 as i32, h3 as i32, h4 as i32, h5 as i32, h6 as i32, h7 as i32, h8 as i32, h9 as i32]) } /* Preconditions: |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. Write p=2^255-19; q=floor(h/p). Basic claim: q = floor(2^(-255)(h + 19 2^(-25)h9 + 2^(-1))). Proof: Have |h|<=p so |q|<=1 so |19^2 2^(-255) q|<1/4. Also have |h-2^230 h9|<2^230 so |19 2^(-255)(h-2^230 h9)|<1/4. Write y=2^(-1)-19^2 2^(-255)q-19 2^(-255)(h-2^230 h9). Then 0<y<1. Write r=h-pq. Have 0<=r<=p-1=2^255-20. Thus 0<=r+19(2^-255)r<r+19(2^-255)2^255<=2^255-1. 
Write x=r+19(2^-255)r+y. Then 0<x<2^255 so floor(2^(-255)x) = 0 so floor(q+2^(-255)x) = q. Have q+2^(-255)x = 2^(-255)(h + 19 2^(-25) h9 + 2^(-1)) so floor(2^(-255)(h + 19 2^(-25) h9 + 2^(-1))) = q. */ pub fn to_bytes(&self) -> [u8; 32] { let &Fe(es) = self; let mut h0 = es[0]; let mut h1 = es[1]; let mut h2 = es[2]; let mut h3 = es[3]; let mut h4 = es[4]; let mut h5 = es[5]; let mut h6 = es[6]; let mut h7 = es[7]; let mut h8 = es[8]; let mut h9 = es[9]; let mut q; q = (19 * h9 + (1 << 24)) >> 25; q = (h0 + q) >> 26; q = (h1 + q) >> 25; q = (h2 + q) >> 26; q = (h3 + q) >> 25; q = (h4 + q) >> 26; q = (h5 + q) >> 25; q = (h6 + q) >> 26; q = (h7 + q) >> 25; q = (h8 + q) >> 26; q = (h9 + q) >> 25; /* Goal: Output h-(2^255-19)q, which is between 0 and 2^255-20. */ h0 += 19 * q; /* Goal: Output h-2^255 q, which is between 0 and 2^255-20. */ let carry0 = h0 >> 26; h1 += carry0; h0 -= carry0 << 26; let carry1 = h1 >> 25; h2 += carry1; h1 -= carry1 << 25; let carry2 = h2 >> 26; h3 += carry2; h2 -= carry2 << 26; let carry3 = h3 >> 25; h4 += carry3; h3 -= carry3 << 25; let carry4 = h4 >> 26; h5 += carry4; h4 -= carry4 << 26; let carry5 = h5 >> 25; h6 += carry5; h5 -= carry5 << 25; let carry6 = h6 >> 26; h7 += carry6; h6 -= carry6 << 26; let carry7 = h7 >> 25; h8 += carry7; h7 -= carry7 << 25; let carry8 = h8 >> 26; h9 += carry8; h8 -= carry8 << 26; let carry9 = h9 >> 25; h9 -= carry9 << 25; /* h10 = carry9 */ /* Goal: Output h0+...+2^255 h10-2^255 q, which is between 0 and 2^255-20. Have h0+...+2^230 h9 between 0 and 2^255-1; evidently 2^255 h10-2^255 q = 0. Goal: Output h0+...+2^230 h9. */ [ (h0 >> 0) as u8, (h0 >> 8) as u8, (h0 >> 16) as u8, ((h0 >> 24) | (h1 << 2)) as u8, (h1 >> 6) as u8, (h1 >> 14) as u8, ((h1 >> 22) | (h2 << 3)) as u8, (h2 >> 5) as u8, (h2 >> 13) as u8, ((h2 >> 21) | (h3 << 5)) as u8, (h3 >> 3) as u8, (h3 >> 11) as u8, ((h3 >> 19) | (h4 << 6)) as u8, (h4 >> 2) as u8, (h4 >> 10) as u8, (h4 >> 18) as u8, (h5 >> 0) as u8, (h5 >> 8) as u8, (h5 >> 16) as u8, ((h5 >> 24) | (h6 << 1)) as u8, (h6 >> 7) as u8, (h6 >> 15) as u8, ((h6 >> 23) | (h7 << 3)) as u8, (h7 >> 5) as u8, (h7 >> 13) as u8, ((h7 >> 21) | (h8 << 4)) as u8, (h8 >> 4) as u8, (h8 >> 12) as u8, ((h8 >> 20) | (h9 << 6)) as u8, (h9 >> 2) as u8, (h9 >> 10) as u8, (h9 >> 18) as u8, ] } pub fn maybe_swap_with(&mut self, other: &mut Fe, do_swap: i32) { let &mut Fe(f) = self; let &mut Fe(g) = other; let f0 = f[0]; let f1 = f[1]; let f2 = f[2]; let f3 = f[3]; let f4 = f[4]; let f5 = f[5]; let f6 = f[6]; let f7 = f[7]; let f8 = f[8]; let f9 = f[9]; let g0 = g[0]; let g1 = g[1]; let g2 = g[2]; let g3 = g[3]; let g4 = g[4]; let g5 = g[5]; let g6 = g[6]; let g7 = g[7]; let g8 = g[8]; let g9 = g[9]; let mut x0 = f0 ^ g0; let mut x1 = f1 ^ g1; let mut x2 = f2 ^ g2; let mut x3 = f3 ^ g3; let mut x4 = f4 ^ g4; let mut x5 = f5 ^ g5; let mut x6 = f6 ^ g6; let mut x7 = f7 ^ g7; let mut x8 = f8 ^ g8; let mut x9 = f9 ^ g9; let b = -do_swap; x0 &= b; x1 &= b; x2 &= b; x3 &= b; x4 &= b; x5 &= b; x6 &= b; x7 &= b; x8 &= b; x9 &= b; *self = Fe([f0^x0, f1^x1, f2^x2, f3^x3, f4^x4, f5^x5, f6^x6, f7^x7, f8^x8, f9^x9]); *other = Fe([g0^x0, g1^x1, g2^x2, g3^x3, g4^x4, g5^x5, g6^x6, g7^x7, g8^x8, g9^x9]); } pub fn maybe_set(&mut self, other: &Fe, do_swap: i32) { let &mut Fe(f) = self; let &Fe(g) = other; let f0 = f[0]; let f1 = f[1]; let f2 = f[2]; let f3 = f[3]; let f4 = f[4]; let f5 = f[5]; let f6 = f[6]; let f7 = f[7]; let f8 = f[8]; let f9 = f[9]; let g0 = g[0]; let g1 = g[1]; let g2 = g[2]; let g3 = g[3]; let g4 = g[4]; let g5 = g[5]; let g6 = g[6]; 
let g7 = g[7]; let g8 = g[8]; let g9 = g[9]; let mut x0 = f0 ^ g0; let mut x1 = f1 ^ g1; let mut x2 = f2 ^ g2; let mut x3 = f3 ^ g3; let mut x4 = f4 ^ g4; let mut x5 = f5 ^ g5; let mut x6 = f6 ^ g6; let mut x7 = f7 ^ g7; let mut x8 = f8 ^ g8; let mut x9 = f9 ^ g9; let b = -do_swap; x0 &= b; x1 &= b; x2 &= b; x3 &= b; x4 &= b; x5 &= b; x6 &= b; x7 &= b; x8 &= b; x9 &= b; *self = Fe([f0^x0, f1^x1, f2^x2, f3^x3, f4^x4, f5^x5, f6^x6, f7^x7, f8^x8, f9^x9]); } /* h = f * 121666 Can overlap h with f. Preconditions: |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. Postconditions: |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. */ fn mul_121666(&self) -> Fe { let &Fe(f) = self; let mut h0 = (f[0] as i64) * 121666; let mut h1 = (f[1] as i64) * 121666; let mut h2 = (f[2] as i64) * 121666; let mut h3 = (f[3] as i64) * 121666; let mut h4 = (f[4] as i64) * 121666; let mut h5 = (f[5] as i64) * 121666; let mut h6 = (f[6] as i64) * 121666; let mut h7 = (f[7] as i64) * 121666; let mut h8 = (f[8] as i64) * 121666; let mut h9 = (f[9] as i64) * 121666; let carry9 = (h9 + (1<<24)) >> 25; h0 += carry9 * 19; h9 -= carry9 << 25; let carry1 = (h1 + (1<<24)) >> 25; h2 += carry1; h1 -= carry1 << 25; let carry3 = (h3 + (1<<24)) >> 25; h4 += carry3; h3 -= carry3 << 25; let carry5 = (h5 + (1<<24)) >> 25; h6 += carry5; h5 -= carry5 << 25; let carry7 = (h7 + (1<<24)) >> 25; h8 += carry7; h7 -= carry7 << 25; let carry0 = (h0 + (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; let carry2 = (h2 + (1<<25)) >> 26; h3 += carry2; h2 -= carry2 << 26; let carry4 = (h4 + (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; let carry6 = (h6 + (1<<25)) >> 26; h7 += carry6; h6 -= carry6 << 26; let carry8 = (h8 + (1<<25)) >> 26; h9 += carry8; h8 -= carry8 << 26; Fe([h0 as i32, h1 as i32, h2 as i32, h3 as i32, h4 as i32, h5 as i32, h6 as i32, h7 as i32, h8 as i32, h9 as i32]) } /* h = f * f Can overlap h with f. Preconditions: |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. Postconditions: |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. */ /* See fe_mul.c for discussion of implementation strategy. 
*/ fn square(&self) -> Fe { let &Fe(f) = self; let f0 = f[0]; let f1 = f[1]; let f2 = f[2]; let f3 = f[3]; let f4 = f[4]; let f5 = f[5]; let f6 = f[6]; let f7 = f[7]; let f8 = f[8]; let f9 = f[9]; let f0_2 = 2 * f0; let f1_2 = 2 * f1; let f2_2 = 2 * f2; let f3_2 = 2 * f3; let f4_2 = 2 * f4; let f5_2 = 2 * f5; let f6_2 = 2 * f6; let f7_2 = 2 * f7; let f5_38 = 38 * f5; /* 1.31*2^30 */ let f6_19 = 19 * f6; /* 1.31*2^30 */ let f7_38 = 38 * f7; /* 1.31*2^30 */ let f8_19 = 19 * f8; /* 1.31*2^30 */ let f9_38 = 38 * f9; /* 1.31*2^30 */ let f0f0 = (f0 as i64) * (f0 as i64); let f0f1_2 = (f0_2 as i64) * (f1 as i64); let f0f2_2 = (f0_2 as i64) * (f2 as i64); let f0f3_2 = (f0_2 as i64) * (f3 as i64); let f0f4_2 = (f0_2 as i64) * (f4 as i64); let f0f5_2 = (f0_2 as i64) * (f5 as i64); let f0f6_2 = (f0_2 as i64) * (f6 as i64); let f0f7_2 = (f0_2 as i64) * (f7 as i64); let f0f8_2 = (f0_2 as i64) * (f8 as i64); let f0f9_2 = (f0_2 as i64) * (f9 as i64); let f1f1_2 = (f1_2 as i64) * (f1 as i64); let f1f2_2 = (f1_2 as i64) * (f2 as i64); let f1f3_4 = (f1_2 as i64) * (f3_2 as i64); let f1f4_2 = (f1_2 as i64) * (f4 as i64); let f1f5_4 = (f1_2 as i64) * (f5_2 as i64); let f1f6_2 = (f1_2 as i64) * (f6 as i64); let f1f7_4 = (f1_2 as i64) * (f7_2 as i64); let f1f8_2 = (f1_2 as i64) * (f8 as i64); let f1f9_76 = (f1_2 as i64) * (f9_38 as i64); let f2f2 = (f2 as i64) * (f2 as i64); let f2f3_2 = (f2_2 as i64) * (f3 as i64); let f2f4_2 = (f2_2 as i64) * (f4 as i64); let f2f5_2 = (f2_2 as i64) * (f5 as i64); let f2f6_2 = (f2_2 as i64) * (f6 as i64); let f2f7_2 = (f2_2 as i64) * (f7 as i64); let f2f8_38 = (f2_2 as i64) * (f8_19 as i64); let f2f9_38 = (f2 as i64) * (f9_38 as i64); let f3f3_2 = (f3_2 as i64) * (f3 as i64); let f3f4_2 = (f3_2 as i64) * (f4 as i64); let f3f5_4 = (f3_2 as i64) * (f5_2 as i64); let f3f6_2 = (f3_2 as i64) * (f6 as i64); let f3f7_76 = (f3_2 as i64) * (f7_38 as i64); let f3f8_38 = (f3_2 as i64) * (f8_19 as i64); let f3f9_76 = (f3_2 as i64) * (f9_38 as i64); let f4f4 = (f4 as i64) * (f4 as i64); let f4f5_2 = (f4_2 as i64) * (f5 as i64); let f4f6_38 = (f4_2 as i64) * (f6_19 as i64); let f4f7_38 = (f4 as i64) * (f7_38 as i64); let f4f8_38 = (f4_2 as i64) * (f8_19 as i64); let f4f9_38 = (f4 as i64) * (f9_38 as i64); let f5f5_38 = (f5 as i64) * (f5_38 as i64); let f5f6_38 = (f5_2 as i64) * (f6_19 as i64); let f5f7_76 = (f5_2 as i64) * (f7_38 as i64); let f5f8_38 = (f5_2 as i64) * (f8_19 as i64); let f5f9_76 = (f5_2 as i64) * (f9_38 as i64); let f6f6_19 = (f6 as i64) * (f6_19 as i64); let f6f7_38 = (f6 as i64) * (f7_38 as i64); let f6f8_38 = (f6_2 as i64) * (f8_19 as i64); let f6f9_38 = (f6 as i64) * (f9_38 as i64); let f7f7_38 = (f7 as i64) * (f7_38 as i64); let f7f8_38 = (f7_2 as i64) * (f8_19 as i64); let f7f9_76 = (f7_2 as i64) * (f9_38 as i64); let f8f8_19 = (f8 as i64) * (f8_19 as i64); let f8f9_38 = (f8 as i64) * (f9_38 as i64); let f9f9_38 = (f9 as i64) * (f9_38 as i64); let mut h0 = f0f0 +f1f9_76+f2f8_38+f3f7_76+f4f6_38+f5f5_38; let mut h1 = f0f1_2+f2f9_38+f3f8_38+f4f7_38+f5f6_38; let mut h2 = f0f2_2+f1f1_2 +f3f9_76+f4f8_38+f5f7_76+f6f6_19; let mut h3 = f0f3_2+f1f2_2 +f4f9_38+f5f8_38+f6f7_38; let mut h4 = f0f4_2+f1f3_4 +f2f2 +f5f9_76+f6f8_38+f7f7_38; let mut h5 = f0f5_2+f1f4_2 +f2f3_2 +f6f9_38+f7f8_38; let mut h6 = f0f6_2+f1f5_4 +f2f4_2 +f3f3_2 +f7f9_76+f8f8_19; let mut h7 = f0f7_2+f1f6_2 +f2f5_2 +f3f4_2 +f8f9_38; let mut h8 = f0f8_2+f1f7_4 +f2f6_2 +f3f5_4 +f4f4 +f9f9_38; let mut h9 = f0f9_2+f1f8_2 +f2f7_2 +f3f6_2 +f4f5_2; let carry0 = (h0 + (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; let 
carry4 = (h4 + (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; let carry1 = (h1 + (1<<24)) >> 25; h2 += carry1; h1 -= carry1 << 25; let carry5 = (h5 + (1<<24)) >> 25; h6 += carry5; h5 -= carry5 << 25; let carry2 = (h2 + (1<<25)) >> 26; h3 += carry2; h2 -= carry2 << 26; let carry6 = (h6 + (1<<25)) >> 26; h7 += carry6; h6 -= carry6 << 26; let carry3 = (h3 + (1<<24)) >> 25; h4 += carry3; h3 -= carry3 << 25; let carry7 = (h7 + (1<<24)) >> 25; h8 += carry7; h7 -= carry7 << 25; let carry4 = (h4 + (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; let carry8 = (h8 + (1<<25)) >> 26; h9 += carry8; h8 -= carry8 << 26; let carry9 = (h9 + (1<<24)) >> 25; h0 += carry9 * 19; h9 -= carry9 << 25; let carrya = (h0 + (1<<25)) >> 26; h1 += carrya; h0 -= carrya << 26; Fe([h0 as i32, h1 as i32, h2 as i32, h3 as i32, h4 as i32, h5 as i32, h6 as i32, h7 as i32, h8 as i32, h9 as i32]) } fn square_and_double(&self) -> Fe { let &Fe(f) = self; let f0 = f[0]; let f1 = f[1]; let f2 = f[2]; let f3 = f[3]; let f4 = f[4]; let f5 = f[5]; let f6 = f[6]; let f7 = f[7]; let f8 = f[8]; let f9 = f[9]; let f0_2 = 2 * f0; let f1_2 = 2 * f1; let f2_2 = 2 * f2; let f3_2 = 2 * f3; let f4_2 = 2 * f4; let f5_2 = 2 * f5; let f6_2 = 2 * f6; let f7_2 = 2 * f7; let f5_38 = 38 * f5; /* 1.959375*2^30 */ let f6_19 = 19 * f6; /* 1.959375*2^30 */ let f7_38 = 38 * f7; /* 1.959375*2^30 */ let f8_19 = 19 * f8; /* 1.959375*2^30 */ let f9_38 = 38 * f9; /* 1.959375*2^30 */ let f0f0 = (f0 as i64) * (f0 as i64); let f0f1_2 = (f0_2 as i64) * (f1 as i64); let f0f2_2 = (f0_2 as i64) * (f2 as i64); let f0f3_2 = (f0_2 as i64) * (f3 as i64); let f0f4_2 = (f0_2 as i64) * (f4 as i64); let f0f5_2 = (f0_2 as i64) * (f5 as i64); let f0f6_2 = (f0_2 as i64) * (f6 as i64); let f0f7_2 = (f0_2 as i64) * (f7 as i64); let f0f8_2 = (f0_2 as i64) * (f8 as i64); let f0f9_2 = (f0_2 as i64) * (f9 as i64); let f1f1_2 = (f1_2 as i64) * (f1 as i64); let f1f2_2 = (f1_2 as i64) * (f2 as i64); let f1f3_4 = (f1_2 as i64) * (f3_2 as i64); let f1f4_2 = (f1_2 as i64) * (f4 as i64); let f1f5_4 = (f1_2 as i64) * (f5_2 as i64); let f1f6_2 = (f1_2 as i64) * (f6 as i64); let f1f7_4 = (f1_2 as i64) * (f7_2 as i64); let f1f8_2 = (f1_2 as i64) * (f8 as i64); let f1f9_76 = (f1_2 as i64) * (f9_38 as i64); let f2f2 = (f2 as i64) * (f2 as i64); let f2f3_2 = (f2_2 as i64) * (f3 as i64); let f2f4_2 = (f2_2 as i64) * (f4 as i64); let f2f5_2 = (f2_2 as i64) * (f5 as i64); let f2f6_2 = (f2_2 as i64) * (f6 as i64); let f2f7_2 = (f2_2 as i64) * (f7 as i64); let f2f8_38 = (f2_2 as i64) * (f8_19 as i64); let f2f9_38 = (f2 as i64) * (f9_38 as i64); let f3f3_2 = (f3_2 as i64) * (f3 as i64); let f3f4_2 = (f3_2 as i64) * (f4 as i64); let f3f5_4 = (f3_2 as i64) * (f5_2 as i64); let f3f6_2 = (f3_2 as i64) * (f6 as i64); let f3f7_76 = (f3_2 as i64) * (f7_38 as i64); let f3f8_38 = (f3_2 as i64) * (f8_19 as i64); let f3f9_76 = (f3_2 as i64) * (f9_38 as i64); let f4f4 = (f4 as i64) * (f4 as i64); let f4f5_2 = (f4_2 as i64) * (f5 as i64); let f4f6_38 = (f4_2 as i64) * (f6_19 as i64); let f4f7_38 = (f4 as i64) * (f7_38 as i64); let f4f8_38 = (f4_2 as i64) * (f8_19 as i64); let f4f9_38 = (f4 as i64) * (f9_38 as i64); let f5f5_38 = (f5 as i64) * (f5_38 as i64); let f5f6_38 = (f5_2 as i64) * (f6_19 as i64); let f5f7_76 = (f5_2 as i64) * (f7_38 as i64); let f5f8_38 = (f5_2 as i64) * (f8_19 as i64); let f5f9_76 = (f5_2 as i64) * (f9_38 as i64); let f6f6_19 = (f6 as i64) * (f6_19 as i64); let f6f7_38 = (f6 as i64) * (f7_38 as i64); let f6f8_38 = (f6_2 as i64) * (f8_19 as i64); let f6f9_38 = (f6 as i64) * (f9_38 as i64); 
let f7f7_38 = (f7 as i64) * (f7_38 as i64); let f7f8_38 = (f7_2 as i64) * (f8_19 as i64); let f7f9_76 = (f7_2 as i64) * (f9_38 as i64); let f8f8_19 = (f8 as i64) * (f8_19 as i64); let f8f9_38 = (f8 as i64) * (f9_38 as i64); let f9f9_38 = (f9 as i64) * (f9_38 as i64); let mut h0 = f0f0 +f1f9_76+f2f8_38+f3f7_76+f4f6_38+f5f5_38; let mut h1 = f0f1_2+f2f9_38+f3f8_38+f4f7_38+f5f6_38; let mut h2 = f0f2_2+f1f1_2 +f3f9_76+f4f8_38+f5f7_76+f6f6_19; let mut h3 = f0f3_2+f1f2_2 +f4f9_38+f5f8_38+f6f7_38; let mut h4 = f0f4_2+f1f3_4 +f2f2 +f5f9_76+f6f8_38+f7f7_38; let mut h5 = f0f5_2+f1f4_2 +f2f3_2 +f6f9_38+f7f8_38; let mut h6 = f0f6_2+f1f5_4 +f2f4_2 +f3f3_2 +f7f9_76+f8f8_19; let mut h7 = f0f7_2+f1f6_2 +f2f5_2 +f3f4_2 +f8f9_38; let mut h8 = f0f8_2+f1f7_4 +f2f6_2 +f3f5_4 +f4f4 +f9f9_38; let mut h9 = f0f9_2+f1f8_2 +f2f7_2 +f3f6_2 +f4f5_2; let mut carry0: i64; let carry1: i64; let carry2: i64; let carry3: i64; let mut carry4: i64; let carry5: i64; let carry6: i64; let carry7: i64; let carry8: i64; let carry9: i64; h0 += h0; h1 += h1; h2 += h2; h3 += h3; h4 += h4; h5 += h5; h6 += h6; h7 += h7; h8 += h8; h9 += h9; carry0 = (h0 + (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; carry4 = (h4 + (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; carry1 = (h1 + (1<<24)) >> 25; h2 += carry1; h1 -= carry1 << 25; carry5 = (h5 + (1<<24)) >> 25; h6 += carry5; h5 -= carry5 << 25; carry2 = (h2 + (1<<25)) >> 26; h3 += carry2; h2 -= carry2 << 26; carry6 = (h6 + (1<<25)) >> 26; h7 += carry6; h6 -= carry6 << 26; carry3 = (h3 + (1<<24)) >> 25; h4 += carry3; h3 -= carry3 << 25; carry7 = (h7 + (1<<24)) >> 25; h8 += carry7; h7 -= carry7 << 25; carry4 = (h4 + (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26; carry8 = (h8 + (1<<25)) >> 26; h9 += carry8; h8 -= carry8 << 26; carry9 = (h9 + (1<<24)) >> 25; h0 += carry9 * 19; h9 -= carry9 << 25; carry0 = (h0 + (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26; Fe([h0 as i32, h1 as i32, h2 as i32, h3 as i32, h4 as i32, h5 as i32, h6 as i32, h7 as i32, h8 as i32, h9 as i32]) } pub fn invert(&self) -> Fe { let z1 = *self; /* qhasm: z2 = z1^2^1 */ let z2 = z1.square(); /* qhasm: z8 = z2^2^2 */ let z8 = z2.square().square(); /* qhasm: z9 = z1*z8 */ let z9 = z1*z8; /* qhasm: z11 = z2*z9 */ let z11 = z2*z9; /* qhasm: z22 = z11^2^1 */ let z22 = z11.square(); /* qhasm: z_5_0 = z9*z22 */ let z_5_0 = z9*z22; /* qhasm: z_10_5 = z_5_0^2^5 */ let z_10_5 = (0..5).fold(z_5_0, |z_5_n, _| z_5_n.square()); /* qhasm: z_10_0 = z_10_5*z_5_0 */ let z_10_0 = z_10_5*z_5_0; /* qhasm: z_20_10 = z_10_0^2^10 */ let z_20_10 = (0..10).fold(z_10_0, |x, _| x.square()); /* qhasm: z_20_0 = z_20_10*z_10_0 */ let z_20_0 = z_20_10*z_10_0; /* qhasm: z_40_20 = z_20_0^2^20 */ let z_40_20 = (0..20).fold(z_20_0, |x, _| x.square()); /* qhasm: z_40_0 = z_40_20*z_20_0 */ let z_40_0 = z_40_20*z_20_0; /* qhasm: z_50_10 = z_40_0^2^10 */ let z_50_10 = (0..10).fold(z_40_0, |x, _| x.square()); /* qhasm: z_50_0 = z_50_10*z_10_0 */ let z_50_0 = z_50_10*z_10_0; /* qhasm: z_100_50 = z_50_0^2^50 */ let z_100_50 = (0..50).fold(z_50_0, |x, _| x.square()); /* qhasm: z_100_0 = z_100_50*z_50_0 */ let z_100_0 = z_100_50*z_50_0; /* qhasm: z_200_100 = z_100_0^2^100 */ let z_200_100 = (0..100).fold(z_100_0, |x, _| x.square()); /* qhasm: z_200_0 = z_200_100*z_100_0 */ /* asm 1: fe_mul(>z_200_0=fe#3,<z_200_100=fe#4,<z_100_0=fe#3); */ /* asm 2: fe_mul(>z_200_0=t2,<z_200_100=t3,<z_100_0=t2); */ let z_200_0 = z_200_100*z_100_0; /* qhasm: z_250_50 = z_200_0^2^50 */ let z_250_50 = (0..50).fold(z_200_0, |x, _| x.square()); /* qhasm: z_250_0 = z_250_50*z_50_0 
*/ let z_250_0 = z_250_50*z_50_0; /* qhasm: z_255_5 = z_250_0^2^5 */ let z_255_5 = (0..5).fold(z_250_0, |x, _| x.square()); /* qhasm: z_255_21 = z_255_5*z11 */ /* asm 1: fe_mul(>z_255_21=fe#12,<z_255_5=fe#2,<z11=fe#1); */ /* asm 2: fe_mul(>z_255_21=out,<z_255_5=t1,<z11=t0); */ let z_255_21 = z_255_5*z11; z_255_21 } fn is_nonzero(&self) -> bool { let bs = self.to_bytes(); let zero = [0; 32]; !fixed_time_eq(bs.as_ref(), zero.as_ref()) } fn is_negative(&self) -> bool { (self.to_bytes()[0] & 1) != 0 } fn neg(&self) -> Fe { let &Fe(f) = self; Fe([-f[0], -f[1], -f[2], -f[3], -f[4], -f[5], -f[6], -f[7], -f[8], -f[9]]) } fn pow25523(&self) -> Fe { let z2 = self.square(); let z8 = (0..2).fold(z2, |x, _| x.square()); let z9 = *self * z8; let z11 = z2 * z9; let z22 = z11.square(); let z_5_0 = z9 * z22; let z_10_5 = (0..5).fold(z_5_0, |x, _| x.square()); let z_10_0 = z_10_5 * z_5_0; let z_20_10 = (0..10).fold(z_10_0, |x, _| x.square()); let z_20_0 = z_20_10 * z_10_0; let z_40_20 = (0..20).fold(z_20_0, |x, _| x.square()); let z_40_0 = z_40_20 * z_20_0; let z_50_10 = (0..10).fold(z_40_0, |x, _| x.square()); let z_50_0 = z_50_10 * z_10_0; let z_100_50 = (0..50).fold(z_50_0, |x, _| x.square()); let z_100_0 = z_100_50 * z_50_0; let z_200_100 = (0..100).fold(z_100_0, |x, _| x.square()); let z_200_0 = z_200_100 * z_100_0; let z_250_50 = (0..50).fold(z_200_0, |x, _| x.square()); let z_250_0 = z_250_50 * z_50_0; let z_252_2 = (0..2).fold(z_250_0, |x, _| x.square()); let z_252_3 = z_252_2 * *self; z_252_3 } } #[derive(Clone, Copy)] pub struct GeP2 { x: Fe, y: Fe, z: Fe, } #[derive(Clone, Copy)] pub struct GeP3 { x: Fe, y: Fe, z: Fe, t: Fe, } #[derive(Clone, Copy)] pub struct GeP1P1 { x: Fe, y: Fe, z: Fe, t: Fe, } #[derive(Clone, Copy)] pub struct GePrecomp { y_plus_x: Fe, y_minus_x: Fe, xy2d: Fe, } #[derive(Clone, Copy)] pub struct GeCached { y_plus_x: Fe, y_minus_x: Fe, z: Fe, t2d: Fe, } impl GeP1P1 { fn to_p2(&self) -> GeP2 { GeP2 { x: self.x * self.t, y: self.y * self.z, z: self.z * self.t, } } fn to_p3(&self) -> GeP3 { GeP3 { x: self.x * self.t, y: self.y * self.z, z: self.z * self.t, t: self.x * self.y, } } } impl GeP2 { fn zero() -> GeP2 { GeP2 { x: FE_ZERO, y: FE_ONE, z: FE_ONE, } } pub fn to_bytes(&self) -> [u8; 32] { let recip = self.z.invert(); let x = self.x * recip; let y = self.y * recip; let mut bs = y.to_bytes(); bs[31] ^= (if x.is_negative() { 1 } else { 0 }) << 7; bs } fn dbl(&self) -> GeP1P1 { let xx = self.x.square(); let yy = self.y.square(); let b = self.z.square_and_double(); let a = self.x + self.y; let aa = a.square(); let y3 = yy + xx; let z3 = yy - xx; let x3 = aa - y3; let t3 = b - z3; GeP1P1 { x: x3, y: y3, z: z3, t: t3 } } fn slide(a: &[u8]) -> [i8; 256] { let mut r = [0i8; 256]; for i in 0..256 { r[i] = (1 & (a[i >> 3] >> (i & 7))) as i8; } for i in 0..256 { if r[i]!=0 { for b in 1..min(7, 256-i) { if r[i + b] != 0 { if r[i] + (r[i + b] << b) <= 15 { r[i] += r[i + b] << b; r[i + b] = 0; } else if r[i] - (r[i + b] << b) >= -15 { r[i] -= r[i + b] << b; for k in i+b..256 { if r[k]==0 { r[k] = 1; break; } r[k] = 0; } } else { break; } } } } } r } /* r = a * A + b * B where a = a[0]+256*a[1]+...+256^31 a[31]. and b = b[0]+256*b[1]+...+256^31 b[31]. B is the Ed25519 base point (x,4/5) with x positive. 
*/
pub fn double_scalarmult_vartime(a_scalar: &[u8], a_point: GeP3, b_scalar: &[u8]) -> GeP2 {
    let aslide = GeP2::slide(a_scalar);
    let bslide = GeP2::slide(b_scalar);

    let mut ai = [GeCached{y_plus_x:FE_ZERO, y_minus_x: FE_ZERO, z: FE_ZERO, t2d: FE_ZERO}; 8]; /* A,3A,5A,7A,9A,11A,13A,15A */
    ai[0] = a_point.to_cached();
    let a2 = a_point.dbl().to_p3();
    ai[1] = (a2 + ai[0]).to_p3().to_cached();
    ai[2] = (a2 + ai[1]).to_p3().to_cached();
    ai[3] = (a2 + ai[2]).to_p3().to_cached();
    ai[4] = (a2 + ai[3]).to_p3().to_cached();
    ai[5] = (a2 + ai[4]).to_p3().to_cached();
    ai[6] = (a2 + ai[5]).to_p3().to_cached();
    ai[7] = (a2 + ai[6]).to_p3().to_cached();

    let mut r = GeP2::zero();

    let mut i: usize = 255;
    loop {
        if aslide[i]!=0 || bslide[i]!=0 {
            break;
        }
        if i==0 {
            return r;
        }
        i -= 1;
    }

    loop {
        let mut t = r.dbl();
        if aslide[i] > 0 {
            t = t.to_p3() + ai[(aslide[i]/2) as usize];
        } else if aslide[i] < 0 {
            t = t.to_p3() - ai[(-aslide[i]/2) as usize];
        }
        if bslide[i] > 0 {
            t = t.to_p3() + BI[(bslide[i]/2) as usize];
        } else if bslide[i] < 0 {
            t = t.to_p3() - BI[(-bslide[i]/2) as usize];
        }
        r = t.to_p2();

        if i==0 {
            return r;
        }
        i -= 1;
    }
}
}

impl GeP3 {
    pub fn from_bytes_negate_vartime(s: &[u8]) -> Option<GeP3> {
        let y = Fe::from_bytes(s);
        let z = FE_ONE;
        let y_squared = y.square();
        let u = y_squared - FE_ONE;
        let v = (y_squared * FE_D) + FE_ONE;
        let v_raise_3 = v.square() * v;
        let v_raise_7 = v_raise_3.square() * v;
        // Fe multiplication is commutative (see the mul_commutes test below), so computing
        // v^7 * u here instead of u * v^7 as in the usual notation gives the same result.
        let uv7 = v_raise_7 * u;
        let mut x = uv7.pow25523() * v_raise_3 * u;
        let vxx = x.square() * v;
        let check = vxx - u;
        if check.is_nonzero() {
            let check2 = vxx + u;
            if check2.is_nonzero() {
                return None;
            }
            x = x * FE_SQRTM1;
        }

        if x.is_negative() == ((s[31]>>7)!=0) {
            x = x.neg();
        }

        let t = x * y;
        Some(GeP3{x: x, y: y, z: z, t: t})
    }

    fn to_p2(&self) -> GeP2 {
        GeP2 {
            x: self.x,
            y: self.y,
            z: self.z,
        }
    }

    fn to_cached(&self) -> GeCached {
        GeCached {
            y_plus_x: self.y + self.x,
            y_minus_x: self.y - self.x,
            z: self.z,
            t2d: self.t * FE_D2
        }
    }

    fn zero() -> GeP3 {
        GeP3 {
            x: FE_ZERO,
            y: FE_ONE,
            z: FE_ONE,
            t: FE_ZERO,
        }
    }

    fn dbl(&self) -> GeP1P1 {
        self.to_p2().dbl()
    }

    pub fn to_bytes(&self) -> [u8; 32] {
        let recip = self.z.invert();
        let x = self.x * recip;
        let y = self.y * recip;
        let mut bs = y.to_bytes();
        bs[31] ^= (if x.is_negative() { 1 } else { 0 }) << 7;
        bs
    }
}

impl Add<GeCached> for GeP3 {
    type Output = GeP1P1;

    fn add(self, _rhs: GeCached) -> GeP1P1 {
        let y1_plus_x1 = self.y + self.x;
        let y1_minus_x1 = self.y - self.x;
        let a = y1_plus_x1 * _rhs.y_plus_x;
        let b = y1_minus_x1 * _rhs.y_minus_x;
        let c = _rhs.t2d * self.t;
        let zz = self.z * _rhs.z;
        let d = zz + zz;
        let x3 = a - b;
        let y3 = a + b;
        let z3 = d + c;
        let t3 = d - c;

        GeP1P1 {
            x: x3,
            y: y3,
            z: z3,
            t: t3
        }
    }
}

impl Add<GePrecomp> for GeP3 {
    type Output = GeP1P1;

    fn add(self, _rhs: GePrecomp) -> GeP1P1 {
        let y1_plus_x1 = self.y + self.x;
        let y1_minus_x1 = self.y - self.x;
        let a = y1_plus_x1 * _rhs.y_plus_x;
        let b = y1_minus_x1 * _rhs.y_minus_x;
        let c = _rhs.xy2d * self.t;
        let d = self.z + self.z;
        let x3 = a - b;
        let y3 = a + b;
        let z3 = d + c;
        let t3 = d - c;

        GeP1P1 {
            x: x3,
            y: y3,
            z: z3,
            t: t3
        }
    }
}

impl Sub<GeCached> for GeP3 {
    type Output = GeP1P1;

    fn sub(self, _rhs: GeCached) -> GeP1P1 {
        let y1_plus_x1 = self.y + self.x;
        let y1_minus_x1 = self.y - self.x;
        let a = y1_plus_x1 * _rhs.y_minus_x;
        let b = y1_minus_x1 * _rhs.y_plus_x;
        let c = _rhs.t2d * self.t;
        let zz = self.z * _rhs.z;
        let d = zz + zz;
        let x3 = a - b;
        let y3 = a + b;
        let z3 = d - c;
        let t3 = d + c;

        GeP1P1 {
            x: x3,
            y: y3,
            z: z3,
            t: t3
        }
    }
}

impl Sub<GePrecomp> for GeP3 {
    type Output = GeP1P1;

    fn sub(self, _rhs: GePrecomp) -> GeP1P1 {
        let y1_plus_x1 = self.y + self.x;
        let y1_minus_x1 = self.y - self.x;
        let a = y1_plus_x1 * _rhs.y_minus_x;
        let b = y1_minus_x1 * _rhs.y_plus_x;
        let c = _rhs.xy2d * self.t;
        let d = self.z + self.z;
        let x3 = a - b;
        let y3 = a + b;
        let z3 = d - c;
        let t3 = d + c;

        GeP1P1 {
            x: x3,
            y: y3,
            z: z3,
            t: t3
        }
    }
}

fn equal(b: u8, c: u8) -> i32 {
    let x = b ^ c;              /* 0: yes; 1..255: no */
    let mut y = x as u32;       /* 0: yes; 1..255: no */
    y = y.wrapping_sub(1);      /* 4294967295: yes; 0..254: no */
    y >>= 31;                   /* 1: yes; 0: no */
    y as i32
}

impl GePrecomp {
    fn zero() -> GePrecomp {
        GePrecomp {
            y_plus_x: FE_ONE,
            y_minus_x: FE_ONE,
            xy2d: FE_ZERO,
        }
    }

    pub fn maybe_set(&mut self, other: &GePrecomp, do_swap: i32) {
        self.y_plus_x.maybe_set(&other.y_plus_x, do_swap);
        self.y_minus_x.maybe_set(&other.y_minus_x, do_swap);
        self.xy2d.maybe_set(&other.xy2d, do_swap);
    }

    pub fn select(pos: usize, b: i8) -> GePrecomp {
        let bnegative = (b as u8) >> 7;
        let babs: u8 = (b - (((-(bnegative as i8)) & b) << 1)) as u8;
        let mut t = GePrecomp::zero();
        t.maybe_set(&GE_PRECOMP_BASE[pos][0], equal(babs, 1));
        t.maybe_set(&GE_PRECOMP_BASE[pos][1], equal(babs, 2));
        t.maybe_set(&GE_PRECOMP_BASE[pos][2], equal(babs, 3));
        t.maybe_set(&GE_PRECOMP_BASE[pos][3], equal(babs, 4));
        t.maybe_set(&GE_PRECOMP_BASE[pos][4], equal(babs, 5));
        t.maybe_set(&GE_PRECOMP_BASE[pos][5], equal(babs, 6));
        t.maybe_set(&GE_PRECOMP_BASE[pos][6], equal(babs, 7));
        t.maybe_set(&GE_PRECOMP_BASE[pos][7], equal(babs, 8));
        let minus_t = GePrecomp {
            y_plus_x: t.y_minus_x,
            y_minus_x: t.y_plus_x,
            xy2d: t.xy2d.neg(),
        };
        t.maybe_set(&minus_t, bnegative as i32);
        t
    }
}

/*
h = a * B
where a = a[0]+256*a[1]+...+256^31 a[31]
B is the Ed25519 base point (x,4/5) with x positive.

Preconditions:
  a[31] <= 127
*/
pub fn ge_scalarmult_base(a: &[u8]) -> GeP3 {
    let mut es: [i8; 64] = [0; 64];
    let mut r: GeP1P1;
    let mut s: GeP2;
    let mut t: GePrecomp;

    for i in 0..32 {
        es[2 * i + 0] = ((a[i] >> 0) & 15) as i8;
        es[2 * i + 1] = ((a[i] >> 4) & 15) as i8;
    }
    /* each es[i] is between 0 and 15 */
    /* es[63] is between 0 and 7 */

    let mut carry: i8 = 0;
    for i in 0..63 {
        es[i] += carry;
        carry = es[i] + 8;
        carry >>= 4;
        es[i] -= carry << 4;
    }
    es[63] += carry;
    /* each es[i] is between -8 and 8 */

    let mut h = GeP3::zero();
    for i in (1..64).step_up(2) {
        t = GePrecomp::select(i/2, es[i]);
        r = h + t;
        h = r.to_p3();
    }

    r = h.dbl(); s = r.to_p2();
    r = s.dbl(); s = r.to_p2();
    r = s.dbl(); s = r.to_p2();
    r = s.dbl(); h = r.to_p3();

    for i in (0..64).step_up(2) {
        t = GePrecomp::select(i/2, es[i]);
        r = h + t;
        h = r.to_p3();
    }
    h
}

/*
Input:
  s[0]+256*s[1]+...+256^63*s[63] = s

Output:
  s[0]+256*s[1]+...+256^31*s[31] = s mod l
  where l = 2^252 + 27742317777372353535851937790883648493.
  Overwrites s in place.
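Editorial note (added, hedged): l is the order of the prime-order subgroup generated by
the base point B, so this reduction is what turns a 64-byte hash into a valid Ed25519
scalar. A minimal usage sketch, written as comment text because it sits inside this doc
comment (the buffer name is illustrative, not part of this module's API):

    let mut h = [0u8; 64];              // e.g. a SHA-512 digest
    // ... fill `h` with hash output ...
    sc_reduce(&mut h);                  // first 32 bytes now hold h mod l
    let point = ge_scalarmult_base(&h[0..32]);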
*/ pub fn sc_reduce(s: &mut [u8]) { let mut s0: i64 = 2097151 & load_3i(s); let mut s1: i64 = 2097151 & (load_4i(&s[2..6]) >> 5); let mut s2: i64 = 2097151 & (load_3i(&s[5..8]) >> 2); let mut s3: i64 = 2097151 & (load_4i(&s[7..11]) >> 7); let mut s4: i64 = 2097151 & (load_4i(&s[10..14]) >> 4); let mut s5: i64 = 2097151 & (load_3i(&s[13..16]) >> 1); let mut s6: i64 = 2097151 & (load_4i(&s[15..19]) >> 6); let mut s7: i64 = 2097151 & (load_3i(&s[18..21]) >> 3); let mut s8: i64 = 2097151 & load_3i(&s[21..24]); let mut s9: i64 = 2097151 & (load_4i(&s[23..27]) >> 5); let mut s10: i64 = 2097151 & (load_3i(&s[26..29]) >> 2); let mut s11: i64 = 2097151 & (load_4i(&s[28..32]) >> 7); let mut s12: i64 = 2097151 & (load_4i(&s[31..35]) >> 4); let mut s13: i64 = 2097151 & (load_3i(&s[34..37]) >> 1); let mut s14: i64 = 2097151 & (load_4i(&s[36..40]) >> 6); let mut s15: i64 = 2097151 & (load_3i(&s[39..42]) >> 3); let mut s16: i64 = 2097151 & load_3i(&s[42..45]); let mut s17: i64 = 2097151 & (load_4i(&s[44..48]) >> 5); let s18: i64 = 2097151 & (load_3i(&s[47..50]) >> 2); let s19: i64 = 2097151 & (load_4i(&s[49..53]) >> 7); let s20: i64 = 2097151 & (load_4i(&s[52..56]) >> 4); let s21: i64 = 2097151 & (load_3i(&s[55..58]) >> 1); let s22: i64 = 2097151 & (load_4i(&s[57..61]) >> 6); let s23: i64 = load_4i(&s[60..64]) >> 3; let mut carry0: i64; let mut carry1: i64; let mut carry2: i64; let mut carry3: i64; let mut carry4: i64; let mut carry5: i64; let mut carry6: i64; let mut carry7: i64; let mut carry8: i64; let mut carry9: i64; let mut carry10: i64; let mut carry11: i64; let carry12: i64; let carry13: i64; let carry14: i64; let carry15: i64; let carry16: i64; s11 += s23 * 666643; s12 += s23 * 470296; s13 += s23 * 654183; s14 -= s23 * 997805; s15 += s23 * 136657; s16 -= s23 * 683901; s10 += s22 * 666643; s11 += s22 * 470296; s12 += s22 * 654183; s13 -= s22 * 997805; s14 += s22 * 136657; s15 -= s22 * 683901; s9 += s21 * 666643; s10 += s21 * 470296; s11 += s21 * 654183; s12 -= s21 * 997805; s13 += s21 * 136657; s14 -= s21 * 683901; s8 += s20 * 666643; s9 += s20 * 470296; s10 += s20 * 654183; s11 -= s20 * 997805; s12 += s20 * 136657; s13 -= s20 * 683901; s7 += s19 * 666643; s8 += s19 * 470296; s9 += s19 * 654183; s10 -= s19 * 997805; s11 += s19 * 136657; s12 -= s19 * 683901; s6 += s18 * 666643; s7 += s18 * 470296; s8 += s18 * 654183; s9 -= s18 * 997805; s10 += s18 * 136657; s11 -= s18 * 683901; carry6 = (s6 + (1<<20)) >> 21; s7 += carry6; s6 -= carry6 << 21; carry8 = (s8 + (1<<20)) >> 21; s9 += carry8; s8 -= carry8 << 21; carry10 = (s10 + (1<<20)) >> 21; s11 += carry10; s10 -= carry10 << 21; carry12 = (s12 + (1<<20)) >> 21; s13 += carry12; s12 -= carry12 << 21; carry14 = (s14 + (1<<20)) >> 21; s15 += carry14; s14 -= carry14 << 21; carry16 = (s16 + (1<<20)) >> 21; s17 += carry16; s16 -= carry16 << 21; carry7 = (s7 + (1<<20)) >> 21; s8 += carry7; s7 -= carry7 << 21; carry9 = (s9 + (1<<20)) >> 21; s10 += carry9; s9 -= carry9 << 21; carry11 = (s11 + (1<<20)) >> 21; s12 += carry11; s11 -= carry11 << 21; carry13 = (s13 + (1<<20)) >> 21; s14 += carry13; s13 -= carry13 << 21; carry15 = (s15 + (1<<20)) >> 21; s16 += carry15; s15 -= carry15 << 21; s5 += s17 * 666643; s6 += s17 * 470296; s7 += s17 * 654183; s8 -= s17 * 997805; s9 += s17 * 136657; s10 -= s17 * 683901; s4 += s16 * 666643; s5 += s16 * 470296; s6 += s16 * 654183; s7 -= s16 * 997805; s8 += s16 * 136657; s9 -= s16 * 683901; s3 += s15 * 666643; s4 += s15 * 470296; s5 += s15 * 654183; s6 -= s15 * 997805; s7 += s15 * 136657; s8 -= s15 * 683901; s2 += s14 * 666643; s3 
+= s14 * 470296; s4 += s14 * 654183; s5 -= s14 * 997805; s6 += s14 * 136657; s7 -= s14 * 683901; s1 += s13 * 666643; s2 += s13 * 470296; s3 += s13 * 654183; s4 -= s13 * 997805; s5 += s13 * 136657; s6 -= s13 * 683901; s0 += s12 * 666643; s1 += s12 * 470296; s2 += s12 * 654183; s3 -= s12 * 997805; s4 += s12 * 136657; s5 -= s12 * 683901; s12 = 0; carry0 = (s0 + (1<<20)) >> 21; s1 += carry0; s0 -= carry0 << 21; carry2 = (s2 + (1<<20)) >> 21; s3 += carry2; s2 -= carry2 << 21; carry4 = (s4 + (1<<20)) >> 21; s5 += carry4; s4 -= carry4 << 21; carry6 = (s6 + (1<<20)) >> 21; s7 += carry6; s6 -= carry6 << 21; carry8 = (s8 + (1<<20)) >> 21; s9 += carry8; s8 -= carry8 << 21; carry10 = (s10 + (1<<20)) >> 21; s11 += carry10; s10 -= carry10 << 21; carry1 = (s1 + (1<<20)) >> 21; s2 += carry1; s1 -= carry1 << 21; carry3 = (s3 + (1<<20)) >> 21; s4 += carry3; s3 -= carry3 << 21; carry5 = (s5 + (1<<20)) >> 21; s6 += carry5; s5 -= carry5 << 21; carry7 = (s7 + (1<<20)) >> 21; s8 += carry7; s7 -= carry7 << 21; carry9 = (s9 + (1<<20)) >> 21; s10 += carry9; s9 -= carry9 << 21; carry11 = (s11 + (1<<20)) >> 21; s12 += carry11; s11 -= carry11 << 21; s0 += s12 * 666643; s1 += s12 * 470296; s2 += s12 * 654183; s3 -= s12 * 997805; s4 += s12 * 136657; s5 -= s12 * 683901; s12 = 0; carry0 = s0 >> 21; s1 += carry0; s0 -= carry0 << 21; carry1 = s1 >> 21; s2 += carry1; s1 -= carry1 << 21; carry2 = s2 >> 21; s3 += carry2; s2 -= carry2 << 21; carry3 = s3 >> 21; s4 += carry3; s3 -= carry3 << 21; carry4 = s4 >> 21; s5 += carry4; s4 -= carry4 << 21; carry5 = s5 >> 21; s6 += carry5; s5 -= carry5 << 21; carry6 = s6 >> 21; s7 += carry6; s6 -= carry6 << 21; carry7 = s7 >> 21; s8 += carry7; s7 -= carry7 << 21; carry8 = s8 >> 21; s9 += carry8; s8 -= carry8 << 21; carry9 = s9 >> 21; s10 += carry9; s9 -= carry9 << 21; carry10 = s10 >> 21; s11 += carry10; s10 -= carry10 << 21; carry11 = s11 >> 21; s12 += carry11; s11 -= carry11 << 21; s0 += s12 * 666643; s1 += s12 * 470296; s2 += s12 * 654183; s3 -= s12 * 997805; s4 += s12 * 136657; s5 -= s12 * 683901; carry0 = s0 >> 21; s1 += carry0; s0 -= carry0 << 21; carry1 = s1 >> 21; s2 += carry1; s1 -= carry1 << 21; carry2 = s2 >> 21; s3 += carry2; s2 -= carry2 << 21; carry3 = s3 >> 21; s4 += carry3; s3 -= carry3 << 21; carry4 = s4 >> 21; s5 += carry4; s4 -= carry4 << 21; carry5 = s5 >> 21; s6 += carry5; s5 -= carry5 << 21; carry6 = s6 >> 21; s7 += carry6; s6 -= carry6 << 21; carry7 = s7 >> 21; s8 += carry7; s7 -= carry7 << 21; carry8 = s8 >> 21; s9 += carry8; s8 -= carry8 << 21; carry9 = s9 >> 21; s10 += carry9; s9 -= carry9 << 21; carry10 = s10 >> 21; s11 += carry10; s10 -= carry10 << 21; s[0] = (s0 >> 0) as u8; s[1] = (s0 >> 8) as u8; s[2] = ((s0 >> 16) | (s1 << 5)) as u8; s[3] = (s1 >> 3) as u8; s[4] = (s1 >> 11) as u8; s[5] = ((s1 >> 19) | (s2 << 2)) as u8; s[6] = (s2 >> 6) as u8; s[7] = ((s2 >> 14) | (s3 << 7)) as u8; s[8] = (s3 >> 1) as u8; s[9] = (s3 >> 9) as u8; s[10] = ((s3 >> 17) | (s4 << 4)) as u8; s[11] = (s4 >> 4) as u8; s[12] = (s4 >> 12) as u8; s[13] = ((s4 >> 20) | (s5 << 1)) as u8; s[14] = (s5 >> 7) as u8; s[15] = ((s5 >> 15) | (s6 << 6)) as u8; s[16] = (s6 >> 2) as u8; s[17] = (s6 >> 10) as u8; s[18] = ((s6 >> 18) | (s7 << 3)) as u8; s[19] = (s7 >> 5) as u8; s[20] = (s7 >> 13) as u8; s[21] = (s8 >> 0) as u8; s[22] = (s8 >> 8) as u8; s[23] = ((s8 >> 16) | (s9 << 5)) as u8; s[24] = (s9 >> 3) as u8; s[25] = (s9 >> 11) as u8; s[26] = ((s9 >> 19) | (s10 << 2)) as u8; s[27] = (s10 >> 6) as u8; s[28] = ((s10 >> 14) | (s11 << 7)) as u8; s[29] = (s11 >> 1) as u8; s[30] = (s11 >> 9) as u8; 
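/* Editorial note (added): the twelve 21-bit limbs s0..s11 computed above hold the reduced
   value sum(s_i * 2^(21*i)) < l, and these assignments repack it into 32 little-endian
   bytes. Each output byte pulls its bits from whichever limbs overlap its 8-bit window;
   for example s[2] = ((s0 >> 16) | (s1 << 5)) combines bits 16..20 of the value (the top
   of s0) with bits 21..23 (the bottom of s1). */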
s[31] = (s11 >> 17) as u8; } /* Input: a[0]+256*a[1]+...+256^31*a[31] = a b[0]+256*b[1]+...+256^31*b[31] = b c[0]+256*c[1]+...+256^31*c[31] = c Output: s[0]+256*s[1]+...+256^31*s[31] = (ab+c) mod l where l = 2^252 + 27742317777372353535851937790883648493. */ pub fn sc_muladd(s: &mut[u8], a: &[u8], b: &[u8], c: &[u8]) { let a0 = 2097151 & load_3i(&a[0..3]); let a1 = 2097151 & (load_4i(&a[2..6]) >> 5); let a2 = 2097151 & (load_3i(&a[5..8]) >> 2); let a3 = 2097151 & (load_4i(&a[7..11]) >> 7); let a4 = 2097151 & (load_4i(&a[10..14]) >> 4); let a5 = 2097151 & (load_3i(&a[13..16]) >> 1); let a6 = 2097151 & (load_4i(&a[15..19]) >> 6); let a7 = 2097151 & (load_3i(&a[18..21]) >> 3); let a8 = 2097151 & load_3i(&a[21..24]); let a9 = 2097151 & (load_4i(&a[23..27]) >> 5); let a10 = 2097151 & (load_3i(&a[26..29]) >> 2); let a11 = load_4i(&a[28..32]) >> 7; let b0 = 2097151 & load_3i(&b[0..3]); let b1 = 2097151 & (load_4i(&b[2..6]) >> 5); let b2 = 2097151 & (load_3i(&b[5..8]) >> 2); let b3 = 2097151 & (load_4i(&b[7..11]) >> 7); let b4 = 2097151 & (load_4i(&b[10..14]) >> 4); let b5 = 2097151 & (load_3i(&b[13..16]) >> 1); let b6 = 2097151 & (load_4i(&b[15..19]) >> 6); let b7 = 2097151 & (load_3i(&b[18..21]) >> 3); let b8 = 2097151 & load_3i(&b[21..24]); let b9 = 2097151 & (load_4i(&b[23..27]) >> 5); let b10 = 2097151 & (load_3i(&b[26..29]) >> 2); let b11 = load_4i(&b[28..32]) >> 7; let c0 = 2097151 & load_3i(&c[0..3]); let c1 = 2097151 & (load_4i(&c[2..6]) >> 5); let c2 = 2097151 & (load_3i(&c[5..8]) >> 2); let c3 = 2097151 & (load_4i(&c[7..11]) >> 7); let c4 = 2097151 & (load_4i(&c[10..14]) >> 4); let c5 = 2097151 & (load_3i(&c[13..16]) >> 1); let c6 = 2097151 & (load_4i(&c[15..19]) >> 6); let c7 = 2097151 & (load_3i(&c[18..21]) >> 3); let c8 = 2097151 & load_3i(&c[21..24]); let c9 = 2097151 & (load_4i(&c[23..27]) >> 5); let c10 = 2097151 & (load_3i(&c[26..29]) >> 2); let c11 = load_4i(&c[28..32]) >> 7; let mut s0: i64; let mut s1: i64; let mut s2: i64; let mut s3: i64; let mut s4: i64; let mut s5: i64; let mut s6: i64; let mut s7: i64; let mut s8: i64; let mut s9: i64; let mut s10: i64; let mut s11: i64; let mut s12: i64; let mut s13: i64; let mut s14: i64; let mut s15: i64; let mut s16: i64; let mut s17: i64; let mut s18: i64; let mut s19: i64; let mut s20: i64; let mut s21: i64; let mut s22: i64; let mut s23: i64; let mut carry0: i64; let mut carry1: i64; let mut carry2: i64; let mut carry3: i64; let mut carry4: i64; let mut carry5: i64; let mut carry6: i64; let mut carry7: i64; let mut carry8: i64; let mut carry9: i64; let mut carry10: i64; let mut carry11: i64; let mut carry12: i64; let mut carry13: i64; let mut carry14: i64; let mut carry15: i64; let mut carry16: i64; let carry17: i64; let carry18: i64; let carry19: i64; let carry20: i64; let carry21: i64; let carry22: i64; s0 = c0 + a0*b0; s1 = c1 + a0*b1 + a1*b0; s2 = c2 + a0*b2 + a1*b1 + a2*b0; s3 = c3 + a0*b3 + a1*b2 + a2*b1 + a3*b0; s4 = c4 + a0*b4 + a1*b3 + a2*b2 + a3*b1 + a4*b0; s5 = c5 + a0*b5 + a1*b4 + a2*b3 + a3*b2 + a4*b1 + a5*b0; s6 = c6 + a0*b6 + a1*b5 + a2*b4 + a3*b3 + a4*b2 + a5*b1 + a6*b0; s7 = c7 + a0*b7 + a1*b6 + a2*b5 + a3*b4 + a4*b3 + a5*b2 + a6*b1 + a7*b0; s8 = c8 + a0*b8 + a1*b7 + a2*b6 + a3*b5 + a4*b4 + a5*b3 + a6*b2 + a7*b1 + a8*b0; s9 = c9 + a0*b9 + a1*b8 + a2*b7 + a3*b6 + a4*b5 + a5*b4 + a6*b3 + a7*b2 + a8*b1 + a9*b0; s10 = c10 + a0*b10 + a1*b9 + a2*b8 + a3*b7 + a4*b6 + a5*b5 + a6*b4 + a7*b3 + a8*b2 + a9*b1 + a10*b0; s11 = c11 + a0*b11 + a1*b10 + a2*b9 + a3*b8 + a4*b7 + a5*b6 + a6*b5 + a7*b4 + a8*b3 + a9*b2 + a10*b1 + 
a11*b0; s12 = a1*b11 + a2*b10 + a3*b9 + a4*b8 + a5*b7 + a6*b6 + a7*b5 + a8*b4 + a9*b3 + a10*b2 + a11*b1; s13 = a2*b11 + a3*b10 + a4*b9 + a5*b8 + a6*b7 + a7*b6 + a8*b5 + a9*b4 + a10*b3 + a11*b2; s14 = a3*b11 + a4*b10 + a5*b9 + a6*b8 + a7*b7 + a8*b6 + a9*b5 + a10*b4 + a11*b3; s15 = a4*b11 + a5*b10 + a6*b9 + a7*b8 + a8*b7 + a9*b6 + a10*b5 + a11*b4; s16 = a5*b11 + a6*b10 + a7*b9 + a8*b8 + a9*b7 + a10*b6 + a11*b5; s17 = a6*b11 + a7*b10 + a8*b9 + a9*b8 + a10*b7 + a11*b6; s18 = a7*b11 + a8*b10 + a9*b9 + a10*b8 + a11*b7; s19 = a8*b11 + a9*b10 + a10*b9 + a11*b8; s20 = a9*b11 + a10*b10 + a11*b9; s21 = a10*b11 + a11*b10; s22 = a11*b11; s23 = 0; carry0 = (s0 + (1<<20)) >> 21; s1 += carry0; s0 -= carry0 << 21; carry2 = (s2 + (1<<20)) >> 21; s3 += carry2; s2 -= carry2 << 21; carry4 = (s4 + (1<<20)) >> 21; s5 += carry4; s4 -= carry4 << 21; carry6 = (s6 + (1<<20)) >> 21; s7 += carry6; s6 -= carry6 << 21; carry8 = (s8 + (1<<20)) >> 21; s9 += carry8; s8 -= carry8 << 21; carry10 = (s10 + (1<<20)) >> 21; s11 += carry10; s10 -= carry10 << 21; carry12 = (s12 + (1<<20)) >> 21; s13 += carry12; s12 -= carry12 << 21; carry14 = (s14 + (1<<20)) >> 21; s15 += carry14; s14 -= carry14 << 21; carry16 = (s16 + (1<<20)) >> 21; s17 += carry16; s16 -= carry16 << 21; carry18 = (s18 + (1<<20)) >> 21; s19 += carry18; s18 -= carry18 << 21; carry20 = (s20 + (1<<20)) >> 21; s21 += carry20; s20 -= carry20 << 21; carry22 = (s22 + (1<<20)) >> 21; s23 += carry22; s22 -= carry22 << 21; carry1 = (s1 + (1<<20)) >> 21; s2 += carry1; s1 -= carry1 << 21; carry3 = (s3 + (1<<20)) >> 21; s4 += carry3; s3 -= carry3 << 21; carry5 = (s5 + (1<<20)) >> 21; s6 += carry5; s5 -= carry5 << 21; carry7 = (s7 + (1<<20)) >> 21; s8 += carry7; s7 -= carry7 << 21; carry9 = (s9 + (1<<20)) >> 21; s10 += carry9; s9 -= carry9 << 21; carry11 = (s11 + (1<<20)) >> 21; s12 += carry11; s11 -= carry11 << 21; carry13 = (s13 + (1<<20)) >> 21; s14 += carry13; s13 -= carry13 << 21; carry15 = (s15 + (1<<20)) >> 21; s16 += carry15; s15 -= carry15 << 21; carry17 = (s17 + (1<<20)) >> 21; s18 += carry17; s17 -= carry17 << 21; carry19 = (s19 + (1<<20)) >> 21; s20 += carry19; s19 -= carry19 << 21; carry21 = (s21 + (1<<20)) >> 21; s22 += carry21; s21 -= carry21 << 21; s11 += s23 * 666643; s12 += s23 * 470296; s13 += s23 * 654183; s14 -= s23 * 997805; s15 += s23 * 136657; s16 -= s23 * 683901; s10 += s22 * 666643; s11 += s22 * 470296; s12 += s22 * 654183; s13 -= s22 * 997805; s14 += s22 * 136657; s15 -= s22 * 683901; s9 += s21 * 666643; s10 += s21 * 470296; s11 += s21 * 654183; s12 -= s21 * 997805; s13 += s21 * 136657; s14 -= s21 * 683901; s8 += s20 * 666643; s9 += s20 * 470296; s10 += s20 * 654183; s11 -= s20 * 997805; s12 += s20 * 136657; s13 -= s20 * 683901; s7 += s19 * 666643; s8 += s19 * 470296; s9 += s19 * 654183; s10 -= s19 * 997805; s11 += s19 * 136657; s12 -= s19 * 683901; s6 += s18 * 666643; s7 += s18 * 470296; s8 += s18 * 654183; s9 -= s18 * 997805; s10 += s18 * 136657; s11 -= s18 * 683901; carry6 = (s6 + (1<<20)) >> 21; s7 += carry6; s6 -= carry6 << 21; carry8 = (s8 + (1<<20)) >> 21; s9 += carry8; s8 -= carry8 << 21; carry10 = (s10 + (1<<20)) >> 21; s11 += carry10; s10 -= carry10 << 21; carry12 = (s12 + (1<<20)) >> 21; s13 += carry12; s12 -= carry12 << 21; carry14 = (s14 + (1<<20)) >> 21; s15 += carry14; s14 -= carry14 << 21; carry16 = (s16 + (1<<20)) >> 21; s17 += carry16; s16 -= carry16 << 21; carry7 = (s7 + (1<<20)) >> 21; s8 += carry7; s7 -= carry7 << 21; carry9 = (s9 + (1<<20)) >> 21; s10 += carry9; s9 -= carry9 << 21; carry11 = (s11 + (1<<20)) >> 21; s12 += 
carry11; s11 -= carry11 << 21; carry13 = (s13 + (1<<20)) >> 21; s14 += carry13; s13 -= carry13 << 21; carry15 = (s15 + (1<<20)) >> 21; s16 += carry15; s15 -= carry15 << 21; s5 += s17 * 666643; s6 += s17 * 470296; s7 += s17 * 654183; s8 -= s17 * 997805; s9 += s17 * 136657; s10 -= s17 * 683901; s4 += s16 * 666643; s5 += s16 * 470296; s6 += s16 * 654183; s7 -= s16 * 997805; s8 += s16 * 136657; s9 -= s16 * 683901; s3 += s15 * 666643; s4 += s15 * 470296; s5 += s15 * 654183; s6 -= s15 * 997805; s7 += s15 * 136657; s8 -= s15 * 683901; s2 += s14 * 666643; s3 += s14 * 470296; s4 += s14 * 654183; s5 -= s14 * 997805; s6 += s14 * 136657; s7 -= s14 * 683901; s1 += s13 * 666643; s2 += s13 * 470296; s3 += s13 * 654183; s4 -= s13 * 997805; s5 += s13 * 136657; s6 -= s13 * 683901; s0 += s12 * 666643; s1 += s12 * 470296; s2 += s12 * 654183; s3 -= s12 * 997805; s4 += s12 * 136657; s5 -= s12 * 683901; s12 = 0; carry0 = (s0 + (1<<20)) >> 21; s1 += carry0; s0 -= carry0 << 21; carry2 = (s2 + (1<<20)) >> 21; s3 += carry2; s2 -= carry2 << 21; carry4 = (s4 + (1<<20)) >> 21; s5 += carry4; s4 -= carry4 << 21; carry6 = (s6 + (1<<20)) >> 21; s7 += carry6; s6 -= carry6 << 21; carry8 = (s8 + (1<<20)) >> 21; s9 += carry8; s8 -= carry8 << 21; carry10 = (s10 + (1<<20)) >> 21; s11 += carry10; s10 -= carry10 << 21; carry1 = (s1 + (1<<20)) >> 21; s2 += carry1; s1 -= carry1 << 21; carry3 = (s3 + (1<<20)) >> 21; s4 += carry3; s3 -= carry3 << 21; carry5 = (s5 + (1<<20)) >> 21; s6 += carry5; s5 -= carry5 << 21; carry7 = (s7 + (1<<20)) >> 21; s8 += carry7; s7 -= carry7 << 21; carry9 = (s9 + (1<<20)) >> 21; s10 += carry9; s9 -= carry9 << 21; carry11 = (s11 + (1<<20)) >> 21; s12 += carry11; s11 -= carry11 << 21; s0 += s12 * 666643; s1 += s12 * 470296; s2 += s12 * 654183; s3 -= s12 * 997805; s4 += s12 * 136657; s5 -= s12 * 683901; s12 = 0; carry0 = s0 >> 21; s1 += carry0; s0 -= carry0 << 21; carry1 = s1 >> 21; s2 += carry1; s1 -= carry1 << 21; carry2 = s2 >> 21; s3 += carry2; s2 -= carry2 << 21; carry3 = s3 >> 21; s4 += carry3; s3 -= carry3 << 21; carry4 = s4 >> 21; s5 += carry4; s4 -= carry4 << 21; carry5 = s5 >> 21; s6 += carry5; s5 -= carry5 << 21; carry6 = s6 >> 21; s7 += carry6; s6 -= carry6 << 21; carry7 = s7 >> 21; s8 += carry7; s7 -= carry7 << 21; carry8 = s8 >> 21; s9 += carry8; s8 -= carry8 << 21; carry9 = s9 >> 21; s10 += carry9; s9 -= carry9 << 21; carry10 = s10 >> 21; s11 += carry10; s10 -= carry10 << 21; carry11 = s11 >> 21; s12 += carry11; s11 -= carry11 << 21; s0 += s12 * 666643; s1 += s12 * 470296; s2 += s12 * 654183; s3 -= s12 * 997805; s4 += s12 * 136657; s5 -= s12 * 683901; carry0 = s0 >> 21; s1 += carry0; s0 -= carry0 << 21; carry1 = s1 >> 21; s2 += carry1; s1 -= carry1 << 21; carry2 = s2 >> 21; s3 += carry2; s2 -= carry2 << 21; carry3 = s3 >> 21; s4 += carry3; s3 -= carry3 << 21; carry4 = s4 >> 21; s5 += carry4; s4 -= carry4 << 21; carry5 = s5 >> 21; s6 += carry5; s5 -= carry5 << 21; carry6 = s6 >> 21; s7 += carry6; s6 -= carry6 << 21; carry7 = s7 >> 21; s8 += carry7; s7 -= carry7 << 21; carry8 = s8 >> 21; s9 += carry8; s8 -= carry8 << 21; carry9 = s9 >> 21; s10 += carry9; s9 -= carry9 << 21; carry10 = s10 >> 21; s11 += carry10; s10 -= carry10 << 21; s[0] = (s0 >> 0) as u8; s[1] = (s0 >> 8) as u8; s[2] = ((s0 >> 16) | (s1 << 5)) as u8; s[3] = (s1 >> 3) as u8; s[4] = (s1 >> 11) as u8; s[5] = ((s1 >> 19) | (s2 << 2)) as u8; s[6] = (s2 >> 6) as u8; s[7] = ((s2 >> 14) | (s3 << 7)) as u8; s[8] = (s3 >> 1) as u8; s[9] = (s3 >> 9) as u8; s[10] = ((s3 >> 17) | (s4 << 4)) as u8; s[11] = (s4 >> 4) as u8; s[12] = (s4 >> 12) 
as u8; s[13] = ((s4 >> 20) | (s5 << 1)) as u8; s[14] = (s5 >> 7) as u8; s[15] = ((s5 >> 15) | (s6 << 6)) as u8; s[16] = (s6 >> 2) as u8; s[17] = (s6 >> 10) as u8; s[18] = ((s6 >> 18) | (s7 << 3)) as u8; s[19] = (s7 >> 5) as u8; s[20] = (s7 >> 13) as u8; s[21] = (s8 >> 0) as u8; s[22] = (s8 >> 8) as u8; s[23] = ((s8 >> 16) | (s9 << 5)) as u8; s[24] = (s9 >> 3) as u8; s[25] = (s9 >> 11) as u8; s[26] = ((s9 >> 19) | (s10 << 2)) as u8; s[27] = (s10 >> 6) as u8; s[28] = ((s10 >> 14) | (s11 << 7)) as u8; s[29] = (s11 >> 1) as u8; s[30] = (s11 >> 9) as u8; s[31] = (s11 >> 17) as u8; } pub fn curve25519(n: &[u8], p: &[u8]) -> [u8; 32] { let mut e = [0u8; 32]; let mut x2; let mut z2; let mut x3; let mut z3; let mut swap: i32; let mut b: i32; for (d,s) in e.iter_mut().zip(n.iter()) { *d = *s; } e[0] &= 248; e[31] &= 127; e[31] |= 64; let x1 = Fe::from_bytes(p); x2 = FE_ONE; z2 = FE_ZERO; x3 = x1; z3 = FE_ONE; swap = 0; // pos starts at 254 and goes down to 0 for pos in (0usize..255).rev() { b = (e[pos / 8] >> (pos & 7)) as i32; b &= 1; swap ^= b; x2.maybe_swap_with(&mut x3, swap); z2.maybe_swap_with(&mut z3, swap); swap = b; let d = x3 - z3; let b = x2 - z2; let a = x2 + z2; let c = x3 + z3; let da = d * a; let cb = c * b; let bb = b.square(); let aa = a.square(); let t0 = da + cb; let t1 = da - cb; let x4 = aa*bb; let e = aa - bb; let t2 = t1.square(); let t3 = e.mul_121666(); let x5 = t0.square(); let t4 = bb + t3; let z5 = x1 * t2; let z4 = e*t4; z2 = z4; z3 = z5; x2 = x4; x3 = x5; } x2.maybe_swap_with(&mut x3, swap); z2.maybe_swap_with(&mut z3, swap); (z2.invert() * x2).to_bytes() } pub fn curve25519_base(x: &[u8]) -> [u8; 32] { let mut base : [u8; 32] = [0; 32]; base[0] = 9; curve25519(x, base.as_ref()) } #[cfg(test)] mod tests { use curve25519::{Fe, curve25519_base}; #[test] fn from_to_bytes_preserves() { for i in 0..50 { let mut e: Vec<u8> = (0u32..32).map(|idx| (idx*(1289+i*761)) as u8).collect(); e[0] &= 248; e[31] &= 127; e[31] |= 64; let fe = Fe::from_bytes(e.as_ref()); let e_preserved = fe.to_bytes(); assert!(e == e_preserved.to_vec()); } } #[test] fn swap_test() { let mut f = Fe([10,20,30,40,50,60,70,80,90,100]); let mut g = Fe([11,21,31,41,51,61,71,81,91,101]); let f_initial = f; let g_initial = g; f.maybe_swap_with(&mut g, 0); assert!(f == f_initial); assert!(g == g_initial); f.maybe_swap_with(&mut g, 1); assert!(f == g_initial); assert!(g == f_initial); } struct CurveGen { which: u32 } impl CurveGen { fn new(seed: u32) -> CurveGen { CurveGen{which: seed} } } impl Iterator for CurveGen { type Item = Fe; fn next(&mut self) -> Option<Fe> { let mut e: Vec<u8> = (0..32).map(|idx| (idx*(1289+self.which*761)) as u8).collect(); e[0] &= 248; e[31] &= 127; e[31] |= 64; Some(Fe::from_bytes(e.as_ref())) } } #[test] fn mul_commutes() { for (x,y) in CurveGen::new(1).zip(CurveGen::new(2)).take(40) { assert!(x*y == y*x); }; } #[test] fn mul_assoc() { for (x,(y,z)) in CurveGen::new(1).zip(CurveGen::new(2).zip(CurveGen::new(3))).take(40) { assert!((x*y)*z == x*(y*z)); }; } #[test] fn invert_inverts() { for x in CurveGen::new(1).take(40) { assert!(x.invert().invert() == x); }; } #[test] fn square_by_mul() { for x in CurveGen::new(1).take(40) { assert!(x*x == x.square()); }; } #[test] fn base_example() { let sk : [u8; 32] = [ 0x77, 0x07, 0x6d, 0x0a, 0x73, 0x18, 0xa5, 0x7d, 0x3c, 0x16, 0xc1, 0x72, 0x51, 0xb2, 0x66, 0x45, 0xdf, 0x4c, 0x2f, 0x87, 0xeb, 0xc0, 0x99, 0x2a, 0xb1, 0x77, 0xfb, 0xa5, 0x1d, 0xb9, 0x2c, 0x2a ]; let pk = curve25519_base(sk.as_ref()); let correct : [u8; 32] = [ 
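/* Editorial note (added, hedged): this secret/public pair appears to match the widely
   used X25519 test vector (Alice's key pair in RFC 7748, section 6.1): the 32-byte
   scalar `sk` above should map under curve25519_base to the public bytes that follow. */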
0x85,0x20,0xf0,0x09,0x89,0x30,0xa7,0x54 ,0x74,0x8b,0x7d,0xdc,0xb4,0x3e,0xf7,0x5a ,0x0d,0xbf,0x3a,0x0d,0x26,0x38,0x1a,0xf4 ,0xeb,0xa4,0xa9,0x8e,0xaa,0x9b,0x4e,0x6a ]; assert_eq!(pk.to_vec(), correct.to_vec()); } } static BI: [GePrecomp; 8] = [ GePrecomp { y_plus_x: Fe([ 25967493,-14356035,29566456,3660896,-12694345,4014787,27544626,-11754271,-6079156,2047605 ]), y_minus_x: Fe([ -12545711,934262,-2722910,3049990,-727428,9406986,12720692,5043384,19500929,-15469378 ]), xy2d: Fe([ -8738181,4489570,9688441,-14785194,10184609,-12363380,29287919,11864899,-24514362,-4438546 ]), }, GePrecomp { y_plus_x: Fe([ 15636291,-9688557,24204773,-7912398,616977,-16685262,27787600,-14772189,28944400,-1550024 ]), y_minus_x: Fe([ 16568933,4717097,-11556148,-1102322,15682896,-11807043,16354577,-11775962,7689662,11199574 ]), xy2d: Fe([ 30464156,-5976125,-11779434,-15670865,23220365,15915852,7512774,10017326,-17749093,-9920357 ]), }, GePrecomp { y_plus_x: Fe([ 10861363,11473154,27284546,1981175,-30064349,12577861,32867885,14515107,-15438304,10819380 ]), y_minus_x: Fe([ 4708026,6336745,20377586,9066809,-11272109,6594696,-25653668,12483688,-12668491,5581306 ]), xy2d: Fe([ 19563160,16186464,-29386857,4097519,10237984,-4348115,28542350,13850243,-23678021,-15815942 ]), }, GePrecomp { y_plus_x: Fe([ 5153746,9909285,1723747,-2777874,30523605,5516873,19480852,5230134,-23952439,-15175766 ]), y_minus_x: Fe([ -30269007,-3463509,7665486,10083793,28475525,1649722,20654025,16520125,30598449,7715701 ]), xy2d: Fe([ 28881845,14381568,9657904,3680757,-20181635,7843316,-31400660,1370708,29794553,-1409300 ]), }, GePrecomp { y_plus_x: Fe([ -22518993,-6692182,14201702,-8745502,-23510406,8844726,18474211,-1361450,-13062696,13821877 ]), y_minus_x: Fe([ -6455177,-7839871,3374702,-4740862,-27098617,-10571707,31655028,-7212327,18853322,-14220951 ]), xy2d: Fe([ 4566830,-12963868,-28974889,-12240689,-7602672,-2830569,-8514358,-10431137,2207753,-3209784 ]), }, GePrecomp { y_plus_x: Fe([ -25154831,-4185821,29681144,7868801,-6854661,-9423865,-12437364,-663000,-31111463,-16132436 ]), y_minus_x: Fe([ 25576264,-2703214,7349804,-11814844,16472782,9300885,3844789,15725684,171356,6466918 ]), xy2d: Fe([ 23103977,13316479,9739013,-16149481,817875,-15038942,8965339,-14088058,-30714912,16193877 ]), }, GePrecomp { y_plus_x: Fe([ -33521811,3180713,-2394130,14003687,-16903474,-16270840,17238398,4729455,-18074513,9256800 ]), y_minus_x: Fe([ -25182317,-4174131,32336398,5036987,-21236817,11360617,22616405,9761698,-19827198,630305 ]), xy2d: Fe([ -13720693,2639453,-24237460,-7406481,9494427,-5774029,-6554551,-15960994,-2449256,-14291300 ]), }, GePrecomp { y_plus_x: Fe([ -3151181,-5046075,9282714,6866145,-31907062,-863023,-18940575,15033784,25105118,-7894876 ]), y_minus_x: Fe([ -24326370,15950226,-31801215,-14592823,-11662737,-5090925,1573892,-2625887,2198790,-15804619 ]), xy2d: Fe([ -3099351,10324967,-2241613,7453183,-5446979,-2735503,-13812022,-16236442,-32461234,-12290683 ]), }, ]; static GE_PRECOMP_BASE : [[GePrecomp; 8]; 32] = [ [ GePrecomp { y_plus_x: Fe([25967493,-14356035,29566456,3660896,-12694345,4014787,27544626,-11754271,-6079156,2047605]), y_minus_x: Fe([-12545711,934262,-2722910,3049990,-727428,9406986,12720692,5043384,19500929,-15469378]), xy2d: Fe([-8738181,4489570,9688441,-14785194,10184609,-12363380,29287919,11864899,-24514362,-4438546]), }, GePrecomp { y_plus_x: Fe([-12815894,-12976347,-21581243,11784320,-25355658,-2750717,-11717903,-3814571,-358445,-10211303]), y_minus_x: 
Fe([-21703237,6903825,27185491,6451973,-29577724,-9554005,-15616551,11189268,-26829678,-5319081]), xy2d: Fe([26966642,11152617,32442495,15396054,14353839,-12752335,-3128826,-9541118,-15472047,-4166697]), }, GePrecomp { y_plus_x: Fe([15636291,-9688557,24204773,-7912398,616977,-16685262,27787600,-14772189,28944400,-1550024]), y_minus_x: Fe([16568933,4717097,-11556148,-1102322,15682896,-11807043,16354577,-11775962,7689662,11199574]), xy2d: Fe([30464156,-5976125,-11779434,-15670865,23220365,15915852,7512774,10017326,-17749093,-9920357]), }, GePrecomp { y_plus_x: Fe([-17036878,13921892,10945806,-6033431,27105052,-16084379,-28926210,15006023,3284568,-6276540]), y_minus_x: Fe([23599295,-8306047,-11193664,-7687416,13236774,10506355,7464579,9656445,13059162,10374397]), xy2d: Fe([7798556,16710257,3033922,2874086,28997861,2835604,32406664,-3839045,-641708,-101325]), }, GePrecomp { y_plus_x: Fe([10861363,11473154,27284546,1981175,-30064349,12577861,32867885,14515107,-15438304,10819380]), y_minus_x: Fe([4708026,6336745,20377586,9066809,-11272109,6594696,-25653668,12483688,-12668491,5581306]), xy2d: Fe([19563160,16186464,-29386857,4097519,10237984,-4348115,28542350,13850243,-23678021,-15815942]), }, GePrecomp { y_plus_x: Fe([-15371964,-12862754,32573250,4720197,-26436522,5875511,-19188627,-15224819,-9818940,-12085777]), y_minus_x: Fe([-8549212,109983,15149363,2178705,22900618,4543417,3044240,-15689887,1762328,14866737]), xy2d: Fe([-18199695,-15951423,-10473290,1707278,-17185920,3916101,-28236412,3959421,27914454,4383652]), }, GePrecomp { y_plus_x: Fe([5153746,9909285,1723747,-2777874,30523605,5516873,19480852,5230134,-23952439,-15175766]), y_minus_x: Fe([-30269007,-3463509,7665486,10083793,28475525,1649722,20654025,16520125,30598449,7715701]), xy2d: Fe([28881845,14381568,9657904,3680757,-20181635,7843316,-31400660,1370708,29794553,-1409300]), }, GePrecomp { y_plus_x: Fe([14499471,-2729599,-33191113,-4254652,28494862,14271267,30290735,10876454,-33154098,2381726]), y_minus_x: Fe([-7195431,-2655363,-14730155,462251,-27724326,3941372,-6236617,3696005,-32300832,15351955]), xy2d: Fe([27431194,8222322,16448760,-3907995,-18707002,11938355,-32961401,-2970515,29551813,10109425]), }, ], [ GePrecomp { y_plus_x: Fe([-13657040,-13155431,-31283750,11777098,21447386,6519384,-2378284,-1627556,10092783,-4764171]), y_minus_x: Fe([27939166,14210322,4677035,16277044,-22964462,-12398139,-32508754,12005538,-17810127,12803510]), xy2d: Fe([17228999,-15661624,-1233527,300140,-1224870,-11714777,30364213,-9038194,18016357,4397660]), }, GePrecomp { y_plus_x: Fe([-10958843,-7690207,4776341,-14954238,27850028,-15602212,-26619106,14544525,-17477504,982639]), y_minus_x: Fe([29253598,15796703,-2863982,-9908884,10057023,3163536,7332899,-4120128,-21047696,9934963]), xy2d: Fe([5793303,16271923,-24131614,-10116404,29188560,1206517,-14747930,4559895,-30123922,-10897950]), }, GePrecomp { y_plus_x: Fe([-27643952,-11493006,16282657,-11036493,28414021,-15012264,24191034,4541697,-13338309,5500568]), y_minus_x: Fe([12650548,-1497113,9052871,11355358,-17680037,-8400164,-17430592,12264343,10874051,13524335]), xy2d: Fe([25556948,-3045990,714651,2510400,23394682,-10415330,33119038,5080568,-22528059,5376628]), }, GePrecomp { y_plus_x: Fe([-26088264,-4011052,-17013699,-3537628,-6726793,1920897,-22321305,-9447443,4535768,1569007]), y_minus_x: Fe([-2255422,14606630,-21692440,-8039818,28430649,8775819,-30494562,3044290,31848280,12543772]), xy2d: Fe([-22028579,2943893,-31857513,6777306,13784462,-4292203,-27377195,-2062731,7718482,14474653]), }, GePrecomp { 
y_plus_x: Fe([2385315,2454213,-22631320,46603,-4437935,-15680415,656965,-7236665,24316168,-5253567]), y_minus_x: Fe([13741529,10911568,-33233417,-8603737,-20177830,-1033297,33040651,-13424532,-20729456,8321686]), xy2d: Fe([21060490,-2212744,15712757,-4336099,1639040,10656336,23845965,-11874838,-9984458,608372]), }, GePrecomp { y_plus_x: Fe([-13672732,-15087586,-10889693,-7557059,-6036909,11305547,1123968,-6780577,27229399,23887]), y_minus_x: Fe([-23244140,-294205,-11744728,14712571,-29465699,-2029617,12797024,-6440308,-1633405,16678954]), xy2d: Fe([-29500620,4770662,-16054387,14001338,7830047,9564805,-1508144,-4795045,-17169265,4904953]), }, GePrecomp { y_plus_x: Fe([24059557,14617003,19037157,-15039908,19766093,-14906429,5169211,16191880,2128236,-4326833]), y_minus_x: Fe([-16981152,4124966,-8540610,-10653797,30336522,-14105247,-29806336,916033,-6882542,-2986532]), xy2d: Fe([-22630907,12419372,-7134229,-7473371,-16478904,16739175,285431,2763829,15736322,4143876]), }, GePrecomp { y_plus_x: Fe([2379352,11839345,-4110402,-5988665,11274298,794957,212801,-14594663,23527084,-16458268]), y_minus_x: Fe([33431127,-11130478,-17838966,-15626900,8909499,8376530,-32625340,4087881,-15188911,-14416214]), xy2d: Fe([1767683,7197987,-13205226,-2022635,-13091350,448826,5799055,4357868,-4774191,-16323038]), }, ], [ GePrecomp { y_plus_x: Fe([6721966,13833823,-23523388,-1551314,26354293,-11863321,23365147,-3949732,7390890,2759800]), y_minus_x: Fe([4409041,2052381,23373853,10530217,7676779,-12885954,21302353,-4264057,1244380,-12919645]), xy2d: Fe([-4421239,7169619,4982368,-2957590,30256825,-2777540,14086413,9208236,15886429,16489664]), }, GePrecomp { y_plus_x: Fe([1996075,10375649,14346367,13311202,-6874135,-16438411,-13693198,398369,-30606455,-712933]), y_minus_x: Fe([-25307465,9795880,-2777414,14878809,-33531835,14780363,13348553,12076947,-30836462,5113182]), xy2d: Fe([-17770784,11797796,31950843,13929123,-25888302,12288344,-30341101,-7336386,13847711,5387222]), }, GePrecomp { y_plus_x: Fe([-18582163,-3416217,17824843,-2340966,22744343,-10442611,8763061,3617786,-19600662,10370991]), y_minus_x: Fe([20246567,-14369378,22358229,-543712,18507283,-10413996,14554437,-8746092,32232924,16763880]), xy2d: Fe([9648505,10094563,26416693,14745928,-30374318,-6472621,11094161,15689506,3140038,-16510092]), }, GePrecomp { y_plus_x: Fe([-16160072,5472695,31895588,4744994,8823515,10365685,-27224800,9448613,-28774454,366295]), y_minus_x: Fe([19153450,11523972,-11096490,-6503142,-24647631,5420647,28344573,8041113,719605,11671788]), xy2d: Fe([8678025,2694440,-6808014,2517372,4964326,11152271,-15432916,-15266516,27000813,-10195553]), }, GePrecomp { y_plus_x: Fe([-15157904,7134312,8639287,-2814877,-7235688,10421742,564065,5336097,6750977,-14521026]), y_minus_x: Fe([11836410,-3979488,26297894,16080799,23455045,15735944,1695823,-8819122,8169720,16220347]), xy2d: Fe([-18115838,8653647,17578566,-6092619,-8025777,-16012763,-11144307,-2627664,-5990708,-14166033]), }, GePrecomp { y_plus_x: Fe([-23308498,-10968312,15213228,-10081214,-30853605,-11050004,27884329,2847284,2655861,1738395]), y_minus_x: Fe([-27537433,-14253021,-25336301,-8002780,-9370762,8129821,21651608,-3239336,-19087449,-11005278]), xy2d: Fe([1533110,3437855,23735889,459276,29970501,11335377,26030092,5821408,10478196,8544890]), }, GePrecomp { y_plus_x: Fe([32173121,-16129311,24896207,3921497,22579056,-3410854,19270449,12217473,17789017,-3395995]), y_minus_x: Fe([-30552961,-2228401,-15578829,-10147201,13243889,517024,15479401,-3853233,30460520,1052596]), xy2d: 
Fe([-11614875,13323618,32618793,8175907,-15230173,12596687,27491595,-4612359,3179268,-9478891]), }, GePrecomp { y_plus_x: Fe([31947069,-14366651,-4640583,-15339921,-15125977,-6039709,-14756777,-16411740,19072640,-9511060]), y_minus_x: Fe([11685058,11822410,3158003,-13952594,33402194,-4165066,5977896,-5215017,473099,5040608]), xy2d: Fe([-20290863,8198642,-27410132,11602123,1290375,-2799760,28326862,1721092,-19558642,-3131606]), }, ], [ GePrecomp { y_plus_x: Fe([7881532,10687937,7578723,7738378,-18951012,-2553952,21820786,8076149,-27868496,11538389]), y_minus_x: Fe([-19935666,3899861,18283497,-6801568,-15728660,-11249211,8754525,7446702,-5676054,5797016]), xy2d: Fe([-11295600,-3793569,-15782110,-7964573,12708869,-8456199,2014099,-9050574,-2369172,-5877341]), }, GePrecomp { y_plus_x: Fe([-22472376,-11568741,-27682020,1146375,18956691,16640559,1192730,-3714199,15123619,10811505]), y_minus_x: Fe([14352098,-3419715,-18942044,10822655,32750596,4699007,-70363,15776356,-28886779,-11974553]), xy2d: Fe([-28241164,-8072475,-4978962,-5315317,29416931,1847569,-20654173,-16484855,4714547,-9600655]), }, GePrecomp { y_plus_x: Fe([15200332,8368572,19679101,15970074,-31872674,1959451,24611599,-4543832,-11745876,12340220]), y_minus_x: Fe([12876937,-10480056,33134381,6590940,-6307776,14872440,9613953,8241152,15370987,9608631]), xy2d: Fe([-4143277,-12014408,8446281,-391603,4407738,13629032,-7724868,15866074,-28210621,-8814099]), }, GePrecomp { y_plus_x: Fe([26660628,-15677655,8393734,358047,-7401291,992988,-23904233,858697,20571223,8420556]), y_minus_x: Fe([14620715,13067227,-15447274,8264467,14106269,15080814,33531827,12516406,-21574435,-12476749]), xy2d: Fe([236881,10476226,57258,-14677024,6472998,2466984,17258519,7256740,8791136,15069930]), }, GePrecomp { y_plus_x: Fe([1276410,-9371918,22949635,-16322807,-23493039,-5702186,14711875,4874229,-30663140,-2331391]), y_minus_x: Fe([5855666,4990204,-13711848,7294284,-7804282,1924647,-1423175,-7912378,-33069337,9234253]), xy2d: Fe([20590503,-9018988,31529744,-7352666,-2706834,10650548,31559055,-11609587,18979186,13396066]), }, GePrecomp { y_plus_x: Fe([24474287,4968103,22267082,4407354,24063882,-8325180,-18816887,13594782,33514650,7021958]), y_minus_x: Fe([-11566906,-6565505,-21365085,15928892,-26158305,4315421,-25948728,-3916677,-21480480,12868082]), xy2d: Fe([-28635013,13504661,19988037,-2132761,21078225,6443208,-21446107,2244500,-12455797,-8089383]), }, GePrecomp { y_plus_x: Fe([-30595528,13793479,-5852820,319136,-25723172,-6263899,33086546,8957937,-15233648,5540521]), y_minus_x: Fe([-11630176,-11503902,-8119500,-7643073,2620056,1022908,-23710744,-1568984,-16128528,-14962807]), xy2d: Fe([23152971,775386,27395463,14006635,-9701118,4649512,1689819,892185,-11513277,-15205948]), }, GePrecomp { y_plus_x: Fe([9770129,9586738,26496094,4324120,1556511,-3550024,27453819,4763127,-19179614,5867134]), y_minus_x: Fe([-32765025,1927590,31726409,-4753295,23962434,-16019500,27846559,5931263,-29749703,-16108455]), xy2d: Fe([27461885,-2977536,22380810,1815854,-23033753,-3031938,7283490,-15148073,-19526700,7734629]), }, ], [ GePrecomp { y_plus_x: Fe([-8010264,-9590817,-11120403,6196038,29344158,-13430885,7585295,-3176626,18549497,15302069]), y_minus_x: Fe([-32658337,-6171222,-7672793,-11051681,6258878,13504381,10458790,-6418461,-8872242,8424746]), xy2d: Fe([24687205,8613276,-30667046,-3233545,1863892,-1830544,19206234,7134917,-11284482,-828919]), }, GePrecomp { y_plus_x: Fe([11334899,-9218022,8025293,12707519,17523892,-10476071,10243738,-14685461,-5066034,16498837]), y_minus_x: 
Fe([8911542,6887158,-9584260,-6958590,11145641,-9543680,17303925,-14124238,6536641,10543906]), xy2d: Fe([-28946384,15479763,-17466835,568876,-1497683,11223454,-2669190,-16625574,-27235709,8876771]), }, GePrecomp { y_plus_x: Fe([-25742899,-12566864,-15649966,-846607,-33026686,-796288,-33481822,15824474,-604426,-9039817]), y_minus_x: Fe([10330056,70051,7957388,-9002667,9764902,15609756,27698697,-4890037,1657394,3084098]), xy2d: Fe([10477963,-7470260,12119566,-13250805,29016247,-5365589,31280319,14396151,-30233575,15272409]), }, GePrecomp { y_plus_x: Fe([-12288309,3169463,28813183,16658753,25116432,-5630466,-25173957,-12636138,-25014757,1950504]), y_minus_x: Fe([-26180358,9489187,11053416,-14746161,-31053720,5825630,-8384306,-8767532,15341279,8373727]), xy2d: Fe([28685821,7759505,-14378516,-12002860,-31971820,4079242,298136,-10232602,-2878207,15190420]), }, GePrecomp { y_plus_x: Fe([-32932876,13806336,-14337485,-15794431,-24004620,10940928,8669718,2742393,-26033313,-6875003]), y_minus_x: Fe([-1580388,-11729417,-25979658,-11445023,-17411874,-10912854,9291594,-16247779,-12154742,6048605]), xy2d: Fe([-30305315,14843444,1539301,11864366,20201677,1900163,13934231,5128323,11213262,9168384]), }, GePrecomp { y_plus_x: Fe([-26280513,11007847,19408960,-940758,-18592965,-4328580,-5088060,-11105150,20470157,-16398701]), y_minus_x: Fe([-23136053,9282192,14855179,-15390078,-7362815,-14408560,-22783952,14461608,14042978,5230683]), xy2d: Fe([29969567,-2741594,-16711867,-8552442,9175486,-2468974,21556951,3506042,-5933891,-12449708]), }, GePrecomp { y_plus_x: Fe([-3144746,8744661,19704003,4581278,-20430686,6830683,-21284170,8971513,-28539189,15326563]), y_minus_x: Fe([-19464629,10110288,-17262528,-3503892,-23500387,1355669,-15523050,15300988,-20514118,9168260]), xy2d: Fe([-5353335,4488613,-23803248,16314347,7780487,-15638939,-28948358,9601605,33087103,-9011387]), }, GePrecomp { y_plus_x: Fe([-19443170,-15512900,-20797467,-12445323,-29824447,10229461,-27444329,-15000531,-5996870,15664672]), y_minus_x: Fe([23294591,-16632613,-22650781,-8470978,27844204,11461195,13099750,-2460356,18151676,13417686]), xy2d: Fe([-24722913,-4176517,-31150679,5988919,-26858785,6685065,1661597,-12551441,15271676,-15452665]), }, ], [ GePrecomp { y_plus_x: Fe([11433042,-13228665,8239631,-5279517,-1985436,-725718,-18698764,2167544,-6921301,-13440182]), y_minus_x: Fe([-31436171,15575146,30436815,12192228,-22463353,9395379,-9917708,-8638997,12215110,12028277]), xy2d: Fe([14098400,6555944,23007258,5757252,-15427832,-12950502,30123440,4617780,-16900089,-655628]), }, GePrecomp { y_plus_x: Fe([-4026201,-15240835,11893168,13718664,-14809462,1847385,-15819999,10154009,23973261,-12684474]), y_minus_x: Fe([-26531820,-3695990,-1908898,2534301,-31870557,-16550355,18341390,-11419951,32013174,-10103539]), xy2d: Fe([-25479301,10876443,-11771086,-14625140,-12369567,1838104,21911214,6354752,4425632,-837822]), }, GePrecomp { y_plus_x: Fe([-10433389,-14612966,22229858,-3091047,-13191166,776729,-17415375,-12020462,4725005,14044970]), y_minus_x: Fe([19268650,-7304421,1555349,8692754,-21474059,-9910664,6347390,-1411784,-19522291,-16109756]), xy2d: Fe([-24864089,12986008,-10898878,-5558584,-11312371,-148526,19541418,8180106,9282262,10282508]), }, GePrecomp { y_plus_x: Fe([-26205082,4428547,-8661196,-13194263,4098402,-14165257,15522535,8372215,5542595,-10702683]), y_minus_x: Fe([-10562541,14895633,26814552,-16673850,-17480754,-2489360,-2781891,6993761,-18093885,10114655]), xy2d: 
Fe([-20107055,-929418,31422704,10427861,-7110749,6150669,-29091755,-11529146,25953725,-106158]), }, GePrecomp { y_plus_x: Fe([-4234397,-8039292,-9119125,3046000,2101609,-12607294,19390020,6094296,-3315279,12831125]), y_minus_x: Fe([-15998678,7578152,5310217,14408357,-33548620,-224739,31575954,6326196,7381791,-2421839]), xy2d: Fe([-20902779,3296811,24736065,-16328389,18374254,7318640,6295303,8082724,-15362489,12339664]), }, GePrecomp { y_plus_x: Fe([27724736,2291157,6088201,-14184798,1792727,5857634,13848414,15768922,25091167,14856294]), y_minus_x: Fe([-18866652,8331043,24373479,8541013,-701998,-9269457,12927300,-12695493,-22182473,-9012899]), xy2d: Fe([-11423429,-5421590,11632845,3405020,30536730,-11674039,-27260765,13866390,30146206,9142070]), }, GePrecomp { y_plus_x: Fe([3924129,-15307516,-13817122,-10054960,12291820,-668366,-27702774,9326384,-8237858,4171294]), y_minus_x: Fe([-15921940,16037937,6713787,16606682,-21612135,2790944,26396185,3731949,345228,-5462949]), xy2d: Fe([-21327538,13448259,25284571,1143661,20614966,-8849387,2031539,-12391231,-16253183,-13582083]), }, GePrecomp { y_plus_x: Fe([31016211,-16722429,26371392,-14451233,-5027349,14854137,17477601,3842657,28012650,-16405420]), y_minus_x: Fe([-5075835,9368966,-8562079,-4600902,-15249953,6970560,-9189873,16292057,-8867157,3507940]), xy2d: Fe([29439664,3537914,23333589,6997794,-17555561,-11018068,-15209202,-15051267,-9164929,6580396]), }, ], [ GePrecomp { y_plus_x: Fe([-12185861,-7679788,16438269,10826160,-8696817,-6235611,17860444,-9273846,-2095802,9304567]), y_minus_x: Fe([20714564,-4336911,29088195,7406487,11426967,-5095705,14792667,-14608617,5289421,-477127]), xy2d: Fe([-16665533,-10650790,-6160345,-13305760,9192020,-1802462,17271490,12349094,26939669,-3752294]), }, GePrecomp { y_plus_x: Fe([-12889898,9373458,31595848,16374215,21471720,13221525,-27283495,-12348559,-3698806,117887]), y_minus_x: Fe([22263325,-6560050,3984570,-11174646,-15114008,-566785,28311253,5358056,-23319780,541964]), xy2d: Fe([16259219,3261970,2309254,-15534474,-16885711,-4581916,24134070,-16705829,-13337066,-13552195]), }, GePrecomp { y_plus_x: Fe([9378160,-13140186,-22845982,-12745264,28198281,-7244098,-2399684,-717351,690426,14876244]), y_minus_x: Fe([24977353,-314384,-8223969,-13465086,28432343,-1176353,-13068804,-12297348,-22380984,6618999]), xy2d: Fe([-1538174,11685646,12944378,13682314,-24389511,-14413193,8044829,-13817328,32239829,-5652762]), }, GePrecomp { y_plus_x: Fe([-18603066,4762990,-926250,8885304,-28412480,-3187315,9781647,-10350059,32779359,5095274]), y_minus_x: Fe([-33008130,-5214506,-32264887,-3685216,9460461,-9327423,-24601656,14506724,21639561,-2630236]), xy2d: Fe([-16400943,-13112215,25239338,15531969,3987758,-4499318,-1289502,-6863535,17874574,558605]), }, GePrecomp { y_plus_x: Fe([-13600129,10240081,9171883,16131053,-20869254,9599700,33499487,5080151,2085892,5119761]), y_minus_x: Fe([-22205145,-2519528,-16381601,414691,-25019550,2170430,30634760,-8363614,-31999993,-5759884]), xy2d: Fe([-6845704,15791202,8550074,-1312654,29928809,-12092256,27534430,-7192145,-22351378,12961482]), }, GePrecomp { y_plus_x: Fe([-24492060,-9570771,10368194,11582341,-23397293,-2245287,16533930,8206996,-30194652,-5159638]), y_minus_x: Fe([-11121496,-3382234,2307366,6362031,-135455,8868177,-16835630,7031275,7589640,8945490]), xy2d: Fe([-32152748,8917967,6661220,-11677616,-1192060,-15793393,7251489,-11182180,24099109,-14456170]), }, GePrecomp { y_plus_x: Fe([5019558,-7907470,4244127,-14714356,-26933272,6453165,-19118182,-13289025,-6231896,-10280736]), 
y_minus_x: Fe([10853594,10721687,26480089,5861829,-22995819,1972175,-1866647,-10557898,-3363451,-6441124]), xy2d: Fe([-17002408,5906790,221599,-6563147,7828208,-13248918,24362661,-2008168,-13866408,7421392]), }, GePrecomp { y_plus_x: Fe([8139927,-6546497,32257646,-5890546,30375719,1886181,-21175108,15441252,28826358,-4123029]), y_minus_x: Fe([6267086,9695052,7709135,-16603597,-32869068,-1886135,14795160,-7840124,13746021,-1742048]), xy2d: Fe([28584902,7787108,-6732942,-15050729,22846041,-7571236,-3181936,-363524,4771362,-8419958]), }, ], [ GePrecomp { y_plus_x: Fe([24949256,6376279,-27466481,-8174608,-18646154,-9930606,33543569,-12141695,3569627,11342593]), y_minus_x: Fe([26514989,4740088,27912651,3697550,19331575,-11472339,6809886,4608608,7325975,-14801071]), xy2d: Fe([-11618399,-14554430,-24321212,7655128,-1369274,5214312,-27400540,10258390,-17646694,-8186692]), }, GePrecomp { y_plus_x: Fe([11431204,15823007,26570245,14329124,18029990,4796082,-31446179,15580664,9280358,-3973687]), y_minus_x: Fe([-160783,-10326257,-22855316,-4304997,-20861367,-13621002,-32810901,-11181622,-15545091,4387441]), xy2d: Fe([-20799378,12194512,3937617,-5805892,-27154820,9340370,-24513992,8548137,20617071,-7482001]), }, GePrecomp { y_plus_x: Fe([-938825,-3930586,-8714311,16124718,24603125,-6225393,-13775352,-11875822,24345683,10325460]), y_minus_x: Fe([-19855277,-1568885,-22202708,8714034,14007766,6928528,16318175,-1010689,4766743,3552007]), xy2d: Fe([-21751364,-16730916,1351763,-803421,-4009670,3950935,3217514,14481909,10988822,-3994762]), }, GePrecomp { y_plus_x: Fe([15564307,-14311570,3101243,5684148,30446780,-8051356,12677127,-6505343,-8295852,13296005]), y_minus_x: Fe([-9442290,6624296,-30298964,-11913677,-4670981,-2057379,31521204,9614054,-30000824,12074674]), xy2d: Fe([4771191,-135239,14290749,-13089852,27992298,14998318,-1413936,-1556716,29832613,-16391035]), }, GePrecomp { y_plus_x: Fe([7064884,-7541174,-19161962,-5067537,-18891269,-2912736,25825242,5293297,-27122660,13101590]), y_minus_x: Fe([-2298563,2439670,-7466610,1719965,-27267541,-16328445,32512469,-5317593,-30356070,-4190957]), xy2d: Fe([-30006540,10162316,-33180176,3981723,-16482138,-13070044,14413974,9515896,19568978,9628812]), }, GePrecomp { y_plus_x: Fe([33053803,199357,15894591,1583059,27380243,-4580435,-17838894,-6106839,-6291786,3437740]), y_minus_x: Fe([-18978877,3884493,19469877,12726490,15913552,13614290,-22961733,70104,7463304,4176122]), xy2d: Fe([-27124001,10659917,11482427,-16070381,12771467,-6635117,-32719404,-5322751,24216882,5944158]), }, GePrecomp { y_plus_x: Fe([8894125,7450974,-2664149,-9765752,-28080517,-12389115,19345746,14680796,11632993,5847885]), y_minus_x: Fe([26942781,-2315317,9129564,-4906607,26024105,11769399,-11518837,6367194,-9727230,4782140]), xy2d: Fe([19916461,-4828410,-22910704,-11414391,25606324,-5972441,33253853,8220911,6358847,-1873857]), }, GePrecomp { y_plus_x: Fe([801428,-2081702,16569428,11065167,29875704,96627,7908388,-4480480,-13538503,1387155]), y_minus_x: Fe([19646058,5720633,-11416706,12814209,11607948,12749789,14147075,15156355,-21866831,11835260]), xy2d: Fe([19299512,1155910,28703737,14890794,2925026,7269399,26121523,15467869,-26560550,5052483]), }, ], [ GePrecomp { y_plus_x: Fe([-3017432,10058206,1980837,3964243,22160966,12322533,-6431123,-12618185,12228557,-7003677]), y_minus_x: Fe([32944382,14922211,-22844894,5188528,21913450,-8719943,4001465,13238564,-6114803,8653815]), xy2d: Fe([22865569,-4652735,27603668,-12545395,14348958,8234005,24808405,5719875,28483275,2841751]), }, GePrecomp { y_plus_x: 
Fe([-16420968,-1113305,-327719,-12107856,21886282,-15552774,-1887966,-315658,19932058,-12739203]), y_minus_x: Fe([-11656086,10087521,-8864888,-5536143,-19278573,-3055912,3999228,13239134,-4777469,-13910208]), xy2d: Fe([1382174,-11694719,17266790,9194690,-13324356,9720081,20403944,11284705,-14013818,3093230]), }, GePrecomp { y_plus_x: Fe([16650921,-11037932,-1064178,1570629,-8329746,7352753,-302424,16271225,-24049421,-6691850]), y_minus_x: Fe([-21911077,-5927941,-4611316,-5560156,-31744103,-10785293,24123614,15193618,-21652117,-16739389]), xy2d: Fe([-9935934,-4289447,-25279823,4372842,2087473,10399484,31870908,14690798,17361620,11864968]), }, GePrecomp { y_plus_x: Fe([-11307610,6210372,13206574,5806320,-29017692,-13967200,-12331205,-7486601,-25578460,-16240689]), y_minus_x: Fe([14668462,-12270235,26039039,15305210,25515617,4542480,10453892,6577524,9145645,-6443880]), xy2d: Fe([5974874,3053895,-9433049,-10385191,-31865124,3225009,-7972642,3936128,-5652273,-3050304]), }, GePrecomp { y_plus_x: Fe([30625386,-4729400,-25555961,-12792866,-20484575,7695099,17097188,-16303496,-27999779,1803632]), y_minus_x: Fe([-3553091,9865099,-5228566,4272701,-5673832,-16689700,14911344,12196514,-21405489,7047412]), xy2d: Fe([20093277,9920966,-11138194,-5343857,13161587,12044805,-32856851,4124601,-32343828,-10257566]), }, GePrecomp { y_plus_x: Fe([-20788824,14084654,-13531713,7842147,19119038,-13822605,4752377,-8714640,-21679658,2288038]), y_minus_x: Fe([-26819236,-3283715,29965059,3039786,-14473765,2540457,29457502,14625692,-24819617,12570232]), xy2d: Fe([-1063558,-11551823,16920318,12494842,1278292,-5869109,-21159943,-3498680,-11974704,4724943]), }, GePrecomp { y_plus_x: Fe([17960970,-11775534,-4140968,-9702530,-8876562,-1410617,-12907383,-8659932,-29576300,1903856]), y_minus_x: Fe([23134274,-14279132,-10681997,-1611936,20684485,15770816,-12989750,3190296,26955097,14109738]), xy2d: Fe([15308788,5320727,-30113809,-14318877,22902008,7767164,29425325,-11277562,31960942,11934971]), }, GePrecomp { y_plus_x: Fe([-27395711,8435796,4109644,12222639,-24627868,14818669,20638173,4875028,10491392,1379718]), y_minus_x: Fe([-13159415,9197841,3875503,-8936108,-1383712,-5879801,33518459,16176658,21432314,12180697]), xy2d: Fe([-11787308,11500838,13787581,-13832590,-22430679,10140205,1465425,12689540,-10301319,-13872883]), }, ], [ GePrecomp { y_plus_x: Fe([5414091,-15386041,-21007664,9643570,12834970,1186149,-2622916,-1342231,26128231,6032912]), y_minus_x: Fe([-26337395,-13766162,32496025,-13653919,17847801,-12669156,3604025,8316894,-25875034,-10437358]), xy2d: Fe([3296484,6223048,24680646,-12246460,-23052020,5903205,-8862297,-4639164,12376617,3188849]), }, GePrecomp { y_plus_x: Fe([29190488,-14659046,27549113,-1183516,3520066,-10697301,32049515,-7309113,-16109234,-9852307]), y_minus_x: Fe([-14744486,-9309156,735818,-598978,-20407687,-5057904,25246078,-15795669,18640741,-960977]), xy2d: Fe([-6928835,-16430795,10361374,5642961,4910474,12345252,-31638386,-494430,10530747,1053335]), }, GePrecomp { y_plus_x: Fe([-29265967,-14186805,-13538216,-12117373,-19457059,-10655384,-31462369,-2948985,24018831,15026644]), y_minus_x: Fe([-22592535,-3145277,-2289276,5953843,-13440189,9425631,25310643,13003497,-2314791,-15145616]), xy2d: Fe([-27419985,-603321,-8043984,-1669117,-26092265,13987819,-27297622,187899,-23166419,-2531735]), }, GePrecomp { y_plus_x: Fe([-21744398,-13810475,1844840,5021428,-10434399,-15911473,9716667,16266922,-5070217,726099]), y_minus_x: 
Fe([29370922,-6053998,7334071,-15342259,9385287,2247707,-13661962,-4839461,30007388,-15823341]), xy2d: Fe([-936379,16086691,23751945,-543318,-1167538,-5189036,9137109,730663,9835848,4555336]), }, GePrecomp { y_plus_x: Fe([-23376435,1410446,-22253753,-12899614,30867635,15826977,17693930,544696,-11985298,12422646]), y_minus_x: Fe([31117226,-12215734,-13502838,6561947,-9876867,-12757670,-5118685,-4096706,29120153,13924425]), xy2d: Fe([-17400879,-14233209,19675799,-2734756,-11006962,-5858820,-9383939,-11317700,7240931,-237388]), }, GePrecomp { y_plus_x: Fe([-31361739,-11346780,-15007447,-5856218,-22453340,-12152771,1222336,4389483,3293637,-15551743]), y_minus_x: Fe([-16684801,-14444245,11038544,11054958,-13801175,-3338533,-24319580,7733547,12796905,-6335822]), xy2d: Fe([-8759414,-10817836,-25418864,10783769,-30615557,-9746811,-28253339,3647836,3222231,-11160462]), }, GePrecomp { y_plus_x: Fe([18606113,1693100,-25448386,-15170272,4112353,10045021,23603893,-2048234,-7550776,2484985]), y_minus_x: Fe([9255317,-3131197,-12156162,-1004256,13098013,-9214866,16377220,-2102812,-19802075,-3034702]), xy2d: Fe([-22729289,7496160,-5742199,11329249,19991973,-3347502,-31718148,9936966,-30097688,-10618797]), }, GePrecomp { y_plus_x: Fe([21878590,-5001297,4338336,13643897,-3036865,13160960,19708896,5415497,-7360503,-4109293]), y_minus_x: Fe([27736861,10103576,12500508,8502413,-3413016,-9633558,10436918,-1550276,-23659143,-8132100]), xy2d: Fe([19492550,-12104365,-29681976,-852630,-3208171,12403437,30066266,8367329,13243957,8709688]), }, ], [ GePrecomp { y_plus_x: Fe([12015105,2801261,28198131,10151021,24818120,-4743133,-11194191,-5645734,5150968,7274186]), y_minus_x: Fe([2831366,-12492146,1478975,6122054,23825128,-12733586,31097299,6083058,31021603,-9793610]), xy2d: Fe([-2529932,-2229646,445613,10720828,-13849527,-11505937,-23507731,16354465,15067285,-14147707]), }, GePrecomp { y_plus_x: Fe([7840942,14037873,-33364863,15934016,-728213,-3642706,21403988,1057586,-19379462,-12403220]), y_minus_x: Fe([915865,-16469274,15608285,-8789130,-24357026,6060030,-17371319,8410997,-7220461,16527025]), xy2d: Fe([32922597,-556987,20336074,-16184568,10903705,-5384487,16957574,52992,23834301,6588044]), }, GePrecomp { y_plus_x: Fe([32752030,11232950,3381995,-8714866,22652988,-10744103,17159699,16689107,-20314580,-1305992]), y_minus_x: Fe([-4689649,9166776,-25710296,-10847306,11576752,12733943,7924251,-2752281,1976123,-7249027]), xy2d: Fe([21251222,16309901,-2983015,-6783122,30810597,12967303,156041,-3371252,12331345,-8237197]), }, GePrecomp { y_plus_x: Fe([8651614,-4477032,-16085636,-4996994,13002507,2950805,29054427,-5106970,10008136,-4667901]), y_minus_x: Fe([31486080,15114593,-14261250,12951354,14369431,-7387845,16347321,-13662089,8684155,-10532952]), xy2d: Fe([19443825,11385320,24468943,-9659068,-23919258,2187569,-26263207,-6086921,31316348,14219878]), }, GePrecomp { y_plus_x: Fe([-28594490,1193785,32245219,11392485,31092169,15722801,27146014,6992409,29126555,9207390]), y_minus_x: Fe([32382935,1110093,18477781,11028262,-27411763,-7548111,-4980517,10843782,-7957600,-14435730]), xy2d: Fe([2814918,7836403,27519878,-7868156,-20894015,-11553689,-21494559,8550130,28346258,1994730]), }, GePrecomp { y_plus_x: Fe([-19578299,8085545,-14000519,-3948622,2785838,-16231307,-19516951,7174894,22628102,8115180]), y_minus_x: Fe([-30405132,955511,-11133838,-15078069,-32447087,-13278079,-25651578,3317160,-9943017,930272]), xy2d: Fe([-15303681,-6833769,28856490,1357446,23421993,1057177,24091212,-1388970,-22765376,-10650715]), }, GePrecomp { 
y_plus_x: Fe([-22751231,-5303997,-12907607,-12768866,-15811511,-7797053,-14839018,-16554220,-1867018,8398970]), y_minus_x: Fe([-31969310,2106403,-4736360,1362501,12813763,16200670,22981545,-6291273,18009408,-15772772]), xy2d: Fe([-17220923,-9545221,-27784654,14166835,29815394,7444469,29551787,-3727419,19288549,1325865]), }, GePrecomp { y_plus_x: Fe([15100157,-15835752,-23923978,-1005098,-26450192,15509408,12376730,-3479146,33166107,-8042750]), y_minus_x: Fe([20909231,13023121,-9209752,16251778,-5778415,-8094914,12412151,10018715,2213263,-13878373]), xy2d: Fe([32529814,-11074689,30361439,-16689753,-9135940,1513226,22922121,6382134,-5766928,8371348]), }, ], [ GePrecomp { y_plus_x: Fe([9923462,11271500,12616794,3544722,-29998368,-1721626,12891687,-8193132,-26442943,10486144]), y_minus_x: Fe([-22597207,-7012665,8587003,-8257861,4084309,-12970062,361726,2610596,-23921530,-11455195]), xy2d: Fe([5408411,-1136691,-4969122,10561668,24145918,14240566,31319731,-4235541,19985175,-3436086]), }, GePrecomp { y_plus_x: Fe([-13994457,16616821,14549246,3341099,32155958,13648976,-17577068,8849297,65030,8370684]), y_minus_x: Fe([-8320926,-12049626,31204563,5839400,-20627288,-1057277,-19442942,6922164,12743482,-9800518]), xy2d: Fe([-2361371,12678785,28815050,4759974,-23893047,4884717,23783145,11038569,18800704,255233]), }, GePrecomp { y_plus_x: Fe([-5269658,-1773886,13957886,7990715,23132995,728773,13393847,9066957,19258688,-14753793]), y_minus_x: Fe([-2936654,-10827535,-10432089,14516793,-3640786,4372541,-31934921,2209390,-1524053,2055794]), xy2d: Fe([580882,16705327,5468415,-2683018,-30926419,-14696000,-7203346,-8994389,-30021019,7394435]), }, GePrecomp { y_plus_x: Fe([23838809,1822728,-15738443,15242727,8318092,-3733104,-21672180,-3492205,-4821741,14799921]), y_minus_x: Fe([13345610,9759151,3371034,-16137791,16353039,8577942,31129804,13496856,-9056018,7402518]), xy2d: Fe([2286874,-4435931,-20042458,-2008336,-13696227,5038122,11006906,-15760352,8205061,1607563]), }, GePrecomp { y_plus_x: Fe([14414086,-8002132,3331830,-3208217,22249151,-5594188,18364661,-2906958,30019587,-9029278]), y_minus_x: Fe([-27688051,1585953,-10775053,931069,-29120221,-11002319,-14410829,12029093,9944378,8024]), xy2d: Fe([4368715,-3709630,29874200,-15022983,-20230386,-11410704,-16114594,-999085,-8142388,5640030]), }, GePrecomp { y_plus_x: Fe([10299610,13746483,11661824,16234854,7630238,5998374,9809887,-16694564,15219798,-14327783]), y_minus_x: Fe([27425505,-5719081,3055006,10660664,23458024,595578,-15398605,-1173195,-18342183,9742717]), xy2d: Fe([6744077,2427284,26042789,2720740,-847906,1118974,32324614,7406442,12420155,1994844]), }, GePrecomp { y_plus_x: Fe([14012521,-5024720,-18384453,-9578469,-26485342,-3936439,-13033478,-10909803,24319929,-6446333]), y_minus_x: Fe([16412690,-4507367,10772641,15929391,-17068788,-4658621,10555945,-10484049,-30102368,-4739048]), xy2d: Fe([22397382,-7767684,-9293161,-12792868,17166287,-9755136,-27333065,6199366,21880021,-12250760]), }, GePrecomp { y_plus_x: Fe([-4283307,5368523,-31117018,8163389,-30323063,3209128,16557151,8890729,8840445,4957760]), y_minus_x: Fe([-15447727,709327,-6919446,-10870178,-29777922,6522332,-21720181,12130072,-14796503,5005757]), xy2d: Fe([-2114751,-14308128,23019042,15765735,-25269683,6002752,10183197,-13239326,-16395286,-2176112]), }, ], [ GePrecomp { y_plus_x: Fe([-19025756,1632005,13466291,-7995100,-23640451,16573537,-32013908,-3057104,22208662,2000468]), y_minus_x: Fe([3065073,-1412761,-25598674,-361432,-17683065,-5703415,-8164212,11248527,-3691214,-7414184]), xy2d: 
Fe([10379208,-6045554,8877319,1473647,-29291284,-12507580,16690915,2553332,-3132688,16400289]), }, GePrecomp { y_plus_x: Fe([15716668,1254266,-18472690,7446274,-8448918,6344164,-22097271,-7285580,26894937,9132066]), y_minus_x: Fe([24158887,12938817,11085297,-8177598,-28063478,-4457083,-30576463,64452,-6817084,-2692882]), xy2d: Fe([13488534,7794716,22236231,5989356,25426474,-12578208,2350710,-3418511,-4688006,2364226]), }, GePrecomp { y_plus_x: Fe([16335052,9132434,25640582,6678888,1725628,8517937,-11807024,-11697457,15445875,-7798101]), y_minus_x: Fe([29004207,-7867081,28661402,-640412,-12794003,-7943086,31863255,-4135540,-278050,-15759279]), xy2d: Fe([-6122061,-14866665,-28614905,14569919,-10857999,-3591829,10343412,-6976290,-29828287,-10815811]), }, GePrecomp { y_plus_x: Fe([27081650,3463984,14099042,-4517604,1616303,-6205604,29542636,15372179,17293797,960709]), y_minus_x: Fe([20263915,11434237,-5765435,11236810,13505955,-10857102,-16111345,6493122,-19384511,7639714]), xy2d: Fe([-2830798,-14839232,25403038,-8215196,-8317012,-16173699,18006287,-16043750,29994677,-15808121]), }, GePrecomp { y_plus_x: Fe([9769828,5202651,-24157398,-13631392,-28051003,-11561624,-24613141,-13860782,-31184575,709464]), y_minus_x: Fe([12286395,13076066,-21775189,-1176622,-25003198,4057652,-32018128,-8890874,16102007,13205847]), xy2d: Fe([13733362,5599946,10557076,3195751,-5557991,8536970,-25540170,8525972,10151379,10394400]), }, GePrecomp { y_plus_x: Fe([4024660,-16137551,22436262,12276534,-9099015,-2686099,19698229,11743039,-33302334,8934414]), y_minus_x: Fe([-15879800,-4525240,-8580747,-2934061,14634845,-698278,-9449077,3137094,-11536886,11721158]), xy2d: Fe([17555939,-5013938,8268606,2331751,-22738815,9761013,9319229,8835153,-9205489,-1280045]), }, GePrecomp { y_plus_x: Fe([-461409,-7830014,20614118,16688288,-7514766,-4807119,22300304,505429,6108462,-6183415]), y_minus_x: Fe([-5070281,12367917,-30663534,3234473,32617080,-8422642,29880583,-13483331,-26898490,-7867459]), xy2d: Fe([-31975283,5726539,26934134,10237677,-3173717,-605053,24199304,3795095,7592688,-14992079]), }, GePrecomp { y_plus_x: Fe([21594432,-14964228,17466408,-4077222,32537084,2739898,6407723,12018833,-28256052,4298412]), y_minus_x: Fe([-20650503,-11961496,-27236275,570498,3767144,-1717540,13891942,-1569194,13717174,10805743]), xy2d: Fe([-14676630,-15644296,15287174,11927123,24177847,-8175568,-796431,14860609,-26938930,-5863836]), }, ], [ GePrecomp { y_plus_x: Fe([12962541,5311799,-10060768,11658280,18855286,-7954201,13286263,-12808704,-4381056,9882022]), y_minus_x: Fe([18512079,11319350,-20123124,15090309,18818594,5271736,-22727904,3666879,-23967430,-3299429]), xy2d: Fe([-6789020,-3146043,16192429,13241070,15898607,-14206114,-10084880,-6661110,-2403099,5276065]), }, GePrecomp { y_plus_x: Fe([30169808,-5317648,26306206,-11750859,27814964,7069267,7152851,3684982,1449224,13082861]), y_minus_x: Fe([10342826,3098505,2119311,193222,25702612,12233820,23697382,15056736,-21016438,-8202000]), xy2d: Fe([-33150110,3261608,22745853,7948688,19370557,-15177665,-26171976,6482814,-10300080,-11060101]), }, GePrecomp { y_plus_x: Fe([32869458,-5408545,25609743,15678670,-10687769,-15471071,26112421,2521008,-22664288,6904815]), y_minus_x: Fe([29506923,4457497,3377935,-9796444,-30510046,12935080,1561737,3841096,-29003639,-6657642]), xy2d: Fe([10340844,-6630377,-18656632,-2278430,12621151,-13339055,30878497,-11824370,-25584551,5181966]), }, GePrecomp { y_plus_x: Fe([25940115,-12658025,17324188,-10307374,-8671468,15029094,24396252,-16450922,-2322852,-12388574]), 
y_minus_x: Fe([-21765684,9916823,-1300409,4079498,-1028346,11909559,1782390,12641087,20603771,-6561742]), xy2d: Fe([-18882287,-11673380,24849422,11501709,13161720,-4768874,1925523,11914390,4662781,7820689]), }, GePrecomp { y_plus_x: Fe([12241050,-425982,8132691,9393934,32846760,-1599620,29749456,12172924,16136752,15264020]), y_minus_x: Fe([-10349955,-14680563,-8211979,2330220,-17662549,-14545780,10658213,6671822,19012087,3772772]), xy2d: Fe([3753511,-3421066,10617074,2028709,14841030,-6721664,28718732,-15762884,20527771,12988982]), }, GePrecomp { y_plus_x: Fe([-14822485,-5797269,-3707987,12689773,-898983,-10914866,-24183046,-10564943,3299665,-12424953]), y_minus_x: Fe([-16777703,-15253301,-9642417,4978983,3308785,8755439,6943197,6461331,-25583147,8991218]), xy2d: Fe([-17226263,1816362,-1673288,-6086439,31783888,-8175991,-32948145,7417950,-30242287,1507265]), }, GePrecomp { y_plus_x: Fe([29692663,6829891,-10498800,4334896,20945975,-11906496,-28887608,8209391,14606362,-10647073]), y_minus_x: Fe([-3481570,8707081,32188102,5672294,22096700,1711240,-33020695,9761487,4170404,-2085325]), xy2d: Fe([-11587470,14855945,-4127778,-1531857,-26649089,15084046,22186522,16002000,-14276837,-8400798]), }, GePrecomp { y_plus_x: Fe([-4811456,13761029,-31703877,-2483919,-3312471,7869047,-7113572,-9620092,13240845,10965870]), y_minus_x: Fe([-7742563,-8256762,-14768334,-13656260,-23232383,12387166,4498947,14147411,29514390,4302863]), xy2d: Fe([-13413405,-12407859,20757302,-13801832,14785143,8976368,-5061276,-2144373,17846988,-13971927]), }, ], [ GePrecomp { y_plus_x: Fe([-2244452,-754728,-4597030,-1066309,-6247172,1455299,-21647728,-9214789,-5222701,12650267]), y_minus_x: Fe([-9906797,-16070310,21134160,12198166,-27064575,708126,387813,13770293,-19134326,10958663]), xy2d: Fe([22470984,12369526,23446014,-5441109,-21520802,-9698723,-11772496,-11574455,-25083830,4271862]), }, GePrecomp { y_plus_x: Fe([-25169565,-10053642,-19909332,15361595,-5984358,2159192,75375,-4278529,-32526221,8469673]), y_minus_x: Fe([15854970,4148314,-8893890,7259002,11666551,13824734,-30531198,2697372,24154791,-9460943]), xy2d: Fe([15446137,-15806644,29759747,14019369,30811221,-9610191,-31582008,12840104,24913809,9815020]), }, GePrecomp { y_plus_x: Fe([-4709286,-5614269,-31841498,-12288893,-14443537,10799414,-9103676,13438769,18735128,9466238]), y_minus_x: Fe([11933045,9281483,5081055,-5183824,-2628162,-4905629,-7727821,-10896103,-22728655,16199064]), xy2d: Fe([14576810,379472,-26786533,-8317236,-29426508,-10812974,-102766,1876699,30801119,2164795]), }, GePrecomp { y_plus_x: Fe([15995086,3199873,13672555,13712240,-19378835,-4647646,-13081610,-15496269,-13492807,1268052]), y_minus_x: Fe([-10290614,-3659039,-3286592,10948818,23037027,3794475,-3470338,-12600221,-17055369,3565904]), xy2d: Fe([29210088,-9419337,-5919792,-4952785,10834811,-13327726,-16512102,-10820713,-27162222,-14030531]), }, GePrecomp { y_plus_x: Fe([-13161890,15508588,16663704,-8156150,-28349942,9019123,-29183421,-3769423,2244111,-14001979]), y_minus_x: Fe([-5152875,-3800936,-9306475,-6071583,16243069,14684434,-25673088,-16180800,13491506,4641841]), xy2d: Fe([10813417,643330,-19188515,-728916,30292062,-16600078,27548447,-7721242,14476989,-12767431]), }, GePrecomp { y_plus_x: Fe([10292079,9984945,6481436,8279905,-7251514,7032743,27282937,-1644259,-27912810,12651324]), y_minus_x: Fe([-31185513,-813383,22271204,11835308,10201545,15351028,17099662,3988035,21721536,-3148940]), xy2d: Fe([10202177,-6545839,-31373232,-9574638,-32150642,-8119683,-12906320,3852694,13216206,14842320]), }, 
GePrecomp { y_plus_x: Fe([-15815640,-10601066,-6538952,-7258995,-6984659,-6581778,-31500847,13765824,-27434397,9900184]), y_minus_x: Fe([14465505,-13833331,-32133984,-14738873,-27443187,12990492,33046193,15796406,-7051866,-8040114]), xy2d: Fe([30924417,-8279620,6359016,-12816335,16508377,9071735,-25488601,15413635,9524356,-7018878]), }, GePrecomp { y_plus_x: Fe([12274201,-13175547,32627641,-1785326,6736625,13267305,5237659,-5109483,15663516,4035784]), y_minus_x: Fe([-2951309,8903985,17349946,601635,-16432815,-4612556,-13732739,-15889334,-22258478,4659091]), xy2d: Fe([-16916263,-4952973,-30393711,-15158821,20774812,15897498,5736189,15026997,-2178256,-13455585]), }, ], [ GePrecomp { y_plus_x: Fe([-8858980,-2219056,28571666,-10155518,-474467,-10105698,-3801496,278095,23440562,-290208]), y_minus_x: Fe([10226241,-5928702,15139956,120818,-14867693,5218603,32937275,11551483,-16571960,-7442864]), xy2d: Fe([17932739,-12437276,-24039557,10749060,11316803,7535897,22503767,5561594,-3646624,3898661]), }, GePrecomp { y_plus_x: Fe([7749907,-969567,-16339731,-16464,-25018111,15122143,-1573531,7152530,21831162,1245233]), y_minus_x: Fe([26958459,-14658026,4314586,8346991,-5677764,11960072,-32589295,-620035,-30402091,-16716212]), xy2d: Fe([-12165896,9166947,33491384,13673479,29787085,13096535,6280834,14587357,-22338025,13987525]), }, GePrecomp { y_plus_x: Fe([-24349909,7778775,21116000,15572597,-4833266,-5357778,-4300898,-5124639,-7469781,-2858068]), y_minus_x: Fe([9681908,-6737123,-31951644,13591838,-6883821,386950,31622781,6439245,-14581012,4091397]), xy2d: Fe([-8426427,1470727,-28109679,-1596990,3978627,-5123623,-19622683,12092163,29077877,-14741988]), }, GePrecomp { y_plus_x: Fe([5269168,-6859726,-13230211,-8020715,25932563,1763552,-5606110,-5505881,-20017847,2357889]), y_minus_x: Fe([32264008,-15407652,-5387735,-1160093,-2091322,-3946900,23104804,-12869908,5727338,189038]), xy2d: Fe([14609123,-8954470,-6000566,-16622781,-14577387,-7743898,-26745169,10942115,-25888931,-14884697]), }, GePrecomp { y_plus_x: Fe([20513500,5557931,-15604613,7829531,26413943,-2019404,-21378968,7471781,13913677,-5137875]), y_minus_x: Fe([-25574376,11967826,29233242,12948236,-6754465,4713227,-8940970,14059180,12878652,8511905]), xy2d: Fe([-25656801,3393631,-2955415,-7075526,-2250709,9366908,-30223418,6812974,5568676,-3127656]), }, GePrecomp { y_plus_x: Fe([11630004,12144454,2116339,13606037,27378885,15676917,-17408753,-13504373,-14395196,8070818]), y_minus_x: Fe([27117696,-10007378,-31282771,-5570088,1127282,12772488,-29845906,10483306,-11552749,-1028714]), xy2d: Fe([10637467,-5688064,5674781,1072708,-26343588,-6982302,-1683975,9177853,-27493162,15431203]), }, GePrecomp { y_plus_x: Fe([20525145,10892566,-12742472,12779443,-29493034,16150075,-28240519,14943142,-15056790,-7935931]), y_minus_x: Fe([-30024462,5626926,-551567,-9981087,753598,11981191,25244767,-3239766,-3356550,9594024]), xy2d: Fe([-23752644,2636870,-5163910,-10103818,585134,7877383,11345683,-6492290,13352335,-10977084]), }, GePrecomp { y_plus_x: Fe([-1931799,-5407458,3304649,-12884869,17015806,-4877091,-29783850,-7752482,-13215537,-319204]), y_minus_x: Fe([20239939,6607058,6203985,3483793,-18386976,-779229,-20723742,15077870,-22750759,14523817]), xy2d: Fe([27406042,-6041657,27423596,-4497394,4996214,10002360,-28842031,-4545494,-30172742,-4805667]), }, ], [ GePrecomp { y_plus_x: Fe([11374242,12660715,17861383,-12540833,10935568,1099227,-13886076,-9091740,-27727044,11358504]), y_minus_x: 
Fe([-12730809,10311867,1510375,10778093,-2119455,-9145702,32676003,11149336,-26123651,4985768]), xy2d: Fe([-19096303,341147,-6197485,-239033,15756973,-8796662,-983043,13794114,-19414307,-15621255]), }, GePrecomp { y_plus_x: Fe([6490081,11940286,25495923,-7726360,8668373,-8751316,3367603,6970005,-1691065,-9004790]), y_minus_x: Fe([1656497,13457317,15370807,6364910,13605745,8362338,-19174622,-5475723,-16796596,-5031438]), xy2d: Fe([-22273315,-13524424,-64685,-4334223,-18605636,-10921968,-20571065,-7007978,-99853,-10237333]), }, GePrecomp { y_plus_x: Fe([17747465,10039260,19368299,-4050591,-20630635,-16041286,31992683,-15857976,-29260363,-5511971]), y_minus_x: Fe([31932027,-4986141,-19612382,16366580,22023614,88450,11371999,-3744247,4882242,-10626905]), xy2d: Fe([29796507,37186,19818052,10115756,-11829032,3352736,18551198,3272828,-5190932,-4162409]), }, GePrecomp { y_plus_x: Fe([12501286,4044383,-8612957,-13392385,-32430052,5136599,-19230378,-3529697,330070,-3659409]), y_minus_x: Fe([6384877,2899513,17807477,7663917,-2358888,12363165,25366522,-8573892,-271295,12071499]), xy2d: Fe([-8365515,-4042521,25133448,-4517355,-6211027,2265927,-32769618,1936675,-5159697,3829363]), }, GePrecomp { y_plus_x: Fe([28425966,-5835433,-577090,-4697198,-14217555,6870930,7921550,-6567787,26333140,14267664]), y_minus_x: Fe([-11067219,11871231,27385719,-10559544,-4585914,-11189312,10004786,-8709488,-21761224,8930324]), xy2d: Fe([-21197785,-16396035,25654216,-1725397,12282012,11008919,1541940,4757911,-26491501,-16408940]), }, GePrecomp { y_plus_x: Fe([13537262,-7759490,-20604840,10961927,-5922820,-13218065,-13156584,6217254,-15943699,13814990]), y_minus_x: Fe([-17422573,15157790,18705543,29619,24409717,-260476,27361681,9257833,-1956526,-1776914]), xy2d: Fe([-25045300,-10191966,15366585,15166509,-13105086,8423556,-29171540,12361135,-18685978,4578290]), }, GePrecomp { y_plus_x: Fe([24579768,3711570,1342322,-11180126,-27005135,14124956,-22544529,14074919,21964432,8235257]), y_minus_x: Fe([-6528613,-2411497,9442966,-5925588,12025640,-1487420,-2981514,-1669206,13006806,2355433]), xy2d: Fe([-16304899,-13605259,-6632427,-5142349,16974359,-10911083,27202044,1719366,1141648,-12796236]), }, GePrecomp { y_plus_x: Fe([-12863944,-13219986,-8318266,-11018091,-6810145,-4843894,13475066,-3133972,32674895,13715045]), y_minus_x: Fe([11423335,-5468059,32344216,8962751,24989809,9241752,-13265253,16086212,-28740881,-15642093]), xy2d: Fe([-1409668,12530728,-6368726,10847387,19531186,-14132160,-11709148,7791794,-27245943,4383347]), }, ], [ GePrecomp { y_plus_x: Fe([-28970898,5271447,-1266009,-9736989,-12455236,16732599,-4862407,-4906449,27193557,6245191]), y_minus_x: Fe([-15193956,5362278,-1783893,2695834,4960227,12840725,23061898,3260492,22510453,8577507]), xy2d: Fe([-12632451,11257346,-32692994,13548177,-721004,10879011,31168030,13952092,-29571492,-3635906]), }, GePrecomp { y_plus_x: Fe([3877321,-9572739,32416692,5405324,-11004407,-13656635,3759769,11935320,5611860,8164018]), y_minus_x: Fe([-16275802,14667797,15906460,12155291,-22111149,-9039718,32003002,-8832289,5773085,-8422109]), xy2d: Fe([-23788118,-8254300,1950875,8937633,18686727,16459170,-905725,12376320,31632953,190926]), }, GePrecomp { y_plus_x: Fe([-24593607,-16138885,-8423991,13378746,14162407,6901328,-8288749,4508564,-25341555,-3627528]), y_minus_x: Fe([8884438,-5884009,6023974,10104341,-6881569,-4941533,18722941,-14786005,-1672488,827625]), xy2d: Fe([-32720583,-16289296,-32503547,7101210,13354605,2659080,-1800575,-14108036,-24878478,1541286]), }, GePrecomp { y_plus_x: 
Fe([2901347,-1117687,3880376,-10059388,-17620940,-3612781,-21802117,-3567481,20456845,-1885033]), y_minus_x: Fe([27019610,12299467,-13658288,-1603234,-12861660,-4861471,-19540150,-5016058,29439641,15138866]), xy2d: Fe([21536104,-6626420,-32447818,-10690208,-22408077,5175814,-5420040,-16361163,7779328,109896]), }, GePrecomp { y_plus_x: Fe([30279744,14648750,-8044871,6425558,13639621,-743509,28698390,12180118,23177719,-554075]), y_minus_x: Fe([26572847,3405927,-31701700,12890905,-19265668,5335866,-6493768,2378492,4439158,-13279347]), xy2d: Fe([-22716706,3489070,-9225266,-332753,18875722,-1140095,14819434,-12731527,-17717757,-5461437]), }, GePrecomp { y_plus_x: Fe([-5056483,16566551,15953661,3767752,-10436499,15627060,-820954,2177225,8550082,-15114165]), y_minus_x: Fe([-18473302,16596775,-381660,15663611,22860960,15585581,-27844109,-3582739,-23260460,-8428588]), xy2d: Fe([-32480551,15707275,-8205912,-5652081,29464558,2713815,-22725137,15860482,-21902570,1494193]), }, GePrecomp { y_plus_x: Fe([-19562091,-14087393,-25583872,-9299552,13127842,759709,21923482,16529112,8742704,12967017]), y_minus_x: Fe([-28464899,1553205,32536856,-10473729,-24691605,-406174,-8914625,-2933896,-29903758,15553883]), xy2d: Fe([21877909,3230008,9881174,10539357,-4797115,2841332,11543572,14513274,19375923,-12647961]), }, GePrecomp { y_plus_x: Fe([8832269,-14495485,13253511,5137575,5037871,4078777,24880818,-6222716,2862653,9455043]), y_minus_x: Fe([29306751,5123106,20245049,-14149889,9592566,8447059,-2077124,-2990080,15511449,4789663]), xy2d: Fe([-20679756,7004547,8824831,-9434977,-4045704,-3750736,-5754762,108893,23513200,16652362]), }, ], [ GePrecomp { y_plus_x: Fe([-33256173,4144782,-4476029,-6579123,10770039,-7155542,-6650416,-12936300,-18319198,10212860]), y_minus_x: Fe([2756081,8598110,7383731,-6859892,22312759,-1105012,21179801,2600940,-9988298,-12506466]), xy2d: Fe([-24645692,13317462,-30449259,-15653928,21365574,-10869657,11344424,864440,-2499677,-16710063]), }, GePrecomp { y_plus_x: Fe([-26432803,6148329,-17184412,-14474154,18782929,-275997,-22561534,211300,2719757,4940997]), y_minus_x: Fe([-1323882,3911313,-6948744,14759765,-30027150,7851207,21690126,8518463,26699843,5276295]), xy2d: Fe([-13149873,-6429067,9396249,365013,24703301,-10488939,1321586,149635,-15452774,7159369]), }, GePrecomp { y_plus_x: Fe([9987780,-3404759,17507962,9505530,9731535,-2165514,22356009,8312176,22477218,-8403385]), y_minus_x: Fe([18155857,-16504990,19744716,9006923,15154154,-10538976,24256460,-4864995,-22548173,9334109]), xy2d: Fe([2986088,-4911893,10776628,-3473844,10620590,-7083203,-21413845,14253545,-22587149,536906]), }, GePrecomp { y_plus_x: Fe([4377756,8115836,24567078,15495314,11625074,13064599,7390551,10589625,10838060,-15420424]), y_minus_x: Fe([-19342404,867880,9277171,-3218459,-14431572,-1986443,19295826,-15796950,6378260,699185]), xy2d: Fe([7895026,4057113,-7081772,-13077756,-17886831,-323126,-716039,15693155,-5045064,-13373962]), }, GePrecomp { y_plus_x: Fe([-7737563,-5869402,-14566319,-7406919,11385654,13201616,31730678,-10962840,-3918636,-9669325]), y_minus_x: Fe([10188286,-15770834,-7336361,13427543,22223443,14896287,30743455,7116568,-21786507,5427593]), xy2d: Fe([696102,13206899,27047647,-10632082,15285305,-9853179,10798490,-4578720,19236243,12477404]), }, GePrecomp { y_plus_x: Fe([-11229439,11243796,-17054270,-8040865,-788228,-8167967,-3897669,11180504,-23169516,7733644]), y_minus_x: Fe([17800790,-14036179,-27000429,-11766671,23887827,3149671,23466177,-10538171,10322027,15313801]), xy2d: 
Fe([26246234,11968874,32263343,-5468728,6830755,-13323031,-15794704,-101982,-24449242,10890804]), }, GePrecomp { y_plus_x: Fe([-31365647,10271363,-12660625,-6267268,16690207,-13062544,-14982212,16484931,25180797,-5334884]), y_minus_x: Fe([-586574,10376444,-32586414,-11286356,19801893,10997610,2276632,9482883,316878,13820577]), xy2d: Fe([-9882808,-4510367,-2115506,16457136,-11100081,11674996,30756178,-7515054,30696930,-3712849]), }, GePrecomp { y_plus_x: Fe([32988917,-9603412,12499366,7910787,-10617257,-11931514,-7342816,-9985397,-32349517,7392473]), y_minus_x: Fe([-8855661,15927861,9866406,-3649411,-2396914,-16655781,-30409476,-9134995,25112947,-2926644]), xy2d: Fe([-2504044,-436966,25621774,-5678772,15085042,-5479877,-24884878,-13526194,5537438,-13914319]), }, ], [ GePrecomp { y_plus_x: Fe([-11225584,2320285,-9584280,10149187,-33444663,5808648,-14876251,-1729667,31234590,6090599]), y_minus_x: Fe([-9633316,116426,26083934,2897444,-6364437,-2688086,609721,15878753,-6970405,-9034768]), xy2d: Fe([-27757857,247744,-15194774,-9002551,23288161,-10011936,-23869595,6503646,20650474,1804084]), }, GePrecomp { y_plus_x: Fe([-27589786,15456424,8972517,8469608,15640622,4439847,3121995,-10329713,27842616,-202328]), y_minus_x: Fe([-15306973,2839644,22530074,10026331,4602058,5048462,28248656,5031932,-11375082,12714369]), xy2d: Fe([20807691,-7270825,29286141,11421711,-27876523,-13868230,-21227475,1035546,-19733229,12796920]), }, GePrecomp { y_plus_x: Fe([12076899,-14301286,-8785001,-11848922,-25012791,16400684,-17591495,-12899438,3480665,-15182815]), y_minus_x: Fe([-32361549,5457597,28548107,7833186,7303070,-11953545,-24363064,-15921875,-33374054,2771025]), xy2d: Fe([-21389266,421932,26597266,6860826,22486084,-6737172,-17137485,-4210226,-24552282,15673397]), }, GePrecomp { y_plus_x: Fe([-20184622,2338216,19788685,-9620956,-4001265,-8740893,-20271184,4733254,3727144,-12934448]), y_minus_x: Fe([6120119,814863,-11794402,-622716,6812205,-15747771,2019594,7975683,31123697,-10958981]), xy2d: Fe([30069250,-11435332,30434654,2958439,18399564,-976289,12296869,9204260,-16432438,9648165]), }, GePrecomp { y_plus_x: Fe([32705432,-1550977,30705658,7451065,-11805606,9631813,3305266,5248604,-26008332,-11377501]), y_minus_x: Fe([17219865,2375039,-31570947,-5575615,-19459679,9219903,294711,15298639,2662509,-16297073]), xy2d: Fe([-1172927,-7558695,-4366770,-4287744,-21346413,-8434326,32087529,-1222777,32247248,-14389861]), }, GePrecomp { y_plus_x: Fe([14312628,1221556,17395390,-8700143,-4945741,-8684635,-28197744,-9637817,-16027623,-13378845]), y_minus_x: Fe([-1428825,-9678990,-9235681,6549687,-7383069,-468664,23046502,9803137,17597934,2346211]), xy2d: Fe([18510800,15337574,26171504,981392,-22241552,7827556,-23491134,-11323352,3059833,-11782870]), }, GePrecomp { y_plus_x: Fe([10141598,6082907,17829293,-1947643,9830092,13613136,-25556636,-5544586,-33502212,3592096]), y_minus_x: Fe([33114168,-15889352,-26525686,-13343397,33076705,8716171,1151462,1521897,-982665,-6837803]), xy2d: Fe([-32939165,-4255815,23947181,-324178,-33072974,-12305637,-16637686,3891704,26353178,693168]), }, GePrecomp { y_plus_x: Fe([30374239,1595580,-16884039,13186931,4600344,406904,9585294,-400668,31375464,14369965]), y_minus_x: Fe([-14370654,-7772529,1510301,6434173,-18784789,-6262728,32732230,-13108839,17901441,16011505]), xy2d: Fe([18171223,-11934626,-12500402,15197122,-11038147,-15230035,-19172240,-16046376,8764035,12309598]), }, ], [ GePrecomp { y_plus_x: 
Fe([5975908,-5243188,-19459362,-9681747,-11541277,14015782,-23665757,1228319,17544096,-10593782]), y_minus_x: Fe([5811932,-1715293,3442887,-2269310,-18367348,-8359541,-18044043,-15410127,-5565381,12348900]), xy2d: Fe([-31399660,11407555,25755363,6891399,-3256938,14872274,-24849353,8141295,-10632534,-585479]), }, GePrecomp { y_plus_x: Fe([-12675304,694026,-5076145,13300344,14015258,-14451394,-9698672,-11329050,30944593,1130208]), y_minus_x: Fe([8247766,-6710942,-26562381,-7709309,-14401939,-14648910,4652152,2488540,23550156,-271232]), xy2d: Fe([17294316,-3788438,7026748,15626851,22990044,113481,2267737,-5908146,-408818,-137719]), }, GePrecomp { y_plus_x: Fe([16091085,-16253926,18599252,7340678,2137637,-1221657,-3364161,14550936,3260525,-7166271]), y_minus_x: Fe([-4910104,-13332887,18550887,10864893,-16459325,-7291596,-23028869,-13204905,-12748722,2701326]), xy2d: Fe([-8574695,16099415,4629974,-16340524,-20786213,-6005432,-10018363,9276971,11329923,1862132]), }, GePrecomp { y_plus_x: Fe([14763076,-15903608,-30918270,3689867,3511892,10313526,-21951088,12219231,-9037963,-940300]), y_minus_x: Fe([8894987,-3446094,6150753,3013931,301220,15693451,-31981216,-2909717,-15438168,11595570]), xy2d: Fe([15214962,3537601,-26238722,-14058872,4418657,-15230761,13947276,10730794,-13489462,-4363670]), }, GePrecomp { y_plus_x: Fe([-2538306,7682793,32759013,263109,-29984731,-7955452,-22332124,-10188635,977108,699994]), y_minus_x: Fe([-12466472,4195084,-9211532,550904,-15565337,12917920,19118110,-439841,-30534533,-14337913]), xy2d: Fe([31788461,-14507657,4799989,7372237,8808585,-14747943,9408237,-10051775,12493932,-5409317]), }, GePrecomp { y_plus_x: Fe([-25680606,5260744,-19235809,-6284470,-3695942,16566087,27218280,2607121,29375955,6024730]), y_minus_x: Fe([842132,-2794693,-4763381,-8722815,26332018,-12405641,11831880,6985184,-9940361,2854096]), xy2d: Fe([-4847262,-7969331,2516242,-5847713,9695691,-7221186,16512645,960770,12121869,16648078]), }, GePrecomp { y_plus_x: Fe([-15218652,14667096,-13336229,2013717,30598287,-464137,-31504922,-7882064,20237806,2838411]), y_minus_x: Fe([-19288047,4453152,15298546,-16178388,22115043,-15972604,12544294,-13470457,1068881,-12499905]), xy2d: Fe([-9558883,-16518835,33238498,13506958,30505848,-1114596,-8486907,-2630053,12521378,4845654]), }, GePrecomp { y_plus_x: Fe([-28198521,10744108,-2958380,10199664,7759311,-13088600,3409348,-873400,-6482306,-12885870]), y_minus_x: Fe([-23561822,6230156,-20382013,10655314,-24040585,-11621172,10477734,-1240216,-3113227,13974498]), xy2d: Fe([12966261,15550616,-32038948,-1615346,21025980,-629444,5642325,7188737,18895762,12629579]), }, ], [ GePrecomp { y_plus_x: Fe([14741879,-14946887,22177208,-11721237,1279741,8058600,11758140,789443,32195181,3895677]), y_minus_x: Fe([10758205,15755439,-4509950,9243698,-4879422,6879879,-2204575,-3566119,-8982069,4429647]), xy2d: Fe([-2453894,15725973,-20436342,-10410672,-5803908,-11040220,-7135870,-11642895,18047436,-15281743]), }, GePrecomp { y_plus_x: Fe([-25173001,-11307165,29759956,11776784,-22262383,-15820455,10993114,-12850837,-17620701,-9408468]), y_minus_x: Fe([21987233,700364,-24505048,14972008,-7774265,-5718395,32155026,2581431,-29958985,8773375]), xy2d: Fe([-25568350,454463,-13211935,16126715,25240068,8594567,20656846,12017935,-7874389,-13920155]), }, GePrecomp { y_plus_x: Fe([6028182,6263078,-31011806,-11301710,-818919,2461772,-31841174,-5468042,-1721788,-2776725]), y_minus_x: Fe([-12278994,16624277,987579,-5922598,32908203,1248608,7719845,-4166698,28408820,6816612]), xy2d: 
Fe([-10358094,-8237829,19549651,-12169222,22082623,16147817,20613181,13982702,-10339570,5067943]), }, GePrecomp { y_plus_x: Fe([-30505967,-3821767,12074681,13582412,-19877972,2443951,-19719286,12746132,5331210,-10105944]), y_minus_x: Fe([30528811,3601899,-1957090,4619785,-27361822,-15436388,24180793,-12570394,27679908,-1648928]), xy2d: Fe([9402404,-13957065,32834043,10838634,-26580150,-13237195,26653274,-8685565,22611444,-12715406]), }, GePrecomp { y_plus_x: Fe([22190590,1118029,22736441,15130463,-30460692,-5991321,19189625,-4648942,4854859,6622139]), y_minus_x: Fe([-8310738,-2953450,-8262579,-3388049,-10401731,-271929,13424426,-3567227,26404409,13001963]), xy2d: Fe([-31241838,-15415700,-2994250,8939346,11562230,-12840670,-26064365,-11621720,-15405155,11020693]), }, GePrecomp { y_plus_x: Fe([1866042,-7949489,-7898649,-10301010,12483315,13477547,3175636,-12424163,28761762,1406734]), y_minus_x: Fe([-448555,-1777666,13018551,3194501,-9580420,-11161737,24760585,-4347088,25577411,-13378680]), xy2d: Fe([-24290378,4759345,-690653,-1852816,2066747,10693769,-29595790,9884936,-9368926,4745410]), }, GePrecomp { y_plus_x: Fe([-9141284,6049714,-19531061,-4341411,-31260798,9944276,-15462008,-11311852,10931924,-11931931]), y_minus_x: Fe([-16561513,14112680,-8012645,4817318,-8040464,-11414606,-22853429,10856641,-20470770,13434654]), xy2d: Fe([22759489,-10073434,-16766264,-1871422,13637442,-10168091,1765144,-12654326,28445307,-5364710]), }, GePrecomp { y_plus_x: Fe([29875063,12493613,2795536,-3786330,1710620,15181182,-10195717,-8788675,9074234,1167180]), y_minus_x: Fe([-26205683,11014233,-9842651,-2635485,-26908120,7532294,-18716888,-9535498,3843903,9367684]), xy2d: Fe([-10969595,-6403711,9591134,9582310,11349256,108879,16235123,8601684,-139197,4242895]), }, ], [ GePrecomp { y_plus_x: Fe([22092954,-13191123,-2042793,-11968512,32186753,-11517388,-6574341,2470660,-27417366,16625501]), y_minus_x: Fe([-11057722,3042016,13770083,-9257922,584236,-544855,-7770857,2602725,-27351616,14247413]), xy2d: Fe([6314175,-10264892,-32772502,15957557,-10157730,168750,-8618807,14290061,27108877,-1180880]), }, GePrecomp { y_plus_x: Fe([-8586597,-7170966,13241782,10960156,-32991015,-13794596,33547976,-11058889,-27148451,981874]), y_minus_x: Fe([22833440,9293594,-32649448,-13618667,-9136966,14756819,-22928859,-13970780,-10479804,-16197962]), xy2d: Fe([-7768587,3326786,-28111797,10783824,19178761,14905060,22680049,13906969,-15933690,3797899]), }, GePrecomp { y_plus_x: Fe([21721356,-4212746,-12206123,9310182,-3882239,-13653110,23740224,-2709232,20491983,-8042152]), y_minus_x: Fe([9209270,-15135055,-13256557,-6167798,-731016,15289673,25947805,15286587,30997318,-6703063]), xy2d: Fe([7392032,16618386,23946583,-8039892,-13265164,-1533858,-14197445,-2321576,17649998,-250080]), }, GePrecomp { y_plus_x: Fe([-9301088,-14193827,30609526,-3049543,-25175069,-1283752,-15241566,-9525724,-2233253,7662146]), y_minus_x: Fe([-17558673,1763594,-33114336,15908610,-30040870,-12174295,7335080,-8472199,-3174674,3440183]), xy2d: Fe([-19889700,-5977008,-24111293,-9688870,10799743,-16571957,40450,-4431835,4862400,1133]), }, GePrecomp { y_plus_x: Fe([-32856209,-7873957,-5422389,14860950,-16319031,7956142,7258061,311861,-30594991,-7379421]), y_minus_x: Fe([-3773428,-1565936,28985340,7499440,24445838,9325937,29727763,16527196,18278453,15405622]), xy2d: Fe([-4381906,8508652,-19898366,-3674424,-5984453,15149970,-13313598,843523,-21875062,13626197]), }, GePrecomp { y_plus_x: 
Fe([2281448,-13487055,-10915418,-2609910,1879358,16164207,-10783882,3953792,13340839,15928663]), y_minus_x: Fe([31727126,-7179855,-18437503,-8283652,2875793,-16390330,-25269894,-7014826,-23452306,5964753]), xy2d: Fe([4100420,-5959452,-17179337,6017714,-18705837,12227141,-26684835,11344144,2538215,-7570755]), }, GePrecomp { y_plus_x: Fe([-9433605,6123113,11159803,-2156608,30016280,14966241,-20474983,1485421,-629256,-15958862]), y_minus_x: Fe([-26804558,4260919,11851389,9658551,-32017107,16367492,-20205425,-13191288,11659922,-11115118]), xy2d: Fe([26180396,10015009,-30844224,-8581293,5418197,9480663,2231568,-10170080,33100372,-1306171]), }, GePrecomp { y_plus_x: Fe([15121113,-5201871,-10389905,15427821,-27509937,-15992507,21670947,4486675,-5931810,-14466380]), y_minus_x: Fe([16166486,-9483733,-11104130,6023908,-31926798,-1364923,2340060,-16254968,-10735770,-10039824]), xy2d: Fe([28042865,-3557089,-12126526,12259706,-3717498,-6945899,6766453,-8689599,18036436,5803270]), }, ], [ GePrecomp { y_plus_x: Fe([-817581,6763912,11803561,1585585,10958447,-2671165,23855391,4598332,-6159431,-14117438]), y_minus_x: Fe([-31031306,-14256194,17332029,-2383520,31312682,-5967183,696309,50292,-20095739,11763584]), xy2d: Fe([-594563,-2514283,-32234153,12643980,12650761,14811489,665117,-12613632,-19773211,-10713562]), }, GePrecomp { y_plus_x: Fe([30464590,-11262872,-4127476,-12734478,19835327,-7105613,-24396175,2075773,-17020157,992471]), y_minus_x: Fe([18357185,-6994433,7766382,16342475,-29324918,411174,14578841,8080033,-11574335,-10601610]), xy2d: Fe([19598397,10334610,12555054,2555664,18821899,-10339780,21873263,16014234,26224780,16452269]), }, GePrecomp { y_plus_x: Fe([-30223925,5145196,5944548,16385966,3976735,2009897,-11377804,-7618186,-20533829,3698650]), y_minus_x: Fe([14187449,3448569,-10636236,-10810935,-22663880,-3433596,7268410,-10890444,27394301,12015369]), xy2d: Fe([19695761,16087646,28032085,12999827,6817792,11427614,20244189,-1312777,-13259127,-3402461]), }, GePrecomp { y_plus_x: Fe([30860103,12735208,-1888245,-4699734,-16974906,2256940,-8166013,12298312,-8550524,-10393462]), y_minus_x: Fe([-5719826,-11245325,-1910649,15569035,26642876,-7587760,-5789354,-15118654,-4976164,12651793]), xy2d: Fe([-2848395,9953421,11531313,-5282879,26895123,-12697089,-13118820,-16517902,9768698,-2533218]), }, GePrecomp { y_plus_x: Fe([-24719459,1894651,-287698,-4704085,15348719,-8156530,32767513,12765450,4940095,10678226]), y_minus_x: Fe([18860224,15980149,-18987240,-1562570,-26233012,-11071856,-7843882,13944024,-24372348,16582019]), xy2d: Fe([-15504260,4970268,-29893044,4175593,-20993212,-2199756,-11704054,15444560,-11003761,7989037]), }, GePrecomp { y_plus_x: Fe([31490452,5568061,-2412803,2182383,-32336847,4531686,-32078269,6200206,-19686113,-14800171]), y_minus_x: Fe([-17308668,-15879940,-31522777,-2831,-32887382,16375549,8680158,-16371713,28550068,-6857132]), xy2d: Fe([-28126887,-5688091,16837845,-1820458,-6850681,12700016,-30039981,4364038,1155602,5988841]), }, GePrecomp { y_plus_x: Fe([21890435,-13272907,-12624011,12154349,-7831873,15300496,23148983,-4470481,24618407,8283181]), y_minus_x: Fe([-33136107,-10512751,9975416,6841041,-31559793,16356536,3070187,-7025928,1466169,10740210]), xy2d: Fe([-1509399,-15488185,-13503385,-10655916,32799044,909394,-13938903,-5779719,-32164649,-15327040]), }, GePrecomp { y_plus_x: Fe([3960823,-14267803,-28026090,-15918051,-19404858,13146868,15567327,951507,-3260321,-573935]), y_minus_x: Fe([24740841,5052253,-30094131,8961361,25877428,6165135,-24368180,14397372,-7380369,-6144105]), 
xy2d: Fe([-28888365,3510803,-28103278,-1158478,-11238128,-10631454,-15441463,-14453128,-1625486,-6494814]), }, ], [ GePrecomp { y_plus_x: Fe([793299,-9230478,8836302,-6235707,-27360908,-2369593,33152843,-4885251,-9906200,-621852]), y_minus_x: Fe([5666233,525582,20782575,-8038419,-24538499,14657740,16099374,1468826,-6171428,-15186581]), xy2d: Fe([-4859255,-3779343,-2917758,-6748019,7778750,11688288,-30404353,-9871238,-1558923,-9863646]), }, GePrecomp { y_plus_x: Fe([10896332,-7719704,824275,472601,-19460308,3009587,25248958,14783338,-30581476,-15757844]), y_minus_x: Fe([10566929,12612572,-31944212,11118703,-12633376,12362879,21752402,8822496,24003793,14264025]), xy2d: Fe([27713862,-7355973,-11008240,9227530,27050101,2504721,23886875,-13117525,13958495,-5732453]), }, GePrecomp { y_plus_x: Fe([-23481610,4867226,-27247128,3900521,29838369,-8212291,-31889399,-10041781,7340521,-15410068]), y_minus_x: Fe([4646514,-8011124,-22766023,-11532654,23184553,8566613,31366726,-1381061,-15066784,-10375192]), xy2d: Fe([-17270517,12723032,-16993061,14878794,21619651,-6197576,27584817,3093888,-8843694,3849921]), }, GePrecomp { y_plus_x: Fe([-9064912,2103172,25561640,-15125738,-5239824,9582958,32477045,-9017955,5002294,-15550259]), y_minus_x: Fe([-12057553,-11177906,21115585,-13365155,8808712,-12030708,16489530,13378448,-25845716,12741426]), xy2d: Fe([-5946367,10645103,-30911586,15390284,-3286982,-7118677,24306472,15852464,28834118,-7646072]), }, GePrecomp { y_plus_x: Fe([-17335748,-9107057,-24531279,9434953,-8472084,-583362,-13090771,455841,20461858,5491305]), y_minus_x: Fe([13669248,-16095482,-12481974,-10203039,-14569770,-11893198,-24995986,11293807,-28588204,-9421832]), xy2d: Fe([28497928,6272777,-33022994,14470570,8906179,-1225630,18504674,-14165166,29867745,-8795943]), }, GePrecomp { y_plus_x: Fe([-16207023,13517196,-27799630,-13697798,24009064,-6373891,-6367600,-13175392,22853429,-4012011]), y_minus_x: Fe([24191378,16712145,-13931797,15217831,14542237,1646131,18603514,-11037887,12876623,-2112447]), xy2d: Fe([17902668,4518229,-411702,-2829247,26878217,5258055,-12860753,608397,16031844,3723494]), }, GePrecomp { y_plus_x: Fe([-28632773,12763728,-20446446,7577504,33001348,-13017745,17558842,-7872890,23896954,-4314245]), y_minus_x: Fe([-20005381,-12011952,31520464,605201,2543521,5991821,-2945064,7229064,-9919646,-8826859]), xy2d: Fe([28816045,298879,-28165016,-15920938,19000928,-1665890,-12680833,-2949325,-18051778,-2082915]), }, GePrecomp { y_plus_x: Fe([16000882,-344896,3493092,-11447198,-29504595,-13159789,12577740,16041268,-19715240,7847707]), y_minus_x: Fe([10151868,10572098,27312476,7922682,14825339,4723128,-32855931,-6519018,-10020567,3852848]), xy2d: Fe([-11430470,15697596,-21121557,-4420647,5386314,15063598,16514493,-15932110,29330899,-15076224]), }, ], [ GePrecomp { y_plus_x: Fe([-25499735,-4378794,-15222908,-6901211,16615731,2051784,3303702,15490,-27548796,12314391]), y_minus_x: Fe([15683520,-6003043,18109120,-9980648,15337968,-5997823,-16717435,15921866,16103996,-3731215]), xy2d: Fe([-23169824,-10781249,13588192,-1628807,-3798557,-1074929,-19273607,5402699,-29815713,-9841101]), }, GePrecomp { y_plus_x: Fe([23190676,2384583,-32714340,3462154,-29903655,-1529132,-11266856,8911517,-25205859,2739713]), y_minus_x: Fe([21374101,-3554250,-33524649,9874411,15377179,11831242,-33529904,6134907,4931255,11987849]), xy2d: Fe([-7732,-2978858,-16223486,7277597,105524,-322051,-31480539,13861388,-30076310,10117930]), }, GePrecomp { y_plus_x: 
Fe([-29501170,-10744872,-26163768,13051539,-25625564,5089643,-6325503,6704079,12890019,15728940]), y_minus_x: Fe([-21972360,-11771379,-951059,-4418840,14704840,2695116,903376,-10428139,12885167,8311031]), xy2d: Fe([-17516482,5352194,10384213,-13811658,7506451,13453191,26423267,4384730,1888765,-5435404]), }, GePrecomp { y_plus_x: Fe([-25817338,-3107312,-13494599,-3182506,30896459,-13921729,-32251644,-12707869,-19464434,-3340243]), y_minus_x: Fe([-23607977,-2665774,-526091,4651136,5765089,4618330,6092245,14845197,17151279,-9854116]), xy2d: Fe([-24830458,-12733720,-15165978,10367250,-29530908,-265356,22825805,-7087279,-16866484,16176525]), }, GePrecomp { y_plus_x: Fe([-23583256,6564961,20063689,3798228,-4740178,7359225,2006182,-10363426,-28746253,-10197509]), y_minus_x: Fe([-10626600,-4486402,-13320562,-5125317,3432136,-6393229,23632037,-1940610,32808310,1099883]), xy2d: Fe([15030977,5768825,-27451236,-2887299,-6427378,-15361371,-15277896,-6809350,2051441,-15225865]), }, GePrecomp { y_plus_x: Fe([-3362323,-7239372,7517890,9824992,23555850,295369,5148398,-14154188,-22686354,16633660]), y_minus_x: Fe([4577086,-16752288,13249841,-15304328,19958763,-14537274,18559670,-10759549,8402478,-9864273]), xy2d: Fe([-28406330,-1051581,-26790155,-907698,-17212414,-11030789,9453451,-14980072,17983010,9967138]), }, GePrecomp { y_plus_x: Fe([-25762494,6524722,26585488,9969270,24709298,1220360,-1677990,7806337,17507396,3651560]), y_minus_x: Fe([-10420457,-4118111,14584639,15971087,-15768321,8861010,26556809,-5574557,-18553322,-11357135]), xy2d: Fe([2839101,14284142,4029895,3472686,14402957,12689363,-26642121,8459447,-5605463,-7621941]), }, GePrecomp { y_plus_x: Fe([-4839289,-3535444,9744961,2871048,25113978,3187018,-25110813,-849066,17258084,-7977739]), y_minus_x: Fe([18164541,-10595176,-17154882,-1542417,19237078,-9745295,23357533,-15217008,26908270,12150756]), xy2d: Fe([-30264870,-7647865,5112249,-7036672,-1499807,-6974257,43168,-5537701,-32302074,16215819]), }, ], [ GePrecomp { y_plus_x: Fe([-6898905,9824394,-12304779,-4401089,-31397141,-6276835,32574489,12532905,-7503072,-8675347]), y_minus_x: Fe([-27343522,-16515468,-27151524,-10722951,946346,16291093,254968,7168080,21676107,-1943028]), xy2d: Fe([21260961,-8424752,-16831886,-11920822,-23677961,3968121,-3651949,-6215466,-3556191,-7913075]), }, GePrecomp { y_plus_x: Fe([16544754,13250366,-16804428,15546242,-4583003,12757258,-2462308,-8680336,-18907032,-9662799]), y_minus_x: Fe([-2415239,-15577728,18312303,4964443,-15272530,-12653564,26820651,16690659,25459437,-4564609]), xy2d: Fe([-25144690,11425020,28423002,-11020557,-6144921,-15826224,9142795,-2391602,-6432418,-1644817]), }, GePrecomp { y_plus_x: Fe([-23104652,6253476,16964147,-3768872,-25113972,-12296437,-27457225,-16344658,6335692,7249989]), y_minus_x: Fe([-30333227,13979675,7503222,-12368314,-11956721,-4621693,-30272269,2682242,25993170,-12478523]), xy2d: Fe([4364628,5930691,32304656,-10044554,-8054781,15091131,22857016,-10598955,31820368,15075278]), }, GePrecomp { y_plus_x: Fe([31879134,-8918693,17258761,90626,-8041836,-4917709,24162788,-9650886,-17970238,12833045]), y_minus_x: Fe([19073683,14851414,-24403169,-11860168,7625278,11091125,-19619190,2074449,-9413939,14905377]), xy2d: Fe([24483667,-11935567,-2518866,-11547418,-1553130,15355506,-25282080,9253129,27628530,-7555480]), }, GePrecomp { y_plus_x: Fe([17597607,8340603,19355617,552187,26198470,-3176583,4593324,-9157582,-14110875,15297016]), y_minus_x: Fe([510886,14337390,-31785257,16638632,6328095,2713355,-20217417,-11864220,8683221,2921426]), xy2d: 
Fe([18606791,11874196,27155355,-5281482,-24031742,6265446,-25178240,-1278924,4674690,13890525]), }, GePrecomp { y_plus_x: Fe([13609624,13069022,-27372361,-13055908,24360586,9592974,14977157,9835105,4389687,288396]), y_minus_x: Fe([9922506,-519394,13613107,5883594,-18758345,-434263,-12304062,8317628,23388070,16052080]), xy2d: Fe([12720016,11937594,-31970060,-5028689,26900120,8561328,-20155687,-11632979,-14754271,-10812892]), }, GePrecomp { y_plus_x: Fe([15961858,14150409,26716931,-665832,-22794328,13603569,11829573,7467844,-28822128,929275]), y_minus_x: Fe([11038231,-11582396,-27310482,-7316562,-10498527,-16307831,-23479533,-9371869,-21393143,2465074]), xy2d: Fe([20017163,-4323226,27915242,1529148,12396362,15675764,13817261,-9658066,2463391,-4622140]), }, GePrecomp { y_plus_x: Fe([-16358878,-12663911,-12065183,4996454,-1256422,1073572,9583558,12851107,4003896,12673717]), y_minus_x: Fe([-1731589,-15155870,-3262930,16143082,19294135,13385325,14741514,-9103726,7903886,2348101]), xy2d: Fe([24536016,-16515207,12715592,-3862155,1511293,10047386,-3842346,-7129159,-28377538,10048127]), }, ], [ GePrecomp { y_plus_x: Fe([-12622226,-6204820,30718825,2591312,-10617028,12192840,18873298,-7297090,-32297756,15221632]), y_minus_x: Fe([-26478122,-11103864,11546244,-1852483,9180880,7656409,-21343950,2095755,29769758,6593415]), xy2d: Fe([-31994208,-2907461,4176912,3264766,12538965,-868111,26312345,-6118678,30958054,8292160]), }, GePrecomp { y_plus_x: Fe([31429822,-13959116,29173532,15632448,12174511,-2760094,32808831,3977186,26143136,-3148876]), y_minus_x: Fe([22648901,1402143,-22799984,13746059,7936347,365344,-8668633,-1674433,-3758243,-2304625]), xy2d: Fe([-15491917,8012313,-2514730,-12702462,-23965846,-10254029,-1612713,-1535569,-16664475,8194478]), }, GePrecomp { y_plus_x: Fe([27338066,-7507420,-7414224,10140405,-19026427,-6589889,27277191,8855376,28572286,3005164]), y_minus_x: Fe([26287124,4821776,25476601,-4145903,-3764513,-15788984,-18008582,1182479,-26094821,-13079595]), xy2d: Fe([-7171154,3178080,23970071,6201893,-17195577,-4489192,-21876275,-13982627,32208683,-1198248]), }, GePrecomp { y_plus_x: Fe([-16657702,2817643,-10286362,14811298,6024667,13349505,-27315504,-10497842,-27672585,-11539858]), y_minus_x: Fe([15941029,-9405932,-21367050,8062055,31876073,-238629,-15278393,-1444429,15397331,-4130193]), xy2d: Fe([8934485,-13485467,-23286397,-13423241,-32446090,14047986,31170398,-1441021,-27505566,15087184]), }, GePrecomp { y_plus_x: Fe([-18357243,-2156491,24524913,-16677868,15520427,-6360776,-15502406,11461896,16788528,-5868942]), y_minus_x: Fe([-1947386,16013773,21750665,3714552,-17401782,-16055433,-3770287,-10323320,31322514,-11615635]), xy2d: Fe([21426655,-5650218,-13648287,-5347537,-28812189,-4920970,-18275391,-14621414,13040862,-12112948]), }, GePrecomp { y_plus_x: Fe([11293895,12478086,-27136401,15083750,-29307421,14748872,14555558,-13417103,1613711,4896935]), y_minus_x: Fe([-25894883,15323294,-8489791,-8057900,25967126,-13425460,2825960,-4897045,-23971776,-11267415]), xy2d: Fe([-15924766,-5229880,-17443532,6410664,3622847,10243618,20615400,12405433,-23753030,-8436416]), }, GePrecomp { y_plus_x: Fe([-7091295,12556208,-20191352,9025187,-17072479,4333801,4378436,2432030,23097949,-566018]), y_minus_x: Fe([4565804,-16025654,20084412,-7842817,1724999,189254,24767264,10103221,-18512313,2424778]), xy2d: Fe([366633,-11976806,8173090,-6890119,30788634,5745705,-7168678,1344109,-3642553,12412659]), }, GePrecomp { y_plus_x: 
Fe([-24001791,7690286,14929416,-168257,-32210835,-13412986,24162697,-15326504,-3141501,11179385]), y_minus_x: Fe([18289522,-14724954,8056945,16430056,-21729724,7842514,-6001441,-1486897,-18684645,-11443503]), xy2d: Fe([476239,6601091,-6152790,-9723375,17503545,-4863900,27672959,13403813,11052904,5219329]), }, ], [ GePrecomp { y_plus_x: Fe([20678546,-8375738,-32671898,8849123,-5009758,14574752,31186971,-3973730,9014762,-8579056]), y_minus_x: Fe([-13644050,-10350239,-15962508,5075808,-1514661,-11534600,-33102500,9160280,8473550,-3256838]), xy2d: Fe([24900749,14435722,17209120,-15292541,-22592275,9878983,-7689309,-16335821,-24568481,11788948]), }, GePrecomp { y_plus_x: Fe([-3118155,-11395194,-13802089,14797441,9652448,-6845904,-20037437,10410733,-24568470,-1458691]), y_minus_x: Fe([-15659161,16736706,-22467150,10215878,-9097177,7563911,11871841,-12505194,-18513325,8464118]), xy2d: Fe([-23400612,8348507,-14585951,-861714,-3950205,-6373419,14325289,8628612,33313881,-8370517]), }, GePrecomp { y_plus_x: Fe([-20186973,-4967935,22367356,5271547,-1097117,-4788838,-24805667,-10236854,-8940735,-5818269]), y_minus_x: Fe([-6948785,-1795212,-32625683,-16021179,32635414,-7374245,15989197,-12838188,28358192,-4253904]), xy2d: Fe([-23561781,-2799059,-32351682,-1661963,-9147719,10429267,-16637684,4072016,-5351664,5596589]), }, GePrecomp { y_plus_x: Fe([-28236598,-3390048,12312896,6213178,3117142,16078565,29266239,2557221,1768301,15373193]), y_minus_x: Fe([-7243358,-3246960,-4593467,-7553353,-127927,-912245,-1090902,-4504991,-24660491,3442910]), xy2d: Fe([-30210571,5124043,14181784,8197961,18964734,-11939093,22597931,7176455,-18585478,13365930]), }, GePrecomp { y_plus_x: Fe([-7877390,-1499958,8324673,4690079,6261860,890446,24538107,-8570186,-9689599,-3031667]), y_minus_x: Fe([25008904,-10771599,-4305031,-9638010,16265036,15721635,683793,-11823784,15723479,-15163481]), xy2d: Fe([-9660625,12374379,-27006999,-7026148,-7724114,-12314514,11879682,5400171,519526,-1235876]), }, GePrecomp { y_plus_x: Fe([22258397,-16332233,-7869817,14613016,-22520255,-2950923,-20353881,7315967,16648397,7605640]), y_minus_x: Fe([-8081308,-8464597,-8223311,9719710,19259459,-15348212,23994942,-5281555,-9468848,4763278]), xy2d: Fe([-21699244,9220969,-15730624,1084137,-25476107,-2852390,31088447,-7764523,-11356529,728112]), }, GePrecomp { y_plus_x: Fe([26047220,-11751471,-6900323,-16521798,24092068,9158119,-4273545,-12555558,-29365436,-5498272]), y_minus_x: Fe([17510331,-322857,5854289,8403524,17133918,-3112612,-28111007,12327945,10750447,10014012]), xy2d: Fe([-10312768,3936952,9156313,-8897683,16498692,-994647,-27481051,-666732,3424691,7540221]), }, GePrecomp { y_plus_x: Fe([30322361,-6964110,11361005,-4143317,7433304,4989748,-7071422,-16317219,-9244265,15258046]), y_minus_x: Fe([13054562,-2779497,19155474,469045,-12482797,4566042,5631406,2711395,1062915,-5136345]), xy2d: Fe([-19240248,-11254599,-29509029,-7499965,-5835763,13005411,-6066489,12194497,32960380,1459310]), }, ], [ GePrecomp { y_plus_x: Fe([19852034,7027924,23669353,10020366,8586503,-6657907,394197,-6101885,18638003,-11174937]), y_minus_x: Fe([31395534,15098109,26581030,8030562,-16527914,-5007134,9012486,-7584354,-6643087,-5442636]), xy2d: Fe([-9192165,-2347377,-1997099,4529534,25766844,607986,-13222,9677543,-32294889,-6456008]), }, GePrecomp { y_plus_x: Fe([-2444496,-149937,29348902,8186665,1873760,12489863,-30934579,-7839692,-7852844,-8138429]), y_minus_x: Fe([-15236356,-15433509,7766470,746860,26346930,-10221762,-27333451,10754588,-9431476,5203576]), xy2d: 
Fe([31834314,14135496,-770007,5159118,20917671,-16768096,-7467973,-7337524,31809243,7347066]), }, GePrecomp { y_plus_x: Fe([-9606723,-11874240,20414459,13033986,13716524,-11691881,19797970,-12211255,15192876,-2087490]), y_minus_x: Fe([-12663563,-2181719,1168162,-3804809,26747877,-14138091,10609330,12694420,33473243,-13382104]), xy2d: Fe([33184999,11180355,15832085,-11385430,-1633671,225884,15089336,-11023903,-6135662,14480053]), }, GePrecomp { y_plus_x: Fe([31308717,-5619998,31030840,-1897099,15674547,-6582883,5496208,13685227,27595050,8737275]), y_minus_x: Fe([-20318852,-15150239,10933843,-16178022,8335352,-7546022,-31008351,-12610604,26498114,66511]), xy2d: Fe([22644454,-8761729,-16671776,4884562,-3105614,-13559366,30540766,-4286747,-13327787,-7515095]), }, GePrecomp { y_plus_x: Fe([-28017847,9834845,18617207,-2681312,-3401956,-13307506,8205540,13585437,-17127465,15115439]), y_minus_x: Fe([23711543,-672915,31206561,-8362711,6164647,-9709987,-33535882,-1426096,8236921,16492939]), xy2d: Fe([-23910559,-13515526,-26299483,-4503841,25005590,-7687270,19574902,10071562,6708380,-6222424]), }, GePrecomp { y_plus_x: Fe([2101391,-4930054,19702731,2367575,-15427167,1047675,5301017,9328700,29955601,-11678310]), y_minus_x: Fe([3096359,9271816,-21620864,-15521844,-14847996,-7592937,-25892142,-12635595,-9917575,6216608]), xy2d: Fe([-32615849,338663,-25195611,2510422,-29213566,-13820213,24822830,-6146567,-26767480,7525079]), }, GePrecomp { y_plus_x: Fe([-23066649,-13985623,16133487,-7896178,-3389565,778788,-910336,-2782495,-19386633,11994101]), y_minus_x: Fe([21691500,-13624626,-641331,-14367021,3285881,-3483596,-25064666,9718258,-7477437,13381418]), xy2d: Fe([18445390,-4202236,14979846,11622458,-1727110,-3582980,23111648,-6375247,28535282,15779576]), }, GePrecomp { y_plus_x: Fe([30098053,3089662,-9234387,16662135,-21306940,11308411,-14068454,12021730,9955285,-16303356]), y_minus_x: Fe([9734894,-14576830,-7473633,-9138735,2060392,11313496,-18426029,9924399,20194861,13380996]), xy2d: Fe([-26378102,-7965207,-22167821,15789297,-18055342,-6168792,-1984914,15707771,26342023,10146099]), }, ], [ GePrecomp { y_plus_x: Fe([-26016874,-219943,21339191,-41388,19745256,-2878700,-29637280,2227040,21612326,-545728]), y_minus_x: Fe([-13077387,1184228,23562814,-5970442,-20351244,-6348714,25764461,12243797,-20856566,11649658]), xy2d: Fe([-10031494,11262626,27384172,2271902,26947504,-15997771,39944,6114064,33514190,2333242]), }, GePrecomp { y_plus_x: Fe([-21433588,-12421821,8119782,7219913,-21830522,-9016134,-6679750,-12670638,24350578,-13450001]), y_minus_x: Fe([-4116307,-11271533,-23886186,4843615,-30088339,690623,-31536088,-10406836,8317860,12352766]), xy2d: Fe([18200138,-14475911,-33087759,-2696619,-23702521,-9102511,-23552096,-2287550,20712163,6719373]), }, GePrecomp { y_plus_x: Fe([26656208,6075253,-7858556,1886072,-28344043,4262326,11117530,-3763210,26224235,-3297458]), y_minus_x: Fe([-17168938,-14854097,-3395676,-16369877,-19954045,14050420,21728352,9493610,18620611,-16428628]), xy2d: Fe([-13323321,13325349,11432106,5964811,18609221,6062965,-5269471,-9725556,-30701573,-16479657]), }, GePrecomp { y_plus_x: Fe([-23860538,-11233159,26961357,1640861,-32413112,-16737940,12248509,-5240639,13735342,1934062]), y_minus_x: Fe([25089769,6742589,17081145,-13406266,21909293,-16067981,-15136294,-3765346,-21277997,5473616]), xy2d: Fe([31883677,-7961101,1083432,-11572403,22828471,13290673,-7125085,12469656,29111212,-5451014]), }, GePrecomp { y_plus_x: 
Fe([24244947,-15050407,-26262976,2791540,-14997599,16666678,24367466,6388839,-10295587,452383]), y_minus_x: Fe([-25640782,-3417841,5217916,16224624,19987036,-4082269,-24236251,-5915248,15766062,8407814]), xy2d: Fe([-20406999,13990231,15495425,16395525,5377168,15166495,-8917023,-4388953,-8067909,2276718]), }, GePrecomp { y_plus_x: Fe([30157918,12924066,-17712050,9245753,19895028,3368142,-23827587,5096219,22740376,-7303417]), y_minus_x: Fe([2041139,-14256350,7783687,13876377,-25946985,-13352459,24051124,13742383,-15637599,13295222]), xy2d: Fe([33338237,-8505733,12532113,7977527,9106186,-1715251,-17720195,-4612972,-4451357,-14669444]), }, GePrecomp { y_plus_x: Fe([-20045281,5454097,-14346548,6447146,28862071,1883651,-2469266,-4141880,7770569,9620597]), y_minus_x: Fe([23208068,7979712,33071466,8149229,1758231,-10834995,30945528,-1694323,-33502340,-14767970]), xy2d: Fe([1439958,-16270480,-1079989,-793782,4625402,10647766,-5043801,1220118,30494170,-11440799]), }, GePrecomp { y_plus_x: Fe([-5037580,-13028295,-2970559,-3061767,15640974,-6701666,-26739026,926050,-1684339,-13333647]), y_minus_x: Fe([13908495,-3549272,30919928,-6273825,-21521863,7989039,9021034,9078865,3353509,4033511]), xy2d: Fe([-29663431,-15113610,32259991,-344482,24295849,-12912123,23161163,8839127,27485041,7356032]), }, ], [ GePrecomp { y_plus_x: Fe([9661027,705443,11980065,-5370154,-1628543,14661173,-6346142,2625015,28431036,-16771834]), y_minus_x: Fe([-23839233,-8311415,-25945511,7480958,-17681669,-8354183,-22545972,14150565,15970762,4099461]), xy2d: Fe([29262576,16756590,26350592,-8793563,8529671,-11208050,13617293,-9937143,11465739,8317062]), }, GePrecomp { y_plus_x: Fe([-25493081,-6962928,32500200,-9419051,-23038724,-2302222,14898637,3848455,20969334,-5157516]), y_minus_x: Fe([-20384450,-14347713,-18336405,13884722,-33039454,2842114,-21610826,-3649888,11177095,14989547]), xy2d: Fe([-24496721,-11716016,16959896,2278463,12066309,10137771,13515641,2581286,-28487508,9930240]), }, GePrecomp { y_plus_x: Fe([-17751622,-2097826,16544300,-13009300,-15914807,-14949081,18345767,-13403753,16291481,-5314038]), y_minus_x: Fe([-33229194,2553288,32678213,9875984,8534129,6889387,-9676774,6957617,4368891,9788741]), xy2d: Fe([16660756,7281060,-10830758,12911820,20108584,-8101676,-21722536,-8613148,16250552,-11111103]), }, GePrecomp { y_plus_x: Fe([-19765507,2390526,-16551031,14161980,1905286,6414907,4689584,10604807,-30190403,4782747]), y_minus_x: Fe([-1354539,14736941,-7367442,-13292886,7710542,-14155590,-9981571,4383045,22546403,437323]), xy2d: Fe([31665577,-12180464,-16186830,1491339,-18368625,3294682,27343084,2786261,-30633590,-14097016]), }, GePrecomp { y_plus_x: Fe([-14467279,-683715,-33374107,7448552,19294360,14334329,-19690631,2355319,-19284671,-6114373]), y_minus_x: Fe([15121312,-15796162,6377020,-6031361,-10798111,-12957845,18952177,15496498,-29380133,11754228]), xy2d: Fe([-2637277,-13483075,8488727,-14303896,12728761,-1622493,7141596,11724556,22761615,-10134141]), }, GePrecomp { y_plus_x: Fe([16918416,11729663,-18083579,3022987,-31015732,-13339659,-28741185,-12227393,32851222,11717399]), y_minus_x: Fe([11166634,7338049,-6722523,4531520,-29468672,-7302055,31474879,3483633,-1193175,-4030831]), xy2d: Fe([-185635,9921305,31456609,-13536438,-12013818,13348923,33142652,6546660,-19985279,-3948376]), }, GePrecomp { y_plus_x: Fe([-32460596,11266712,-11197107,-7899103,31703694,3855903,-8537131,-12833048,-30772034,-15486313]), y_minus_x: Fe([-18006477,12709068,3991746,-6479188,-21491523,-10550425,-31135347,-16049879,10928917,3011958]), xy2d: 
Fe([-6957757,-15594337,31696059,334240,29576716,14796075,-30831056,-12805180,18008031,10258577]), }, GePrecomp { y_plus_x: Fe([-22448644,15655569,7018479,-4410003,-30314266,-1201591,-1853465,1367120,25127874,6671743]), y_minus_x: Fe([29701166,-14373934,-10878120,9279288,-17568,13127210,21382910,11042292,25838796,4642684]), xy2d: Fe([-20430234,14955537,-24126347,8124619,-5369288,-5990470,30468147,-13900640,18423289,4177476]), }, ], ];
45.356025
131
0.625484
d7d4560a4eac3b308a845f2235ab530746d806bb
8,840
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Overlap: No two impls for the same trait are implemented for the //! same type. Likewise, no two inherent impls for a given type //! constructor provide a method with the same name. use middle::cstore::CrateStore; use middle::def_id::DefId; use rustc::traits::{self, ProjectionMode}; use rustc::infer; use rustc::ty::{self, TyCtxt}; use syntax::ast; use rustc::dep_graph::DepNode; use rustc_front::hir; use rustc_front::intravisit; use util::nodemap::DefIdMap; use lint; pub fn check(tcx: &TyCtxt) { let mut overlap = OverlapChecker { tcx: tcx, default_impls: DefIdMap() }; // this secondary walk specifically checks for some other cases, // like defaulted traits, for which additional overlap rules exist tcx.visit_all_items_in_krate(DepNode::CoherenceOverlapCheckSpecial, &mut overlap); } struct OverlapChecker<'cx, 'tcx:'cx> { tcx: &'cx TyCtxt<'tcx>, // maps from a trait def-id to an impl id default_impls: DefIdMap<ast::NodeId>, } impl<'cx, 'tcx> OverlapChecker<'cx, 'tcx> { fn check_for_common_items_in_impls(&self, impl1: DefId, impl2: DefId) { #[derive(Copy, Clone, PartialEq)] enum Namespace { Type, Value } fn name_and_namespace(tcx: &TyCtxt, item: &ty::ImplOrTraitItemId) -> (ast::Name, Namespace) { let name = tcx.impl_or_trait_item(item.def_id()).name(); (name, match *item { ty::TypeTraitItemId(..) => Namespace::Type, ty::ConstTraitItemId(..) => Namespace::Value, ty::MethodTraitItemId(..) => Namespace::Value, }) } let impl_items = self.tcx.impl_items.borrow(); for item1 in &impl_items[&impl1] { let (name, namespace) = name_and_namespace(&self.tcx, item1); for item2 in &impl_items[&impl2] { if (name, namespace) == name_and_namespace(&self.tcx, item2) { let msg = format!("duplicate definitions with name `{}`", name); let node_id = self.tcx.map.as_local_node_id(item1.def_id()).unwrap(); self.tcx.sess.add_lint(lint::builtin::OVERLAPPING_INHERENT_IMPLS, node_id, self.tcx.span_of_impl(item1.def_id()).unwrap(), msg); } } } } fn check_for_overlapping_inherent_impls(&self, ty_def_id: DefId) { let _task = self.tcx.dep_graph.in_task(DepNode::CoherenceOverlapInherentCheck(ty_def_id)); let inherent_impls = self.tcx.inherent_impls.borrow(); let impls = match inherent_impls.get(&ty_def_id) { Some(impls) => impls, None => return }; for (i, &impl1_def_id) in impls.iter().enumerate() { for &impl2_def_id in &impls[(i+1)..] { let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, None, ProjectionMode::Topmost); if traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id).is_some() { self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id) } } } } } impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for OverlapChecker<'cx, 'tcx> { fn visit_item(&mut self, item: &'v hir::Item) { match item.node { hir::ItemEnum(..) | hir::ItemStruct(..) => { let type_def_id = self.tcx.map.local_def_id(item.id); self.check_for_overlapping_inherent_impls(type_def_id); } hir::ItemDefaultImpl(..) => { // look for another default impl; note that due to the // general orphan/coherence rules, it must always be // in this crate. 
let impl_def_id = self.tcx.map.local_def_id(item.id); let trait_ref = self.tcx.impl_trait_ref(impl_def_id).unwrap(); let prev_default_impl = self.default_impls.insert(trait_ref.def_id, item.id); if let Some(prev_id) = prev_default_impl { let mut err = struct_span_err!( self.tcx.sess, self.tcx.span_of_impl(impl_def_id).unwrap(), E0521, "redundant default implementations of trait `{}`:", trait_ref); err.span_note(self.tcx.span_of_impl(self.tcx.map.local_def_id(prev_id)) .unwrap(), "redundant implementation is here:"); err.emit(); } } hir::ItemImpl(_, _, _, Some(_), _, _) => { let impl_def_id = self.tcx.map.local_def_id(item.id); let trait_ref = self.tcx.impl_trait_ref(impl_def_id).unwrap(); let trait_def_id = trait_ref.def_id; let _task = self.tcx.dep_graph.in_task( DepNode::CoherenceOverlapCheck(trait_def_id)); let def = self.tcx.lookup_trait_def(trait_def_id); // attempt to insert into the specialization graph let insert_result = def.add_impl_for_specialization(self.tcx, impl_def_id); // insertion failed due to overlap if let Err(overlap) = insert_result { // only print the Self type if it has at least some outer // concrete shell; otherwise, it's not adding much // information. let self_type = { overlap.on_trait_ref.substs.self_ty().and_then(|ty| { if ty.has_concrete_skeleton() { Some(format!(" for type `{}`", ty)) } else { None } }).unwrap_or(String::new()) }; let mut err = struct_span_err!( self.tcx.sess, self.tcx.span_of_impl(impl_def_id).unwrap(), E0119, "conflicting implementations of trait `{}`{}:", overlap.on_trait_ref, self_type); match self.tcx.span_of_impl(overlap.with_impl) { Ok(span) => { err.span_note(span, "conflicting implementation is here:"); } Err(cname) => { err.note(&format!("conflicting implementation in crate `{}`", cname)); } } err.emit(); } // check for overlap with the automatic `impl Trait for Trait` if let ty::TyTrait(ref data) = trait_ref.self_ty().sty { // This is something like impl Trait1 for Trait2. Illegal // if Trait1 is a supertrait of Trait2 or Trait2 is not object safe. if !traits::is_object_safe(self.tcx, data.principal_def_id()) { // This is an error, but it will be // reported by wfcheck. Ignore it // here. This is tested by // `coherence-impl-trait-for-trait-object-safe.rs`. } else { let mut supertrait_def_ids = traits::supertrait_def_ids(self.tcx, data.principal_def_id()); if supertrait_def_ids.any(|d| d == trait_def_id) { span_err!(self.tcx.sess, item.span, E0371, "the object type `{}` automatically \ implements the trait `{}`", trait_ref.self_ty(), self.tcx.item_path_str(trait_def_id)); } } } } _ => {} } } }
43.762376
98
0.510407
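The overlap checker in the record above reports two distinct problems: duplicate item names across inherent impls of the same type, and trait impls whose self types can overlap (E0119). A minimal, hypothetical sketch of code that would hit each path — the type and trait names here are made up for illustration and are not part of the file above:

```rust
struct Widget;

// Two inherent impl blocks on the same type are fine on their own; defining an
// item with the same name and namespace in both is what
// check_for_common_items_in_impls reports.
impl Widget {
    fn id(&self) -> u32 {
        1
    }
}
impl Widget {
    // fn id(&self) -> u32 { 2 } // duplicate definitions with name `id`
}

trait Label {
    fn label(&self) -> &'static str;
}

// A blanket impl plus a concrete impl cannot coexist in the specialization
// graph; inserting the second one is the E0119 "conflicting implementations"
// path handled above.
impl<T> Label for T {
    fn label(&self) -> &'static str {
        "anything"
    }
}
// impl Label for Widget { fn label(&self) -> &'static str { "widget" } } // E0119
```

Uncommenting either commented impl reproduces the corresponding diagnostic.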
62236d1cf100d628a00e5d965a99f489c1fa519a
537
pub(crate) const HEADER_SIZE: u32 = 0x70; pub(crate) const STRING_ID_ITEM_SIZE: u32 = 0x04; pub(crate) const TYPE_ID_ITEM_SIZE: u32 = 0x04; pub(crate) const PROTO_ID_ITEM_SIZE: u32 = 0x0c; pub(crate) const FIELD_ID_ITEM_SIZE: u32 = 0x08; pub(crate) const METHOD_ID_ITEM_SIZE: u32 = 0x08; pub(crate) const CLASS_DEF_ITEM_SIZE: u32 = 0x20; // pub(crate) const MAP_ITEM_SIZE: u32 = 12; // pub(crate) const TYPE_ITEM_SIZE: u32 = 2; // pub(crate) const ANNOTATION_SET_REF_SIZE: u32 = 4; // pub(crate) const ANNOTATION_SET_ITEM_SIZE: u32 = 4;
44.75
54
0.752328
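Since each ID item in the record above has a fixed size, the byte length of a whole ID section is just the item count from the DEX header multiplied by the matching constant. A small in-crate sketch of that arithmetic; the counts would come from a parsed header, which is not part of this snippet:

```rust
/// Total bytes occupied by the string_ids and type_ids sections, given the
/// item counts read from the DEX header.
pub(crate) fn id_sections_len(string_ids_count: u32, type_ids_count: u32) -> u32 {
    string_ids_count * STRING_ID_ITEM_SIZE + type_ids_count * TYPE_ID_ITEM_SIZE
}
```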
0ad0bb12b52961f54052d88fc91940afe5485a96
1,423
mod constants; mod game_events; mod game_logic; mod matchbox; mod rendering; mod states; use crate::{ constants::*, game_events::{BoardEventConsumer, CompoundEventType, EventBroker, GameEvent}, game_logic::{board::*, piece::*, ranges::*}, rendering::{BoardRender, CustomRenderContext}, }; use macroquad::{prelude::*, rand::srand}; use macroquad_canvas::Canvas2D; use crate::states::{core_game_state::CoreGameState, loading::LoadingState, GameState}; use std::{borrow::BorrowMut, cell::RefCell, rc::Rc}; use egui_macroquad::egui; use egui_macroquad::egui::emath; //use wasm_bindgen::prelude::*; fn window_conf() -> Conf { Conf { window_title: "Makrochess".to_owned(), window_width: WINDOW_WIDTH, window_height: WINDOW_HEIGHT, ..Default::default() } } #[macroquad::main(window_conf)] async fn main() { let mut state: Box<dyn GameState> = Box::new(LoadingState::new()); let canvas = Canvas2D::new(WINDOW_WIDTH as f32, WINDOW_HEIGHT as f32); loop { set_camera(&canvas.camera); clear_background(BLACK); if let Some(new_state) = state.update(&canvas) { state = new_state; } state.render(&canvas); set_default_camera(); clear_background(BLACK); canvas.draw(); if state.uses_egui() { egui_macroquad::draw(); } next_frame().await; } }
23.327869
86
0.6409
0e59ac6452533c9c39f5696baa2c8651c913e2a7
307
use observable_btree::BTree; #[tokio::main] async fn main() { let btree = BTree::start(1000); let ins = btree.insert("hello".to_string(), 546).await; assert!(ins.unwrap().is_none()); let cont = btree.contains("hello".to_string()).await; assert!(cont.unwrap()); print!("Done!") }
20.466667
59
0.62215
9b58044a6aa51a37826df155c9cb84062656f706
9,701
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 //! This module provides mock storage clients for tests. use anyhow::{Error, Result}; use futures::stream::BoxStream; use libra_crypto::{ed25519::Ed25519PrivateKey, HashValue, PrivateKey, Uniform}; use libra_types::{ access_path::AccessPath, account_address::AccountAddress, account_config::{from_currency_code_string, AccountResource, LBR_NAME}, account_state::AccountState, account_state_blob::AccountStateBlob, epoch_change::EpochChangeProof, event::EventHandle, get_with_proof::{RequestItem, ResponseItem}, ledger_info::{LedgerInfo, LedgerInfoWithSignatures}, move_resource::MoveResource, proof::{AccumulatorConsistencyProof, SparseMerkleProof, SparseMerkleRangeProof}, proto::types::{ request_item::RequestedItems, response_item::ResponseItems, AccountStateWithProof, GetAccountStateResponse, GetTransactionsResponse, LedgerInfoWithSignatures as ProtoLedgerInfoWithSignatures, RequestItem as ProtoRequestItem, ResponseItem as ProtoResponseItem, TransactionListWithProof, UpdateToLatestLedgerRequest, UpdateToLatestLedgerResponse, }, test_helpers::transaction_test_helpers::get_test_signed_txn, transaction::{Transaction, Version}, vm_error::StatusCode, }; use rand::{ rngs::{OsRng, StdRng}, Rng, SeedableRng, }; use std::{collections::BTreeMap, convert::TryFrom}; use storage_client::StorageRead; use storage_interface::StartupInfo; use storage_proto::{ BackupAccountStateResponse, BackupTransactionInfoResponse, BackupTransactionResponse, }; /// This is a mock of the storage read client used in tests. /// /// See the real /// [`StorageReadServiceClient`](../../../storage-client/struct.StorageReadServiceClient.html). #[derive(Clone)] pub struct MockStorageReadClient; #[async_trait::async_trait] impl StorageRead for MockStorageReadClient { async fn update_to_latest_ledger( &self, client_known_version: Version, request_items: Vec<RequestItem>, ) -> Result<( Vec<ResponseItem>, LedgerInfoWithSignatures, EpochChangeProof, AccumulatorConsistencyProof, )> { let request = libra_types::get_with_proof::UpdateToLatestLedgerRequest::new( client_known_version, request_items, ); let proto_request = request.into(); let proto_response = get_mock_update_to_latest_ledger(&proto_request); let response = libra_types::get_with_proof::UpdateToLatestLedgerResponse::try_from(proto_response) .unwrap(); let ret = ( response.response_items, response.ledger_info_with_sigs, response.epoch_change_proof, response.ledger_consistency_proof, ); Ok(ret) } async fn get_transactions( &self, _start_version: Version, _batch_size: u64, _ledger_version: Version, _fetch_events: bool, ) -> Result<libra_types::transaction::TransactionListWithProof> { unimplemented!() } async fn get_latest_state_root(&self) -> Result<(Version, HashValue)> { unimplemented!() } async fn get_latest_account_state( &self, _address: AccountAddress, ) -> Result<Option<AccountStateBlob>> { Ok(Some(get_mock_account_state_blob())) } async fn get_account_state_with_proof_by_version( &self, _address: AccountAddress, _version: Version, ) -> Result<(Option<AccountStateBlob>, SparseMerkleProof)> { unimplemented!(); } async fn get_startup_info(&self) -> Result<Option<StartupInfo>> { unimplemented!() } async fn get_epoch_change_ledger_infos( &self, _start_epoch: u64, _end_epoch: u64, ) -> Result<EpochChangeProof> { unimplemented!() } async fn backup_account_state( &self, _version: u64, ) -> Result<BoxStream<'_, Result<BackupAccountStateResponse, Error>>> { unimplemented!() } async fn 
get_account_state_range_proof( &self, _rightmost_key: HashValue, _version: Version, ) -> Result<SparseMerkleRangeProof> { unimplemented!() } async fn backup_transaction( &self, _start_version: Version, _num_transactions: u64, ) -> Result<BoxStream<'_, Result<BackupTransactionResponse, Error>>> { unimplemented!() } async fn backup_transaction_info( &self, _start_version: Version, _num_transaction_infos: u64, ) -> Result<BoxStream<'_, Result<BackupTransactionInfoResponse, Error>>> { unimplemented!() } async fn batch_fetch_config(&self, _access_paths: Vec<AccessPath>) -> Result<Vec<Vec<u8>>> { unimplemented!() } } fn get_mock_update_to_latest_ledger( req: &UpdateToLatestLedgerRequest, ) -> UpdateToLatestLedgerResponse { let mut resp = UpdateToLatestLedgerResponse::default(); for request_item in req.requested_items.iter() { resp.response_items .push(get_mock_response_item(request_item).unwrap()); } let mut ledger_info = libra_types::proto::types::LedgerInfo::default(); ledger_info.transaction_accumulator_hash = HashValue::zero().to_vec(); ledger_info.consensus_data_hash = HashValue::zero().to_vec(); ledger_info.consensus_block_id = HashValue::zero().to_vec(); ledger_info.version = 7; let ledger_info_with_sigs = ProtoLedgerInfoWithSignatures::from(LedgerInfoWithSignatures::new( LedgerInfo::try_from(ledger_info).unwrap(), BTreeMap::new(), )); resp.ledger_info_with_sigs = Some(ledger_info_with_sigs); resp } fn get_mock_response_item(request_item: &ProtoRequestItem) -> Result<ProtoResponseItem> { let mut response_item = ProtoResponseItem::default(); if let Some(ref requested_item) = request_item.requested_items { match requested_item { RequestedItems::GetAccountStateRequest(_request) => { let mut resp = GetAccountStateResponse::default(); let mut account_state_with_proof = AccountStateWithProof::default(); let blob = get_mock_account_state_blob().into(); let proof = { let ledger_info_to_transaction_info_proof = libra_types::proof::AccumulatorProof::new(vec![]); let transaction_info = libra_types::transaction::TransactionInfo::new( HashValue::zero(), HashValue::zero(), HashValue::zero(), 0, StatusCode::UNKNOWN_STATUS, ); let transaction_info_to_account_proof = libra_types::proof::SparseMerkleProof::new(None, vec![]); libra_types::proof::AccountStateProof::new( ledger_info_to_transaction_info_proof, transaction_info, transaction_info_to_account_proof, ) .into() }; account_state_with_proof.blob = Some(blob); account_state_with_proof.proof = Some(proof); resp.account_state_with_proof = Some(account_state_with_proof); response_item.response_items = Some(ResponseItems::GetAccountStateResponse(resp)); } RequestedItems::GetAccountTransactionBySequenceNumberRequest(_request) => { unimplemented!(); } RequestedItems::GetEventsByEventAccessPathRequest(_request) => { unimplemented!(); } RequestedItems::GetTransactionsRequest(request) => { let mut ret = TransactionListWithProof::default(); let sender = AccountAddress::new([1; AccountAddress::LENGTH]); if request.limit > 0 { let txns = get_mock_txn_data(sender, 0, request.limit - 1); ret.transactions = txns; } let mut resp = GetTransactionsResponse::default(); resp.txn_list_with_proof = Some(ret); response_item.response_items = Some(ResponseItems::GetTransactionsResponse(resp)); } } } Ok(response_item) } fn get_mock_account_state_blob() -> AccountStateBlob { let account_resource = AccountResource::new( 0, vec![], false, false, EventHandle::random_handle(0), EventHandle::random_handle(0), false, from_currency_code_string(LBR_NAME).unwrap(), ); let mut 
account_state = AccountState::default(); account_state.insert( AccountResource::resource_path(), lcs::to_bytes(&account_resource).unwrap(), ); AccountStateBlob::try_from(&account_state).unwrap() } fn get_mock_txn_data( address: AccountAddress, start_seq: u64, end_seq: u64, ) -> Vec<libra_types::proto::types::Transaction> { let mut seed_rng = OsRng; let seed_buf: [u8; 32] = seed_rng.gen(); let mut rng = StdRng::from_seed(seed_buf); let priv_key = Ed25519PrivateKey::generate(&mut rng); let mut txns = vec![]; for i in start_seq..=end_seq { let txn = Transaction::UserTransaction(get_test_signed_txn( address, i, &priv_key, priv_key.public_key(), None, )); txns.push(txn.into()); } txns }
34.895683
99
0.638388
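Because get_mock_update_to_latest_ledger in the record above fabricates a fixed ledger info (version 7) and produces one response item per request item, a test against the mock client can assert on exactly that shape. A hedged sketch, assuming a tokio test harness; only names visible in the snippet above are used:

```rust
#[tokio::test]
async fn mock_client_returns_canned_response() {
    let client = MockStorageReadClient;

    // No request items in, so no response items out. The ledger info and the
    // two proofs are the fixed values built by get_mock_update_to_latest_ledger.
    let (response_items, _ledger_info, _epoch_proof, _consistency_proof) = client
        .update_to_latest_ledger(0, vec![])
        .await
        .unwrap();

    assert!(response_items.is_empty());
}
```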
1642fbeb77fb6223f8dbfa9784572ba0f60ba962
1,398
#[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::TASKS_GOSLEEP { #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } } #[doc = r" Proxy"] pub struct _TASKS_GOSLEEPW<'a> { w: &'a mut W, } impl<'a> _TASKS_GOSLEEPW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0"] #[inline] pub fn tasks_gosleep(&mut self) -> _TASKS_GOSLEEPW { _TASKS_GOSLEEPW { w: self } } }
24.103448
59
0.51216
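This is the usual svd2rust-generated writer shape: write starts from the reset value and hands a W proxy to the closure, and _TASKS_GOSLEEPW sets bit 0. A usage sketch, assuming a peripheral handle from the same generated PAC that exposes this register as a field; the peripheral path and field name below are placeholders:

```rust
fn trigger_gosleep(periph: &pac::SOME_PERIPHERAL) {
    // Start from the reset value (0) and set only bit 0 (TASKS_GOSLEEP).
    periph.tasks_gosleep.write(|w| w.tasks_gosleep().set_bit());

    // Equivalent raw write; `bits` is `unsafe` in this generated API.
    periph.tasks_gosleep.write(|w| unsafe { w.bits(0x1) });
}
```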
d95df850372e7ceb968c681ef7b42e3b9d101417
2,385
use crate::prelude::*; use quicksilver::prelude::{Window, MouseButton}; use specs::prelude::*; use crate::game::Game; use crate::gui::gui_components::TableTextProvider; use crate::gui::ui_state::UiState; use crate::gui::input::{ left_click::MapLeftClickSystem, MouseState, }; use crate::view::Frame; use crate::gui::gui_components::ResourcesComponent; pub (crate) struct MapMenuFrame<'a,'b> { text_provider: TableTextProvider, left_click_dispatcher: Dispatcher<'a,'b>, _hover_component: ResourcesComponent, } impl MapMenuFrame<'_,'_> { pub fn new<'a,'b>(game: &mut Game<'a,'b>, ep: EventPool) -> PadlResult<Self> { let mut left_click_dispatcher = DispatcherBuilder::new() .with(MapLeftClickSystem::new(ep), "", &[]) .build(); left_click_dispatcher.setup(&mut game.world); Ok(MapMenuFrame { text_provider: TableTextProvider::new(), left_click_dispatcher, _hover_component: ResourcesComponent::new()?, }) } } impl<'a,'b> Frame for MapMenuFrame<'a,'b> { type Error = PadlError; type State = Game<'a,'b>; type Graphics = Window; type Event = PadlEvent; fn draw(&mut self, state: &mut Self::State, window: &mut Self::Graphics) -> Result<(),Self::Error> { self.text_provider.reset(); let inner_area = state.render_menu_box(window)?; let selected_entity = state.world.fetch::<UiState>().selected_entity; if let Some(e) = selected_entity { state.render_entity_details( window, &inner_area, e, &mut self.text_provider, &mut self._hover_component, )?; } self.text_provider.finish_draw(); Ok(()) } fn left_click(&mut self, state: &mut Self::State, pos: (i32,i32)) -> Result<(),Self::Error> { state.click_buttons(pos); let mut ms = state.world.write_resource::<MouseState>(); *ms = MouseState(pos.into(), Some(MouseButton::Left)); std::mem::drop(ms); // This drop is essential! The internal RefCell will not be release otherwise self.left_click_dispatcher.dispatch(&state.world); Ok(()) } fn leave(&mut self, _state: &mut Self::State) -> Result<(), Self::Error> { self.text_provider.hide(); Ok(()) } }
35.073529
105
0.607547
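The impl above only does something when a frame manager drives the Frame hooks; that manager is not shown in this record, but based purely on the signatures in the snippet a driver would call them roughly like this (game, window, and event_pool are placeholders for the real engine objects):

```rust
fn drive_map_menu(
    game: &mut Game<'_, '_>,
    window: &mut Window,
    event_pool: EventPool,
) -> PadlResult<()> {
    let mut frame = MapMenuFrame::new(game, event_pool)?;
    frame.draw(game, window)?;          // renders menu box + selected entity details
    frame.left_click(game, (120, 80))?; // forwards the click to the ECS dispatcher
    frame.leave(game)?;                 // hides the text provider on frame switch
    Ok(())
}
```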
1c6ec8aa68a51eaef22a30544db29c926e6eb63d
2,469
// Copyright (c) Aptos // SPDX-License-Identifier: Apache-2.0 use anyhow::{bail, Result}; use aptos_types::{ access_path::AccessPath, account_address::AccountAddress, account_state::AccountState, contract_event::ContractEvent, }; use aptos_vm::move_vm_ext::MoveResolverExt; use move_deps::{ move_core_types::language_storage::StructTag, move_resource_viewer::MoveValueAnnotator, }; use std::{ collections::BTreeMap, fmt::{Display, Formatter}, }; pub use move_deps::move_resource_viewer::{AnnotatedMoveStruct, AnnotatedMoveValue}; pub struct AptosValueAnnotator<'a, T>(MoveValueAnnotator<'a, T>); /// A wrapper around `MoveValueAnnotator` that adds a few aptos-specific funtionalities. #[derive(Debug)] pub struct AnnotatedAccountStateBlob(BTreeMap<StructTag, AnnotatedMoveStruct>); impl<'a, T: MoveResolverExt> AptosValueAnnotator<'a, T> { pub fn new(storage: &'a T) -> Self { Self(MoveValueAnnotator::new(storage)) } pub fn view_resource(&self, tag: &StructTag, blob: &[u8]) -> Result<AnnotatedMoveStruct> { self.0.view_resource(tag, blob) } pub fn view_access_path( &self, access_path: AccessPath, blob: &[u8], ) -> Result<AnnotatedMoveStruct> { match access_path.get_struct_tag() { Some(tag) => self.view_resource(&tag, blob), None => bail!("Bad resource access path"), } } pub fn view_contract_event(&self, event: &ContractEvent) -> Result<AnnotatedMoveValue> { self.0.view_value(event.type_tag(), event.event_data()) } pub fn view_account_state(&self, state: &AccountState) -> Result<AnnotatedAccountStateBlob> { let mut output = BTreeMap::new(); for (k, v) in state.iter() { let tag = match AccessPath::new(AccountAddress::random(), k.to_vec()).get_struct_tag() { Some(t) => t, None => { println!("Uncached AccessPath: {:?}", k); continue; } }; let value = self.view_resource(&tag, v)?; output.insert(tag, value); } Ok(AnnotatedAccountStateBlob(output)) } } impl Display for AnnotatedAccountStateBlob { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { writeln!(f, "{{")?; for v in self.0.values() { write!(f, "{}", v)?; writeln!(f, ",")?; } writeln!(f, "}}") } }
32.064935
100
0.613609
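A short usage sketch for the annotator in the record above: given any storage implementing MoveResolverExt plus a resource's StructTag and raw bytes, view_resource yields an AnnotatedMoveStruct, which the module's own Display impl already formats with `{}`. The storage, tag, and blob values are placeholders:

```rust
fn print_resource<T: MoveResolverExt>(storage: &T, tag: &StructTag, blob: &[u8]) -> Result<()> {
    let annotator = AptosValueAnnotator::new(storage);
    let annotated = annotator.view_resource(tag, blob)?;
    // AnnotatedMoveStruct is formatted with `{}` elsewhere in this file, so
    // Display is available here as well.
    println!("{}", annotated);
    Ok(())
}
```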
d59614ba2bec3eb7a8d606e05331d961b02a038b
3,903
use super::{ BaseRocksSecondaryIndex, IndexId, Partition, RocksSecondaryIndex, RocksTable, TableId, }; use crate::base_rocks_secondary_index; use crate::metastore::{IdRow, MetaStoreEvent}; use crate::rocks_table_impl; use crate::table::Row; use byteorder::{BigEndian, WriteBytesExt}; use chrono::Utc; use rocksdb::DB; use serde::{Deserialize, Deserializer}; use std::ops::Sub; impl Partition { pub fn new(index_id: u64, min_value: Option<Row>, max_value: Option<Row>) -> Partition { Partition { index_id, min_value, max_value, parent_partition_id: None, active: true, warmed_up: false, main_table_row_count: 0, last_used: None, } } pub fn child(&self, id: u64) -> Partition { Partition { index_id: self.index_id, min_value: None, max_value: None, parent_partition_id: Some(id), active: false, warmed_up: false, main_table_row_count: 0, last_used: None, } } pub fn get_min_val(&self) -> &Option<Row> { &self.min_value } pub fn get_max_val(&self) -> &Option<Row> { &self.max_value } pub fn get_full_name(&self, partition_id: u64) -> Option<String> { self.parent_partition_id .and(Some(format!("{}.parquet", partition_id))) } pub fn to_active(&self, active: bool) -> Partition { let mut p = self.clone(); p.active = active; p } pub fn to_warmed_up(&self) -> Partition { let mut p = self.clone(); p.warmed_up = true; p } pub fn update_min_max_and_row_count( &self, min_value: Option<Row>, max_value: Option<Row>, main_table_row_count: u64, ) -> Partition { let mut p = self.clone(); p.min_value = min_value; p.max_value = max_value; p.main_table_row_count = main_table_row_count; p } pub fn update_last_used(&self) -> Self { let mut new = self.clone(); new.last_used = Some(Utc::now()); new } pub fn get_index_id(&self) -> u64 { self.index_id } pub fn parent_partition_id(&self) -> &Option<u64> { &self.parent_partition_id } pub fn is_active(&self) -> bool { self.active } pub fn is_warmed_up(&self) -> bool { self.warmed_up } pub fn main_table_row_count(&self) -> u64 { self.main_table_row_count } pub fn is_used(&self, timeout: u64) -> bool { self.last_used .map(|time| Utc::now().sub(time.clone()).num_seconds() < timeout as i64) .unwrap_or(false) } } #[derive(Clone, Copy, Debug)] pub(crate) enum PartitionRocksIndex { IndexId = 1, } rocks_table_impl!(Partition, PartitionRocksTable, TableId::Partitions, { vec![Box::new(PartitionRocksIndex::IndexId)] }); #[derive(Hash, Clone, Debug)] pub enum PartitionIndexKey { ByIndexId(u64), } base_rocks_secondary_index!(Partition, PartitionRocksIndex); impl RocksSecondaryIndex<Partition, PartitionIndexKey> for PartitionRocksIndex { fn typed_key_by(&self, row: &Partition) -> PartitionIndexKey { match self { PartitionRocksIndex::IndexId => PartitionIndexKey::ByIndexId(row.index_id), } } fn key_to_bytes(&self, key: &PartitionIndexKey) -> Vec<u8> { match key { PartitionIndexKey::ByIndexId(index_id) => { let mut buf = Vec::with_capacity(8); buf.write_u64::<BigEndian>(*index_id).unwrap(); buf } } } fn is_unique(&self) -> bool { match self { PartitionRocksIndex::IndexId => false, } } fn get_id(&self) -> IndexId { *self as IndexId } }
25.180645
92
0.580323
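The constructors and clone-and-update helpers above encode the partition lifecycle: a root partition starts active with open bounds, while a child created during repartitioning starts inactive and records its parent. A small illustration using only methods shown in this snippet:

```rust
#[test]
fn partition_lifecycle() {
    // Root partition for index 42; min/max are unknown until rows arrive.
    let root = Partition::new(42, None, None);
    assert!(root.is_active());
    assert!(root.parent_partition_id().is_none());

    // A child starts inactive and points back at its parent partition id.
    let child = root.child(7);
    assert!(!child.is_active());
    assert_eq!(*child.parent_partition_id(), Some(7));

    // Stats and activation go through the copy-on-write style helpers.
    let child = child
        .update_min_max_and_row_count(None, None, 1_000)
        .to_active(true);
    assert!(child.is_active());
    assert_eq!(child.main_table_row_count(), 1_000);
}
```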
d65777063685e98f51661530f63f625257270665
1,885
/* Copyright 2016 Martin Buck Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ //! FilterAllRandomAccessible, a filter to chain multiple IsFilterRandomAccessible with the and condition => must pass all filters to pass this filter use crate::*; //------------------------------------------------------------------------------ #[derive(Default)] /// FilterAllRandomAccessible, a filter to chain multiple IsFilterRandomAccessible with the and condition => must pass all filters to pass this filter pub struct FilterAllRandomAccessible<RA, T> where RA: IsRandomAccessible<T>, { pub filters: Vec<Box<dyn IsFilterRandomAccessible<RA, T>>>, } impl<RA, T> IsFilterRandomAccessible<RA, T> for FilterAllRandomAccessible<RA, T> where RA: IsRandomAccessible<T>, { fn filter(&self, ra: &RA, mut view: &mut View) { for f in &self.filters { f.filter(&ra, &mut view) } } }
39.270833
150
0.731565
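Because the struct above just owns boxed IsFilterRandomAccessible trait objects and runs them in order against the same View, composing it looks like the sketch below. FilterA and FilterB stand in for any two concrete filters from the crate and are not defined here:

```rust
fn apply_both<RA, T>(points: &RA, view: &mut View)
where
    RA: IsRandomAccessible<T>,
{
    let combined = FilterAllRandomAccessible {
        filters: vec![
            Box::new(FilterA::default()),
            Box::new(FilterB::default()),
        ],
    };
    // Each filter narrows the same view, so the result is the intersection
    // of what every filter would keep on its own.
    combined.filter(points, view);
}
```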
fe1cab6d428f177c8f08b6820a25493729af8786
3,565
use assert_cmd::prelude::*; use predicates::prelude::*; use std::process::Command; #[test] fn send_not_enough_args() -> Result<(), Box<std::error::Error>> { let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"))?; cmd.arg("send").arg("192.168.1.32"); cmd.assert().failure().stderr(predicate::str::contains( "The following required arguments were not provided", )); Ok(()) } #[test] fn send_to_malformed_ipv4_group() -> Result<(), Box<std::error::Error>> { let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"))?; cmd.arg("send") .arg("192.168.3.32") .arg("4001") .arg("bad.ip.address") ; cmd.assert() .failure() .stderr(predicate::str::contains("Invalid value for '<group-ip>'")); Ok(()) } #[test] fn send_to_out_of_range_port() -> Result<(), Box<std::error::Error>> { let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"))?; cmd.arg("send") .arg("192.168.3.32") .arg("65537") .arg("231.0.3.1") ; cmd.assert() .failure() .stderr(predicate::str::contains("Invalid value for '<port>'")); Ok(()) } #[test] fn send_to_malformed_ipv4_interface() -> Result<(), Box<std::error::Error>> { let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"))?; cmd.arg("send") .arg("192324.168.3.32") .arg("4001") .arg("231.0.3.1") ; cmd.assert() .failure() .stderr(predicate::str::contains("Invalid value for '<nic>'")); Ok(()) } #[test] fn listen_not_enough_args() -> Result<(), Box<std::error::Error>> { let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"))?; cmd.arg("listen").arg("192.168.3.32"); cmd.assert().failure().stderr(predicate::str::contains( "The following required arguments were not provided", )); Ok(()) } #[test] fn listen_must_be_given_at_least_one_multicast_group() -> Result<(), Box<std::error::Error>> { let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"))?; cmd.arg("listen").arg("192.168.3.32").arg("4001"); cmd.assert().failure().stderr(predicate::str::contains( "The following required arguments were not provided", )); Ok(()) } #[test] fn listen_to_malformed_ipv4_group() -> Result<(), Box<std::error::Error>> { let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"))?; cmd.arg("listen") .arg("192.168.3.32") .arg("4001") .arg("bad.ip.address") ; cmd.assert() .failure() .stderr(predicate::str::contains("Invalid value for '<group-ip>'")); Ok(()) } #[test] fn listen_to_out_of_range_port() -> Result<(), Box<std::error::Error>> { let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"))?; cmd.arg("listen") .arg("192.168.3.32") .arg("65537") .arg("231.0.3.1") ; cmd.assert() .failure() .stderr(predicate::str::contains("Invalid value for '<port>'")); Ok(()) } #[test] fn listen_to_malformed_ipv4_interface() -> Result<(), Box<std::error::Error>> { let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"))?; cmd.arg("listen") .arg("192324.168.3.32") .arg("4001") .arg("231.0.3.1") ; cmd.assert() .failure() .stderr(predicate::str::contains("Invalid value for '<nic>'")); Ok(()) } #[test] fn unrecognised_command() -> Result<(), Box<std::error::Error>> { let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"))?; cmd.arg("nonexistentcmd"); cmd.assert().failure(); Ok(()) }
28.070866
94
0.578401
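The failure tests above pin down the expected argument order (subcommand, then <nic> <port> <group-ip>). For contrast, a sketch of the matching well-formed invocation; since actually joining or sending on a multicast group depends on the host network, this only exercises argument parsing and asserts nothing about delivery:

```rust
#[test]
fn send_accepts_well_formed_args() -> Result<(), Box<std::error::Error>> {
    let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME"))?;
    cmd.arg("send")
        .arg("192.168.3.32") // nic
        .arg("4001") // port
        .arg("231.0.3.1"); // multicast group
    // No assertion on cmd.assert() here: success depends on the environment,
    // unlike the deterministic validation failures tested above.
    Ok(())
}
```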
4abbe79b2dd57528be81f26128ba5c0c956d6725
17,924
// Copyright (c) 2016 Anatoly Ikorsky // // Licensed under the Apache License, Version 2.0 // <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT // license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. All files in the project carrying such notice may not be copied, // modified, or distributed except according to those terms. pub use self::{read_packet::ReadPacket, write_packet::WritePacket}; use bytes::BytesMut; use futures_core::{ready, stream}; use futures_util::stream::{FuturesUnordered, StreamExt}; use mio::net::{TcpKeepalive, TcpSocket}; use mysql_common::proto::codec::PacketCodec as PacketCodecInner; use native_tls::{Certificate, Identity, TlsConnector}; use pin_project::pin_project; #[cfg(unix)] use tokio::io::AsyncWriteExt; use tokio::{ io::{AsyncRead, AsyncWrite, ErrorKind::Interrupted, ReadBuf}, net::TcpStream, }; use tokio_util::codec::{Decoder, Encoder, Framed, FramedParts}; #[cfg(unix)] use std::path::Path; use std::{ fmt, fs::File, future::Future, io::{ self, ErrorKind::{BrokenPipe, NotConnected, Other}, Read, }, net::{SocketAddr, ToSocketAddrs}, ops::{Deref, DerefMut}, pin::Pin, task::{Context, Poll}, time::Duration, }; use crate::{error::IoError, opts::SslOpts}; #[cfg(unix)] use crate::io::socket::Socket; macro_rules! with_interrupted { ($e:expr) => { loop { match $e { Poll::Ready(Err(err)) if err.kind() == Interrupted => continue, x => break x, } } }; } mod read_packet; mod socket; mod write_packet; #[derive(Debug, Default)] pub struct PacketCodec(PacketCodecInner); impl Deref for PacketCodec { type Target = PacketCodecInner; fn deref(&self) -> &Self::Target { &self.0 } } impl DerefMut for PacketCodec { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl Decoder for PacketCodec { type Item = Vec<u8>; type Error = IoError; fn decode(&mut self, src: &mut BytesMut) -> std::result::Result<Option<Self::Item>, IoError> { Ok(self.0.decode(src)?) } } impl Encoder<Vec<u8>> for PacketCodec { type Error = IoError; fn encode(&mut self, item: Vec<u8>, dst: &mut BytesMut) -> std::result::Result<(), IoError> { Ok(self.0.encode(item, dst)?) } } #[pin_project(project = EndpointProj)] #[derive(Debug)] pub(crate) enum Endpoint { Plain(Option<TcpStream>), Secure(#[pin] tokio_native_tls::TlsStream<TcpStream>), #[cfg(unix)] Socket(#[pin] Socket), } /// This future will check that TcpStream is live. /// /// This check is similar to a one, implemented by GitHub team for the go-sql-driver/mysql. #[derive(Debug)] struct CheckTcpStream<'a>(&'a mut TcpStream); impl Future for CheckTcpStream<'_> { type Output = io::Result<()>; fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { match self.0.poll_read_ready(cx) { Poll::Ready(Ok(())) => { // stream is readable let mut buf = [0_u8; 1]; match self.0.try_read(&mut buf) { Ok(0) => Poll::Ready(Err(io::Error::new(BrokenPipe, "broken pipe"))), Ok(_) => Poll::Ready(Err(io::Error::new(Other, "stream should be empty"))), Err(err) if err.kind() == io::ErrorKind::WouldBlock => Poll::Ready(Ok(())), Err(err) => Poll::Ready(Err(err)), } } Poll::Ready(Err(err)) => Poll::Ready(Err(err)), Poll::Pending => Poll::Ready(Ok(())), } } } impl Endpoint { /// Checks, that connection is alive. 
async fn check(&mut self) -> std::result::Result<(), IoError> { //return Ok(()); match self { Endpoint::Plain(Some(stream)) => { CheckTcpStream(stream).await?; Ok(()) } Endpoint::Secure(tls_stream) => { CheckTcpStream(tls_stream.get_mut().get_mut().get_mut()).await?; Ok(()) } #[cfg(unix)] Endpoint::Socket(socket) => { socket.write(&[]).await?; Ok(()) } Endpoint::Plain(None) => unreachable!(), } } pub fn is_secure(&self) -> bool { matches!(self, Endpoint::Secure(_)) } pub fn set_tcp_nodelay(&self, val: bool) -> io::Result<()> { match *self { Endpoint::Plain(Some(ref stream)) => stream.set_nodelay(val)?, Endpoint::Plain(None) => unreachable!(), Endpoint::Secure(ref stream) => { stream.get_ref().get_ref().get_ref().set_nodelay(val)? } #[cfg(unix)] Endpoint::Socket(_) => (/* inapplicable */), } Ok(()) } pub async fn make_secure( &mut self, domain: String, ssl_opts: SslOpts, ) -> std::result::Result<(), IoError> { #[cfg(unix)] if let Endpoint::Socket(_) = self { // inapplicable return Ok(()); } let mut builder = TlsConnector::builder(); if let Some(root_cert_path) = ssl_opts.root_cert_path() { let mut root_cert_data = vec![]; let mut root_cert_file = File::open(root_cert_path)?; root_cert_file.read_to_end(&mut root_cert_data)?; let root_certs = Certificate::from_der(&*root_cert_data) .map(|x| vec![x]) .or_else(|_| { pem::parse_many(&*root_cert_data) .iter() .map(pem::encode) .map(|s| Certificate::from_pem(s.as_bytes())) .collect() })?; for root_cert in root_certs { builder.add_root_certificate(root_cert); } } if let Some(pkcs12_path) = ssl_opts.pkcs12_path() { let der = std::fs::read(pkcs12_path)?; let identity = Identity::from_pkcs12(&*der, ssl_opts.password().unwrap_or(""))?; builder.identity(identity); } builder.danger_accept_invalid_hostnames(ssl_opts.skip_domain_validation()); builder.danger_accept_invalid_certs(ssl_opts.accept_invalid_certs()); let tls_connector: tokio_native_tls::TlsConnector = builder.build()?.into(); *self = match self { Endpoint::Plain(stream) => { let stream = stream.take().unwrap(); let tls_stream = tls_connector.connect(&*domain, stream).await?; Endpoint::Secure(tls_stream) } Endpoint::Secure(_) => unreachable!(), #[cfg(unix)] Endpoint::Socket(_) => unreachable!(), }; Ok(()) } } impl From<TcpStream> for Endpoint { fn from(stream: TcpStream) -> Self { Endpoint::Plain(Some(stream)) } } #[cfg(unix)] impl From<Socket> for Endpoint { fn from(socket: Socket) -> Self { Endpoint::Socket(socket) } } impl From<tokio_native_tls::TlsStream<TcpStream>> for Endpoint { fn from(stream: tokio_native_tls::TlsStream<TcpStream>) -> Self { Endpoint::Secure(stream) } } impl AsyncRead for Endpoint { fn poll_read( self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut ReadBuf<'_>, ) -> Poll<std::result::Result<(), tokio::io::Error>> { let mut this = self.project(); with_interrupted!(match this { EndpointProj::Plain(ref mut stream) => { Pin::new(stream.as_mut().unwrap()).poll_read(cx, buf) } EndpointProj::Secure(ref mut stream) => stream.as_mut().poll_read(cx, buf), #[cfg(unix)] EndpointProj::Socket(ref mut stream) => stream.as_mut().poll_read(cx, buf), }) } } impl AsyncWrite for Endpoint { fn poll_write( self: Pin<&mut Self>, cx: &mut Context, buf: &[u8], ) -> Poll<std::result::Result<usize, tokio::io::Error>> { let mut this = self.project(); with_interrupted!(match this { EndpointProj::Plain(ref mut stream) => { Pin::new(stream.as_mut().unwrap()).poll_write(cx, buf) } EndpointProj::Secure(ref mut stream) => stream.as_mut().poll_write(cx, buf), #[cfg(unix)] EndpointProj::Socket(ref mut 
stream) => stream.as_mut().poll_write(cx, buf), }) } fn poll_flush( self: Pin<&mut Self>, cx: &mut Context, ) -> Poll<std::result::Result<(), tokio::io::Error>> { let mut this = self.project(); with_interrupted!(match this { EndpointProj::Plain(ref mut stream) => { Pin::new(stream.as_mut().unwrap()).poll_flush(cx) } EndpointProj::Secure(ref mut stream) => stream.as_mut().poll_flush(cx), #[cfg(unix)] EndpointProj::Socket(ref mut stream) => stream.as_mut().poll_flush(cx), }) } fn poll_shutdown( self: Pin<&mut Self>, cx: &mut Context, ) -> Poll<std::result::Result<(), tokio::io::Error>> { let mut this = self.project(); with_interrupted!(match this { EndpointProj::Plain(ref mut stream) => { Pin::new(stream.as_mut().unwrap()).poll_shutdown(cx) } EndpointProj::Secure(ref mut stream) => stream.as_mut().poll_shutdown(cx), #[cfg(unix)] EndpointProj::Socket(ref mut stream) => stream.as_mut().poll_shutdown(cx), }) } } /// A Stream, connected to MySql server. pub struct Stream { closed: bool, pub(crate) codec: Option<Box<Framed<Endpoint, PacketCodec>>>, } impl fmt::Debug for Stream { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "Stream (endpoint={:?})", self.codec.as_ref().unwrap().get_ref() ) } } impl Stream { #[cfg(unix)] fn new<T: Into<Endpoint>>(endpoint: T) -> Self { let endpoint = endpoint.into(); Self { closed: false, codec: Box::new(Framed::new(endpoint, PacketCodec::default())).into(), } } pub(crate) async fn connect_tcp<S>(addr: S, keepalive: Option<Duration>) -> io::Result<Stream> where S: ToSocketAddrs, { // TODO: Use tokio to setup keepalive (see tokio-rs/tokio#3082) async fn connect_stream( addr: SocketAddr, keepalive_opts: Option<TcpKeepalive>, ) -> io::Result<TcpStream> { let socket = if addr.is_ipv6() { TcpSocket::new_v6()? } else { TcpSocket::new_v4()? }; if let Some(keepalive_opts) = keepalive_opts { socket.set_keepalive_params(keepalive_opts)?; } let stream = tokio::task::spawn_blocking(move || { let mut stream = socket.connect(addr)?; let mut poll = mio::Poll::new()?; let mut events = mio::Events::with_capacity(1024); poll.registry() .register(&mut stream, mio::Token(0), mio::Interest::WRITABLE)?; loop { poll.poll(&mut events, None)?; for event in &events { if event.token() == mio::Token(0) && event.is_error() { return Err(io::Error::new( io::ErrorKind::ConnectionRefused, "Connection refused", )); } if event.token() == mio::Token(0) && event.is_writable() { // The socket connected (probably, it could still be a spurious // wakeup) return Ok::<_, io::Error>(stream); } } } }) .await??; #[cfg(unix)] let std_stream = unsafe { use std::os::unix::prelude::*; let fd = stream.into_raw_fd(); std::net::TcpStream::from_raw_fd(fd) }; #[cfg(windows)] let std_stream = unsafe { use std::os::windows::prelude::*; let fd = stream.into_raw_socket(); std::net::TcpStream::from_raw_socket(fd) }; Ok(TcpStream::from_std(std_stream)?) 
} let keepalive_opts = keepalive.map(|time| TcpKeepalive::new().with_time(time)); match addr.to_socket_addrs() { Ok(addresses) => { let mut streams = FuturesUnordered::new(); for address in addresses { streams.push(connect_stream(address, keepalive_opts.clone())); } let mut err = None; while let Some(stream) = streams.next().await { match stream { Err(e) => { err = Some(e); } Ok(stream) => { return Ok(Stream { closed: false, codec: Box::new(Framed::new(stream.into(), PacketCodec::default())) .into(), }); } } } if let Some(e) = err { Err(e) } else { Err(io::Error::new( io::ErrorKind::InvalidInput, "could not resolve to any address", )) } } Err(err) => Err(err), } } #[cfg(unix)] pub(crate) async fn connect_socket<P: AsRef<Path>>(path: P) -> io::Result<Stream> { Ok(Stream::new(Socket::new(path).await?)) } pub(crate) fn set_tcp_nodelay(&self, val: bool) -> io::Result<()> { self.codec.as_ref().unwrap().get_ref().set_tcp_nodelay(val) } pub(crate) async fn make_secure( &mut self, domain: String, ssl_opts: SslOpts, ) -> crate::error::Result<()> { let codec = self.codec.take().unwrap(); let FramedParts { mut io, codec, .. } = codec.into_parts(); io.make_secure(domain, ssl_opts).await?; let codec = Framed::new(io, codec); self.codec = Some(Box::new(codec)); Ok(()) } pub(crate) fn is_secure(&self) -> bool { self.codec.as_ref().unwrap().get_ref().is_secure() } pub(crate) fn reset_seq_id(&mut self) { if let Some(codec) = self.codec.as_mut() { codec.codec_mut().reset_seq_id(); } } pub(crate) fn sync_seq_id(&mut self) { if let Some(codec) = self.codec.as_mut() { codec.codec_mut().sync_seq_id(); } } pub(crate) fn set_max_allowed_packet(&mut self, max_allowed_packet: usize) { if let Some(codec) = self.codec.as_mut() { codec.codec_mut().max_allowed_packet = max_allowed_packet; } } pub(crate) fn compress(&mut self, level: crate::Compression) { if let Some(codec) = self.codec.as_mut() { codec.codec_mut().compress(level); } } /// Checks, that connection is alive. 
pub(crate) async fn check(&mut self) -> std::result::Result<(), IoError> { if let Some(codec) = self.codec.as_mut() { codec.get_mut().check().await?; } Ok(()) } pub(crate) async fn close(mut self) -> std::result::Result<(), IoError> { self.closed = true; if let Some(mut codec) = self.codec { use futures_sink::Sink; futures_util::future::poll_fn(|cx| match Pin::new(&mut *codec).poll_close(cx) { Poll::Ready(Err(IoError::Io(err))) if err.kind() == NotConnected => { Poll::Ready(Ok(())) } x => x, }) .await?; } Ok(()) } } impl stream::Stream for Stream { type Item = std::result::Result<Vec<u8>, IoError>; fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { if !self.closed { let item = ready!(Pin::new(self.codec.as_mut().unwrap()).poll_next(cx)).transpose()?; Poll::Ready(Ok(item).transpose()) } else { Poll::Ready(None) } } } #[cfg(test)] mod test { #[cfg(unix)] // no sane way to retrieve current keepalive value on windows #[tokio::test] async fn should_connect_with_keepalive() { use crate::{test_misc::get_opts, Conn}; let opts = get_opts() .tcp_keepalive(Some(42_000_u32)) .prefer_socket(false); let mut conn: Conn = Conn::new(opts).await.unwrap(); let stream = conn.stream_mut().unwrap(); let endpoint = stream.codec.as_mut().unwrap().get_ref(); let stream = match endpoint { super::Endpoint::Plain(Some(stream)) => stream, super::Endpoint::Secure(tls_stream) => tls_stream.get_ref().get_ref().get_ref(), _ => unreachable!(), }; let sock = unsafe { use std::os::unix::prelude::*; let raw = stream.as_raw_fd(); socket2::Socket::from_raw_fd(raw) }; assert_eq!( sock.keepalive().unwrap(), Some(std::time::Duration::from_millis(42_000)), ); std::mem::forget(sock); conn.disconnect().await.unwrap(); } }
31.780142
99
0.519248
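Putting the pieces of the record above together, the crate-internal flow it implies is: connect over TCP (optionally with keepalive), tune the socket, then upgrade the endpoint in place with make_secure. A sketch under those assumptions; the address, domain, and SslOpts value are placeholders, these functions are pub(crate), and the io::Error-to-crate-error conversions are assumed to exist as they do elsewhere in the crate:

```rust
async fn open_stream(ssl_opts: Option<SslOpts>) -> crate::error::Result<Stream> {
    let mut stream =
        Stream::connect_tcp("127.0.0.1:3306", Some(Duration::from_secs(30))).await?;
    stream.set_tcp_nodelay(true)?;

    if let Some(ssl_opts) = ssl_opts {
        // Replaces Endpoint::Plain with Endpoint::Secure inside the framed codec.
        stream.make_secure("db.example.com".into(), ssl_opts).await?;
        debug_assert!(stream.is_secure());
    }
    Ok(stream)
}
```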
e8b15521ca11d2e0f0d71b595804cf67e69f21ac
3,823
use nom::bytes::complete::tag; use nom::IResult; //space; use crate::address::port_ref; use crate::common::{be_u8, ws}; use language::syntax::{InputMapping, OutputMapping}; pub fn input_item(input: &[u8]) -> IResult<&[u8], InputMapping> { let (remaining, (port, _, input_ref)) = nom::sequence::tuple((port_ref, ws(tag("->")), be_u8))(input)?; let mapping = InputMapping { from: port, to: input_ref.into(), }; Ok((remaining, mapping)) } pub fn inputs(input: &[u8]) -> IResult<&[u8], Vec<InputMapping>> { nom::multi::separated_list1(ws(tag(",")), input_item)(input) } pub fn output_item(input: &[u8]) -> IResult<&[u8], OutputMapping> { let (remaining, (input_ref, _, port)) = nom::sequence::tuple((be_u8, ws(tag("->")), port_ref))(input)?; let mapping = OutputMapping { from: input_ref.into(), to: port, }; Ok((remaining, mapping)) } pub fn outputs(input: &[u8]) -> IResult<&[u8], Vec<OutputMapping>> { nom::multi::separated_list1(ws(tag(",")), output_item)(input) } #[cfg(test)] mod tests { use super::*; use crate::common::tests::*; use crate::common::to_input; use language::address::{Node, Port}; #[test] fn test_parse_input_item() { let res_in = input_item(to_input(b"IN:1 -> 3")); assert_full_result( res_in, InputMapping { from: Port::new(Node::In, 1.into()), to: 3.into(), }, ); let res_node = input_item(to_input(b"#node:32 -> 1")); assert_full_result( res_node, InputMapping { from: Port::named_port(&"node", 32.into()), to: 1.into(), }, ); } #[test] fn test_parse_inputs() { let res_one = inputs(to_input(b"#n:7 -> 14")); assert_full_result( res_one, vec![InputMapping { from: Port::named_port(&"n", 7.into()), to: 14.into(), }], ); let res_many = inputs(to_input(b"OUT:1 -> 2, #abc:3 -> 4")); assert_full_result( res_many, vec![ InputMapping { from: Port::new(Node::Out, 1.into()), to: 2.into(), }, InputMapping { from: Port::named_port(&"abc", 3.into()), to: 4.into(), }, ], ); } #[test] fn test_parse_output_item() { let res_in = output_item(to_input(b"1 -> OUT:3")); assert_full_result( res_in, OutputMapping { from: 1.into(), to: Port::new(Node::Out, 3.into()), }, ); let res_node = output_item(to_input(b"1 -> #node:32")); assert_full_result( res_node, OutputMapping { from: 1.into(), to: Port::named_port(&"node", 32.into()), }, ); } #[test] fn test_parse_outputs() { let res_one = outputs(to_input(b"3 -> #n:7")); assert_full_result( res_one, vec![OutputMapping { from: 3.into(), to: Port::named_port(&"n", 7.into()), }], ); let res_many = outputs(to_input(b"1 -> OUT:2, 3 -> #abc:4")); assert_full_result( res_many, vec![ OutputMapping { from: 1.into(), to: Port::new(Node::Out, 2.into()), }, OutputMapping { from: 3.into(), to: Port::named_port(&"abc", 4.into()), }, ], ); } }
27.307143
71
0.461679
9c994d29202eec751a87aeb954fe543dfb29fa26
138,303
// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use self::Entry::*; use self::VacantEntryState::*; use intrinsics::unlikely; use collections::CollectionAllocErr; use cell::Cell; use borrow::Borrow; use cmp::max; use fmt::{self, Debug}; #[allow(deprecated)] use hash::{Hash, Hasher, BuildHasher, SipHasher13}; use iter::{FromIterator, FusedIterator}; use mem::{self, replace}; use ops::{Deref, DerefMut, Index}; use sys; use super::table::{self, Bucket, EmptyBucket, Fallibility, FullBucket, FullBucketMut, RawTable, SafeHash}; use super::table::BucketState::{Empty, Full}; use super::table::Fallibility::{Fallible, Infallible}; const MIN_NONZERO_RAW_CAPACITY: usize = 32; // must be a power of two /// The default behavior of HashMap implements a maximum load factor of 90.9%. #[derive(Clone)] struct DefaultResizePolicy; impl DefaultResizePolicy { #[inline] fn new() -> DefaultResizePolicy { DefaultResizePolicy } /// A hash map's "capacity" is the number of elements it can hold without /// being resized. Its "raw capacity" is the number of slots required to /// provide that capacity, accounting for maximum loading. The raw capacity /// is always zero or a power of two. #[inline] fn try_raw_capacity(&self, len: usize) -> Result<usize, CollectionAllocErr> { if len == 0 { Ok(0) } else { // 1. Account for loading: `raw_capacity >= len * 1.1`. // 2. Ensure it is a power of two. // 3. Ensure it is at least the minimum size. let mut raw_cap = len.checked_mul(11) .map(|l| l / 10) .and_then(|l| l.checked_next_power_of_two()) .ok_or(CollectionAllocErr::CapacityOverflow)?; raw_cap = max(MIN_NONZERO_RAW_CAPACITY, raw_cap); Ok(raw_cap) } } #[inline] fn raw_capacity(&self, len: usize) -> usize { self.try_raw_capacity(len).expect("raw_capacity overflow") } /// The capacity of the given raw capacity. #[inline] fn capacity(&self, raw_cap: usize) -> usize { // This doesn't have to be checked for overflow since allocation size // in bytes will overflow earlier than multiplication by 10. // // As per https://github.com/rust-lang/rust/pull/30991 this is updated // to be: (raw_cap * den + den - 1) / num (raw_cap * 10 + 10 - 1) / 11 } } // The main performance trick in this hashmap is called Robin Hood Hashing. // It gains its excellent performance from one essential operation: // // If an insertion collides with an existing element, and that element's // "probe distance" (how far away the element is from its ideal location) // is higher than how far we've already probed, swap the elements. // // This massively lowers variance in probe distance, and allows us to get very // high load factors with good performance. The 90% load factor I use is rather // conservative. // // > Why a load factor of approximately 90%? // // In general, all the distances to initial buckets will converge on the mean. // At a load factor of α, the odds of finding the target bucket after k // probes is approximately 1-α^k. If we set this equal to 50% (since we converge // on the mean) and set k=8 (64-byte cache line / 8-byte hash), α=0.92. I round // this down to make the math easier on the CPU and avoid its FPU. 
// Since on average we start the probing in the middle of a cache line, this // strategy pulls in two cache lines of hashes on every lookup. I think that's // pretty good, but if you want to trade off some space, it could go down to one // cache line on average with an α of 0.84. // // > Wait, what? Where did you get 1-α^k from? // // On the first probe, your odds of a collision with an existing element is α. // The odds of doing this twice in a row is approximately α^2. For three times, // α^3, etc. Therefore, the odds of colliding k times is α^k. The odds of NOT // colliding after k tries is 1-α^k. // // The paper from 1986 cited below mentions an implementation which keeps track // of the distance-to-initial-bucket histogram. This approach is not suitable // for modern architectures because it requires maintaining an internal data // structure. This allows very good first guesses, but we are most concerned // with guessing entire cache lines, not individual indexes. Furthermore, array // accesses are no longer linear and in one direction, as we have now. There // is also memory and cache pressure that this would entail that would be very // difficult to properly see in a microbenchmark. // // ## Future Improvements (FIXME!) // // Allow the load factor to be changed dynamically and/or at initialization. // // Also, would it be possible for us to reuse storage when growing the // underlying table? This is exactly the use case for 'realloc', and may // be worth exploring. // // ## Future Optimizations (FIXME!) // // Another possible design choice that I made without any real reason is // parameterizing the raw table over keys and values. Technically, all we need // is the size and alignment of keys and values, and the code should be just as // efficient (well, we might need one for power-of-two size and one for not...). // This has the potential to reduce code bloat in rust executables, without // really losing anything except 4 words (key size, key alignment, val size, // val alignment) which can be passed in to every call of a `RawTable` function. // This would definitely be an avenue worth exploring if people start complaining // about the size of rust executables. // // Annotate exceedingly likely branches in `table::make_hash` // and `search_hashed` to reduce instruction cache pressure // and mispredictions once it becomes possible (blocked on issue #11092). // // Shrinking the table could simply reallocate in place after moving buckets // to the first half. // // The growth algorithm (fragment of the Proof of Correctness) // -------------------- // // The growth algorithm is basically a fast path of the naive reinsertion- // during-resize algorithm. Other paths should never be taken. // // Consider growing a robin hood hashtable of capacity n. Normally, we do this // by allocating a new table of capacity `2n`, and then individually reinsert // each element in the old table into the new one. This guarantees that the // new table is a valid robin hood hashtable with all the desired statistical // properties. Remark that the order we reinsert the elements in should not // matter. For simplicity and efficiency, we will consider only linear // reinsertions, which consist of reinserting all elements in the old table // into the new one by increasing order of index. However we will not be // starting our reinsertions from index 0 in general. If we start from index // i, for the purpose of reinsertion we will consider all elements with real // index j < i to have virtual index n + j. 
// // Our hash generation scheme consists of generating a 64-bit hash and // truncating the most significant bits. When moving to the new table, we // simply introduce a new bit to the front of the hash. Therefore, if an // element has ideal index i in the old table, it can have one of two ideal // locations in the new table. If the new bit is 0, then the new ideal index // is i. If the new bit is 1, then the new ideal index is n + i. Intuitively, // we are producing two independent tables of size n, and for each element we // independently choose which table to insert it into with equal probability. // However, rather than wrapping around themselves on overflowing their // indexes, the first table overflows into the second, and the second into the // first. Visually, our new table will look something like: // // [yy_xxx_xxxx_xxx|xx_yyy_yyyy_yyy] // // Where x's are elements inserted into the first table, y's are elements // inserted into the second, and _'s are empty sections. We now define a few // key concepts that we will use later. Note that this is a very abstract // perspective of the table. A real resized table would be at least half // empty. // // Theorem: A linear robin hood reinsertion from the first ideal element // produces identical results to a linear naive reinsertion from the same // element. // // FIXME(Gankro, pczarn): review the proof and put it all in a separate README.md // // Adaptive early resizing // ---------------------- // To protect against degenerate performance scenarios (including DOS attacks), // the implementation includes an adaptive behavior that can resize the map // early (before its capacity is exceeded) when suspiciously long probe sequences // are encountered. // // With this algorithm in place it would be possible to turn a CPU attack into // a memory attack due to the aggressive resizing. To prevent that the // adaptive behavior only triggers when the map is at least half full. // This reduces the effectiveness of the algorithm but also makes it completely safe. // // The previous safety measure also prevents degenerate interactions with // really bad quality hash algorithms that can make normal inputs look like a // DOS attack. // const DISPLACEMENT_THRESHOLD: usize = 128; // // The threshold of 128 is chosen to minimize the chance of exceeding it. // In particular, we want that chance to be less than 10^-8 with a load of 90%. // For displacement, the smallest constant that fits our needs is 90, // so we round that up to 128. // // At a load factor of α, the odds of finding the target bucket after exactly n // unsuccessful probes[1] are // // Pr_α{displacement = n} = // (1 - α) / α * ∑_{k≥1} e^(-kα) * (kα)^(k+n) / (k + n)! * (1 - kα / (k + n + 1)) // // We use this formula to find the probability of triggering the adaptive behavior // // Pr_0.909{displacement > 128} = 1.601 * 10^-11 // // 1. Alfredo Viola (2005). Distributional analysis of Robin Hood linear probing // hashing with buckets. /// A hash map implemented with linear probing and Robin Hood bucket stealing. /// /// By default, `HashMap` uses a hashing algorithm selected to provide /// resistance against HashDoS attacks. The algorithm is randomly seeded, and a /// reasonable best-effort is made to generate this seed from a high quality, /// secure source of randomness provided by the host without blocking the /// program. Because of this, the randomness of the seed depends on the output /// quality of the system's random number generator when the seed is created. 
/// In particular, seeds generated when the system's entropy pool is abnormally /// low such as during system boot may be of a lower quality. /// /// The default hashing algorithm is currently SipHash 1-3, though this is /// subject to change at any point in the future. While its performance is very /// competitive for medium sized keys, other hashing algorithms will outperform /// it for small keys such as integers as well as large keys such as long /// strings, though those algorithms will typically *not* protect against /// attacks such as HashDoS. /// /// The hashing algorithm can be replaced on a per-`HashMap` basis using the /// [`default`], [`with_hasher`], and [`with_capacity_and_hasher`] methods. Many /// alternative algorithms are available on crates.io, such as the [`fnv`] crate. /// /// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although /// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`. /// If you implement these yourself, it is important that the following /// property holds: /// /// ```text /// k1 == k2 -> hash(k1) == hash(k2) /// ``` /// /// In other words, if two keys are equal, their hashes must be equal. /// /// It is a logic error for a key to be modified in such a way that the key's /// hash, as determined by the [`Hash`] trait, or its equality, as determined by /// the [`Eq`] trait, changes while it is in the map. This is normally only /// possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code. /// /// Relevant papers/articles: /// /// 1. Pedro Celis. ["Robin Hood Hashing"](https://cs.uwaterloo.ca/research/tr/1986/CS-86-14.pdf) /// 2. Emmanuel Goossaert. ["Robin Hood /// hashing"](http://codecapsule.com/2013/11/11/robin-hood-hashing/) /// 3. Emmanuel Goossaert. ["Robin Hood hashing: backward shift /// deletion"](http://codecapsule.com/2013/11/17/robin-hood-hashing-backward-shift-deletion/) /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// // Type inference lets us omit an explicit type signature (which /// // would be `HashMap<String, String>` in this example). /// let mut book_reviews = HashMap::new(); /// /// // Review some books. /// book_reviews.insert( /// "Adventures of Huckleberry Finn".to_string(), /// "My favorite book.".to_string(), /// ); /// book_reviews.insert( /// "Grimms' Fairy Tales".to_string(), /// "Masterpiece.".to_string(), /// ); /// book_reviews.insert( /// "Pride and Prejudice".to_string(), /// "Very enjoyable.".to_string(), /// ); /// book_reviews.insert( /// "The Adventures of Sherlock Holmes".to_string(), /// "Eye lyked it alot.".to_string(), /// ); /// /// // Check for a specific one. /// // When collections store owned values (String), they can still be /// // queried using references (&str). /// if !book_reviews.contains_key("Les Misérables") { /// println!("We've got {} reviews, but Les Misérables ain't one.", /// book_reviews.len()); /// } /// /// // oops, this review has a lot of spelling mistakes, let's delete it. /// book_reviews.remove("The Adventures of Sherlock Holmes"); /// /// // Look up the values associated with some keys. /// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"]; /// for &book in &to_find { /// match book_reviews.get(book) { /// Some(review) => println!("{}: {}", book, review), /// None => println!("{} is unreviewed.", book) /// } /// } /// /// // Iterate over everything. 
/// for (book, review) in &book_reviews { /// println!("{}: \"{}\"", book, review); /// } /// ``` /// /// `HashMap` also implements an [`Entry API`](#method.entry), which allows /// for more complex methods of getting, setting, updating and removing keys and /// their values: /// /// ``` /// use std::collections::HashMap; /// /// // type inference lets us omit an explicit type signature (which /// // would be `HashMap<&str, u8>` in this example). /// let mut player_stats = HashMap::new(); /// /// fn random_stat_buff() -> u8 { /// // could actually return some random value here - let's just return /// // some fixed value for now /// 42 /// } /// /// // insert a key only if it doesn't already exist /// player_stats.entry("health").or_insert(100); /// /// // insert a key using a function that provides a new value only if it /// // doesn't already exist /// player_stats.entry("defence").or_insert_with(random_stat_buff); /// /// // update a key, guarding against the key possibly not being set /// let stat = player_stats.entry("attack").or_insert(100); /// *stat += random_stat_buff(); /// ``` /// /// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`]. /// We must also derive [`PartialEq`]. /// /// [`Eq`]: ../../std/cmp/trait.Eq.html /// [`Hash`]: ../../std/hash/trait.Hash.html /// [`PartialEq`]: ../../std/cmp/trait.PartialEq.html /// [`RefCell`]: ../../std/cell/struct.RefCell.html /// [`Cell`]: ../../std/cell/struct.Cell.html /// [`default`]: #method.default /// [`with_hasher`]: #method.with_hasher /// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher /// [`fnv`]: https://crates.io/crates/fnv /// /// ``` /// use std::collections::HashMap; /// /// #[derive(Hash, Eq, PartialEq, Debug)] /// struct Viking { /// name: String, /// country: String, /// } /// /// impl Viking { /// /// Create a new Viking. /// fn new(name: &str, country: &str) -> Viking { /// Viking { name: name.to_string(), country: country.to_string() } /// } /// } /// /// // Use a HashMap to store the vikings' health points. /// let mut vikings = HashMap::new(); /// /// vikings.insert(Viking::new("Einar", "Norway"), 25); /// vikings.insert(Viking::new("Olaf", "Denmark"), 24); /// vikings.insert(Viking::new("Harald", "Iceland"), 12); /// /// // Use derived implementation to print the status of the vikings. /// for (viking, health) in &vikings { /// println!("{:?} has {} hp", viking, health); /// } /// ``` /// /// A `HashMap` with fixed list of elements can be initialized from an array: /// /// ``` /// use std::collections::HashMap; /// /// fn main() { /// let timber_resources: HashMap<&str, i32> = /// [("Norway", 100), /// ("Denmark", 50), /// ("Iceland", 10)] /// .iter().cloned().collect(); /// // use the values stored in map /// } /// ``` #[derive(Clone)] #[stable(feature = "rust1", since = "1.0.0")] pub struct HashMap<K, V, S = RandomState> { // All hashes are keyed on these values, to prevent hash collision attacks. hash_builder: S, table: RawTable<K, V>, resize_policy: DefaultResizePolicy, } /// Search for a pre-hashed key. /// If you don't already know the hash, use search or search_mut instead #[inline] fn search_hashed<K, V, M, F>(table: M, hash: SafeHash, is_match: F) -> InternalEntry<K, V, M> where M: Deref<Target = RawTable<K, V>>, F: FnMut(&K) -> bool { // This is the only function where capacity can be zero. To avoid // undefined behavior when Bucket::new gets the raw bucket in this // case, immediately return the appropriate search result. 
if table.capacity() == 0 { return InternalEntry::TableIsEmpty; } search_hashed_nonempty(table, hash, is_match, true) } /// Search for a pre-hashed key when the hash map is known to be non-empty. #[inline] fn search_hashed_nonempty<K, V, M, F>(table: M, hash: SafeHash, mut is_match: F, compare_hashes: bool) -> InternalEntry<K, V, M> where M: Deref<Target = RawTable<K, V>>, F: FnMut(&K) -> bool { // Do not check the capacity as an extra branch could slow the lookup. let size = table.size(); let mut probe = Bucket::new(table, hash); let mut displacement = 0; loop { let full = match probe.peek() { Empty(bucket) => { // Found a hole! return InternalEntry::Vacant { hash, elem: NoElem(bucket, displacement), }; } Full(bucket) => bucket, }; let probe_displacement = full.displacement(); if probe_displacement < displacement { // Found a luckier bucket than me. // We can finish the search early if we hit any bucket // with a lower distance to initial bucket than we've probed. return InternalEntry::Vacant { hash, elem: NeqElem(full, probe_displacement), }; } // If the hash doesn't match, it can't be this one.. if !compare_hashes || hash == full.hash() { // If the key doesn't match, it can't be this one.. if is_match(full.read().0) { return InternalEntry::Occupied { elem: full }; } } displacement += 1; probe = full.next(); debug_assert!(displacement <= size); } } /// Same as `search_hashed_nonempty` but for mutable access. #[inline] fn search_hashed_nonempty_mut<K, V, M, F>(table: M, hash: SafeHash, mut is_match: F, compare_hashes: bool) -> InternalEntry<K, V, M> where M: DerefMut<Target = RawTable<K, V>>, F: FnMut(&K) -> bool { // Do not check the capacity as an extra branch could slow the lookup. let size = table.size(); let mut probe = Bucket::new(table, hash); let mut displacement = 0; loop { let mut full = match probe.peek() { Empty(bucket) => { // Found a hole! return InternalEntry::Vacant { hash, elem: NoElem(bucket, displacement), }; } Full(bucket) => bucket, }; let probe_displacement = full.displacement(); if probe_displacement < displacement { // Found a luckier bucket than me. // We can finish the search early if we hit any bucket // with a lower distance to initial bucket than we've probed. return InternalEntry::Vacant { hash, elem: NeqElem(full, probe_displacement), }; } // If the hash doesn't match, it can't be this one.. if hash == full.hash() || !compare_hashes { // If the key doesn't match, it can't be this one.. if is_match(full.read_mut().0) { return InternalEntry::Occupied { elem: full }; } } displacement += 1; probe = full.next(); debug_assert!(displacement <= size); } } fn pop_internal<K, V>(starting_bucket: FullBucketMut<K, V>) -> (K, V, &mut RawTable<K, V>) { let (empty, retkey, retval) = starting_bucket.take(); let mut gap = match empty.gap_peek() { Ok(b) => b, Err(b) => return (retkey, retval, b.into_table()), }; while gap.full().displacement() != 0 { gap = match gap.shift() { Ok(b) => b, Err(b) => { return (retkey, retval, b.into_table()); }, }; } // Now we've done all our shifting. Return the value we grabbed earlier. (retkey, retval, gap.into_table()) } /// Perform robin hood bucket stealing at the given `bucket`. You must /// also pass that bucket's displacement so we don't have to recalculate it. /// /// `hash`, `key`, and `val` are the elements to "robin hood" into the hashtable. 
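///
/// For intuition, here is a minimal, self-contained sketch of the displacement
/// rule this routine relies on. The `displacement` helper below is hypothetical
/// and simplified; the real table computes this via the bucket's
/// `displacement()` method.
///
/// ```
/// // Distance from an element's ideal ("home") bucket to the bucket it
/// // actually occupies, in a table whose capacity is a power of two.
/// fn displacement(home: usize, idx: usize, capacity: usize) -> usize {
///     idx.wrapping_sub(home) % capacity
/// }
///
/// // The incoming element steals a slot as soon as it is "poorer" (further
/// // from its home bucket) than the incumbent; the incumbent is then
/// // re-inserted further along the probe sequence.
/// assert_eq!(displacement(3, 5, 8), 2);
/// assert_eq!(displacement(6, 1, 8), 3); // probe sequence wrapped around
/// ```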
fn robin_hood<'a, K: 'a, V: 'a>(bucket: FullBucketMut<'a, K, V>, mut displacement: usize, mut hash: SafeHash, mut key: K, mut val: V) -> FullBucketMut<'a, K, V> { let size = bucket.table().size(); let raw_capacity = bucket.table().capacity(); // There can be at most `size - dib` buckets to displace, because // in the worst case, there are `size` elements and we already are // `displacement` buckets away from the initial one. let idx_end = (bucket.index() + size - bucket.displacement()) % raw_capacity; // Save the *starting point*. let mut bucket = bucket.stash(); loop { let (old_hash, old_key, old_val) = bucket.replace(hash, key, val); hash = old_hash; key = old_key; val = old_val; loop { displacement += 1; let probe = bucket.next(); debug_assert!(probe.index() != idx_end); let full_bucket = match probe.peek() { Empty(bucket) => { // Found a hole! let bucket = bucket.put(hash, key, val); // Now that it's stolen, just read the value's pointer // right out of the table! Go back to the *starting point*. // // This use of `into_table` is misleading. It turns the // bucket, which is a FullBucket on top of a // FullBucketMut, into just one FullBucketMut. The "table" // refers to the inner FullBucketMut in this context. return bucket.into_table(); } Full(bucket) => bucket, }; let probe_displacement = full_bucket.displacement(); bucket = full_bucket; // Robin hood! Steal the spot. if probe_displacement < displacement { displacement = probe_displacement; break; } } } } impl<K, V, S> HashMap<K, V, S> where K: Eq + Hash, S: BuildHasher { fn make_hash<X: ?Sized>(&self, x: &X) -> SafeHash where X: Hash { table::make_hash(&self.hash_builder, x) } /// Search for a key, yielding the index if it's found in the hashtable. /// If you already have the hash for the key lying around, or if you need an /// InternalEntry, use search_hashed or search_hashed_nonempty. #[inline] fn search<'a, Q: ?Sized>(&'a self, q: &Q) -> Option<FullBucket<K, V, &'a RawTable<K, V>>> where K: Borrow<Q>, Q: Eq + Hash { if self.is_empty() { return None; } let hash = self.make_hash(q); search_hashed_nonempty(&self.table, hash, |k| q.eq(k.borrow()), true) .into_occupied_bucket() } #[inline] fn search_mut<'a, Q: ?Sized>(&'a mut self, q: &Q) -> Option<FullBucket<K, V, &'a mut RawTable<K, V>>> where K: Borrow<Q>, Q: Eq + Hash { if self.is_empty() { return None; } let hash = self.make_hash(q); search_hashed_nonempty(&mut self.table, hash, |k| q.eq(k.borrow()), true) .into_occupied_bucket() } // The caller should ensure that invariants by Robin Hood Hashing hold // and that there's space in the underlying table. fn insert_hashed_ordered(&mut self, hash: SafeHash, k: K, v: V) { let mut buckets = Bucket::new(&mut self.table, hash); let start_index = buckets.index(); loop { // We don't need to compare hashes for value swap. // Not even DIBs for Robin Hood. buckets = match buckets.peek() { Empty(empty) => { empty.put(hash, k, v); return; } Full(b) => b.into_bucket(), }; buckets.next(); debug_assert!(buckets.index() != start_index); } } } impl<K: Hash + Eq, V> HashMap<K, V, RandomState> { /// Creates an empty `HashMap`. /// /// The hash map is initially created with a capacity of 0, so it will not allocate until it /// is first inserted into. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// let mut map: HashMap<&str, i32> = HashMap::new(); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new() -> HashMap<K, V, RandomState> { Default::default() } /// Creates an empty `HashMap` with the specified capacity. 
/// /// The hash map will be able to hold at least `capacity` elements without /// reallocating. If `capacity` is 0, the hash map will not allocate. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// let mut map: HashMap<&str, i32> = HashMap::with_capacity(10); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn with_capacity(capacity: usize) -> HashMap<K, V, RandomState> { HashMap::with_capacity_and_hasher(capacity, Default::default()) } } impl<K, V, S> HashMap<K, V, S> where K: Eq + Hash, S: BuildHasher { /// Creates an empty `HashMap` which will use the given hash builder to hash /// keys. /// /// The created map has the default initial capacity. /// /// Warning: `hash_builder` is normally randomly generated, and /// is designed to allow HashMaps to be resistant to attacks that /// cause many collisions and very poor performance. Setting it /// manually using this function can expose a DoS attack vector. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// use std::collections::hash_map::RandomState; /// /// let s = RandomState::new(); /// let mut map = HashMap::with_hasher(s); /// map.insert(1, 2); /// ``` #[inline] #[stable(feature = "hashmap_build_hasher", since = "1.7.0")] pub fn with_hasher(hash_builder: S) -> HashMap<K, V, S> { HashMap { hash_builder, resize_policy: DefaultResizePolicy::new(), table: RawTable::new(0), } } /// Creates an empty `HashMap` with the specified capacity, using `hash_builder` /// to hash the keys. /// /// The hash map will be able to hold at least `capacity` elements without /// reallocating. If `capacity` is 0, the hash map will not allocate. /// /// Warning: `hash_builder` is normally randomly generated, and /// is designed to allow HashMaps to be resistant to attacks that /// cause many collisions and very poor performance. Setting it /// manually using this function can expose a DoS attack vector. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// use std::collections::hash_map::RandomState; /// /// let s = RandomState::new(); /// let mut map = HashMap::with_capacity_and_hasher(10, s); /// map.insert(1, 2); /// ``` #[inline] #[stable(feature = "hashmap_build_hasher", since = "1.7.0")] pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> HashMap<K, V, S> { let resize_policy = DefaultResizePolicy::new(); let raw_cap = resize_policy.raw_capacity(capacity); HashMap { hash_builder, resize_policy, table: RawTable::new(raw_cap), } } /// Returns a reference to the map's [`BuildHasher`]. /// /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// use std::collections::hash_map::RandomState; /// /// let hasher = RandomState::new(); /// let map: HashMap<i32, i32> = HashMap::with_hasher(hasher); /// let hasher: &RandomState = map.hasher(); /// ``` #[stable(feature = "hashmap_public_hasher", since = "1.9.0")] pub fn hasher(&self) -> &S { &self.hash_builder } /// Returns the number of elements the map can hold without reallocating. /// /// This number is a lower bound; the `HashMap<K, V>` might be able to hold /// more, but is guaranteed to be able to hold at least this many. 
/// /// # Examples /// /// ``` /// use std::collections::HashMap; /// let map: HashMap<i32, i32> = HashMap::with_capacity(100); /// assert!(map.capacity() >= 100); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn capacity(&self) -> usize { self.resize_policy.capacity(self.raw_capacity()) } /// Returns the hash map's raw capacity. #[inline] fn raw_capacity(&self) -> usize { self.table.capacity() } /// Reserves capacity for at least `additional` more elements to be inserted /// in the `HashMap`. The collection may reserve more space to avoid /// frequent reallocations. /// /// # Panics /// /// Panics if the new allocation size overflows [`usize`]. /// /// [`usize`]: ../../std/primitive.usize.html /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// let mut map: HashMap<&str, i32> = HashMap::new(); /// map.reserve(10); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve(&mut self, additional: usize) { match self.reserve_internal(additional, Infallible) { Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"), Err(CollectionAllocErr::AllocErr) => unreachable!(), Ok(()) => { /* yay */ } } } /// Tries to reserve capacity for at least `additional` more elements to be inserted /// in the given `HashMap<K,V>`. The collection may reserve more space to avoid /// frequent reallocations. /// /// # Errors /// /// If the capacity overflows, or the allocator reports a failure, then an error /// is returned. /// /// # Examples /// /// ``` /// #![feature(try_reserve)] /// use std::collections::HashMap; /// let mut map: HashMap<&str, isize> = HashMap::new(); /// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.reserve_internal(additional, Fallible) } #[inline] fn reserve_internal(&mut self, additional: usize, fallibility: Fallibility) -> Result<(), CollectionAllocErr> { let remaining = self.capacity() - self.len(); // this can't overflow if remaining < additional { let min_cap = self.len() .checked_add(additional) .ok_or(CollectionAllocErr::CapacityOverflow)?; let raw_cap = self.resize_policy.try_raw_capacity(min_cap)?; self.try_resize(raw_cap, fallibility)?; } else if self.table.tag() && remaining <= self.len() { // Probe sequence is too long and table is half full, // resize early to reduce probing length. let new_capacity = self.table.capacity() * 2; self.try_resize(new_capacity, fallibility)?; } Ok(()) } /// Resizes the internal vectors to a new capacity. It's your /// responsibility to: /// 1) Ensure `new_raw_cap` is enough for all the elements, accounting /// for the load factor. /// 2) Ensure `new_raw_cap` is a power of two or zero. 
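    ///
    /// For illustration only, a sketch of how a caller might derive a valid
    /// `new_raw_cap` from a desired element count. The helper and its 10%
    /// headroom are hypothetical; the actual policy lives in `DefaultResizePolicy`.
    ///
    /// ```
    /// // Round up to a power of two, with some slack for the load factor
    /// // (overflow handling omitted).
    /// fn raw_cap_for(len: usize) -> usize {
    ///     if len == 0 { 0 } else { (len + len / 10 + 1).next_power_of_two() }
    /// }
    ///
    /// assert_eq!(raw_cap_for(0), 0);
    /// assert!(raw_cap_for(100).is_power_of_two());
    /// assert!(raw_cap_for(100) >= 100);
    /// ```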
#[inline(never)] #[cold] fn try_resize( &mut self, new_raw_cap: usize, fallibility: Fallibility, ) -> Result<(), CollectionAllocErr> { assert!(self.table.size() <= new_raw_cap); assert!(new_raw_cap.is_power_of_two() || new_raw_cap == 0); let mut old_table = replace( &mut self.table, match fallibility { Infallible => RawTable::new(new_raw_cap), Fallible => RawTable::try_new(new_raw_cap)?, } ); let old_size = old_table.size(); if old_table.size() == 0 { return Ok(()); } let mut bucket = Bucket::head_bucket(&mut old_table); // This is how the buckets might be laid out in memory: // ($ marks an initialized bucket) // ________________ // |$$$_$$$$$$_$$$$$| // // But we've skipped the entire initial cluster of buckets // and will continue iteration in this order: // ________________ // |$$$$$$_$$$$$ // ^ wrap around once end is reached // ________________ // $$$_____________| // ^ exit once table.size == 0 loop { bucket = match bucket.peek() { Full(bucket) => { let h = bucket.hash(); let (b, k, v) = bucket.take(); self.insert_hashed_ordered(h, k, v); if b.table().size() == 0 { break; } b.into_bucket() } Empty(b) => b.into_bucket(), }; bucket.next(); } assert_eq!(self.table.size(), old_size); Ok(()) } /// Shrinks the capacity of the map as much as possible. It will drop /// down as much as possible while maintaining the internal rules /// and possibly leaving some space in accordance with the resize policy. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100); /// map.insert(1, 2); /// map.insert(3, 4); /// assert!(map.capacity() >= 100); /// map.shrink_to_fit(); /// assert!(map.capacity() >= 2); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn shrink_to_fit(&mut self) { let new_raw_cap = self.resize_policy.raw_capacity(self.len()); if self.raw_capacity() != new_raw_cap { let old_table = replace(&mut self.table, RawTable::new(new_raw_cap)); let old_size = old_table.size(); // Shrink the table. Naive algorithm for resizing: for (h, k, v) in old_table.into_iter() { self.insert_hashed_nocheck(h, k, v); } debug_assert_eq!(self.table.size(), old_size); } } /// Shrinks the capacity of the map with a lower limit. It will drop /// down no lower than the supplied limit while maintaining the internal rules /// and possibly leaving some space in accordance with the resize policy. /// /// Panics if the current capacity is smaller than the supplied /// minimum capacity. /// /// # Examples /// /// ``` /// #![feature(shrink_to)] /// use std::collections::HashMap; /// /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100); /// map.insert(1, 2); /// map.insert(3, 4); /// assert!(map.capacity() >= 100); /// map.shrink_to(10); /// assert!(map.capacity() >= 10); /// map.shrink_to(0); /// assert!(map.capacity() >= 2); /// ``` #[unstable(feature = "shrink_to", reason = "new API", issue="56431")] pub fn shrink_to(&mut self, min_capacity: usize) { assert!(self.capacity() >= min_capacity, "Tried to shrink to a larger capacity"); let new_raw_cap = self.resize_policy.raw_capacity(max(self.len(), min_capacity)); if self.raw_capacity() != new_raw_cap { let old_table = replace(&mut self.table, RawTable::new(new_raw_cap)); let old_size = old_table.size(); // Shrink the table. 
Naive algorithm for resizing: for (h, k, v) in old_table.into_iter() { self.insert_hashed_nocheck(h, k, v); } debug_assert_eq!(self.table.size(), old_size); } } /// Insert a pre-hashed key-value pair, without first checking /// that there's enough room in the buckets. Returns a reference to the /// newly insert value. /// /// If the key already exists, the hashtable will be returned untouched /// and a reference to the existing element will be returned. fn insert_hashed_nocheck(&mut self, hash: SafeHash, k: K, v: V) -> Option<V> { let entry = search_hashed(&mut self.table, hash, |key| *key == k).into_entry(k); match entry { Some(Occupied(mut elem)) => Some(elem.insert(v)), Some(Vacant(elem)) => { elem.insert(v); None } None => unreachable!(), } } /// An iterator visiting all keys in arbitrary order. /// The iterator element type is `&'a K`. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); /// map.insert("a", 1); /// map.insert("b", 2); /// map.insert("c", 3); /// /// for key in map.keys() { /// println!("{}", key); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn keys(&self) -> Keys<K, V> { Keys { inner: self.iter() } } /// An iterator visiting all values in arbitrary order. /// The iterator element type is `&'a V`. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); /// map.insert("a", 1); /// map.insert("b", 2); /// map.insert("c", 3); /// /// for val in map.values() { /// println!("{}", val); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn values(&self) -> Values<K, V> { Values { inner: self.iter() } } /// An iterator visiting all values mutably in arbitrary order. /// The iterator element type is `&'a mut V`. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); /// /// map.insert("a", 1); /// map.insert("b", 2); /// map.insert("c", 3); /// /// for val in map.values_mut() { /// *val = *val + 10; /// } /// /// for val in map.values() { /// println!("{}", val); /// } /// ``` #[stable(feature = "map_values_mut", since = "1.10.0")] pub fn values_mut(&mut self) -> ValuesMut<K, V> { ValuesMut { inner: self.iter_mut() } } /// An iterator visiting all key-value pairs in arbitrary order. /// The iterator element type is `(&'a K, &'a V)`. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); /// map.insert("a", 1); /// map.insert("b", 2); /// map.insert("c", 3); /// /// for (key, val) in map.iter() { /// println!("key: {} val: {}", key, val); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn iter(&self) -> Iter<K, V> { Iter { inner: self.table.iter() } } /// An iterator visiting all key-value pairs in arbitrary order, /// with mutable references to the values. /// The iterator element type is `(&'a K, &'a mut V)`. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); /// map.insert("a", 1); /// map.insert("b", 2); /// map.insert("c", 3); /// /// // Update all values /// for (_, val) in map.iter_mut() { /// *val *= 2; /// } /// /// for (key, val) in &map { /// println!("key: {} val: {}", key, val); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn iter_mut(&mut self) -> IterMut<K, V> { IterMut { inner: self.table.iter_mut() } } /// Gets the given key's corresponding entry in the map for in-place manipulation. 
/// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut letters = HashMap::new(); /// /// for ch in "a short treatise on fungi".chars() { /// let counter = letters.entry(ch).or_insert(0); /// *counter += 1; /// } /// /// assert_eq!(letters[&'s'], 2); /// assert_eq!(letters[&'t'], 3); /// assert_eq!(letters[&'u'], 1); /// assert_eq!(letters.get(&'y'), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn entry(&mut self, key: K) -> Entry<K, V> { // Gotta resize now. self.reserve(1); let hash = self.make_hash(&key); search_hashed(&mut self.table, hash, |q| q.eq(&key)) .into_entry(key).expect("unreachable") } /// Returns the number of elements in the map. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut a = HashMap::new(); /// assert_eq!(a.len(), 0); /// a.insert(1, "a"); /// assert_eq!(a.len(), 1); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn len(&self) -> usize { self.table.size() } /// Returns true if the map contains no elements. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut a = HashMap::new(); /// assert!(a.is_empty()); /// a.insert(1, "a"); /// assert!(!a.is_empty()); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn is_empty(&self) -> bool { self.len() == 0 } /// Clears the map, returning all key-value pairs as an iterator. Keeps the /// allocated memory for reuse. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut a = HashMap::new(); /// a.insert(1, "a"); /// a.insert(2, "b"); /// /// for (k, v) in a.drain().take(1) { /// assert!(k == 1 || k == 2); /// assert!(v == "a" || v == "b"); /// } /// /// assert!(a.is_empty()); /// ``` #[inline] #[stable(feature = "drain", since = "1.6.0")] pub fn drain(&mut self) -> Drain<K, V> { Drain { inner: self.table.drain() } } /// Clears the map, removing all key-value pairs. Keeps the allocated memory /// for reuse. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut a = HashMap::new(); /// a.insert(1, "a"); /// a.clear(); /// assert!(a.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn clear(&mut self) { self.drain(); } /// Returns a reference to the value corresponding to the key. /// /// The key may be any borrowed form of the map's key type, but /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for /// the key type. /// /// [`Eq`]: ../../std/cmp/trait.Eq.html /// [`Hash`]: ../../std/hash/trait.Hash.html /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); /// map.insert(1, "a"); /// assert_eq!(map.get(&1), Some(&"a")); /// assert_eq!(map.get(&2), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V> where K: Borrow<Q>, Q: Hash + Eq { self.search(k).map(|bucket| bucket.into_refs().1) } /// Returns the key-value pair corresponding to the supplied key. /// /// The supplied key may be any borrowed form of the map's key type, but /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for /// the key type. 
/// /// [`Eq`]: ../../std/cmp/trait.Eq.html /// [`Hash`]: ../../std/hash/trait.Hash.html /// /// # Examples /// /// ``` /// #![feature(map_get_key_value)] /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); /// map.insert(1, "a"); /// assert_eq!(map.get_key_value(&1), Some((&1, &"a"))); /// assert_eq!(map.get_key_value(&2), None); /// ``` #[unstable(feature = "map_get_key_value", issue = "49347")] pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)> where K: Borrow<Q>, Q: Hash + Eq { self.search(k).map(|bucket| bucket.into_refs()) } /// Returns true if the map contains a value for the specified key. /// /// The key may be any borrowed form of the map's key type, but /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for /// the key type. /// /// [`Eq`]: ../../std/cmp/trait.Eq.html /// [`Hash`]: ../../std/hash/trait.Hash.html /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); /// map.insert(1, "a"); /// assert_eq!(map.contains_key(&1), true); /// assert_eq!(map.contains_key(&2), false); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool where K: Borrow<Q>, Q: Hash + Eq { self.search(k).is_some() } /// Returns a mutable reference to the value corresponding to the key. /// /// The key may be any borrowed form of the map's key type, but /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for /// the key type. /// /// [`Eq`]: ../../std/cmp/trait.Eq.html /// [`Hash`]: ../../std/hash/trait.Hash.html /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); /// map.insert(1, "a"); /// if let Some(x) = map.get_mut(&1) { /// *x = "b"; /// } /// assert_eq!(map[&1], "b"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V> where K: Borrow<Q>, Q: Hash + Eq { self.search_mut(k).map(|bucket| bucket.into_mut_refs().1) } /// Inserts a key-value pair into the map. /// /// If the map did not have this key present, [`None`] is returned. /// /// If the map did have this key present, the value is updated, and the old /// value is returned. The key is not updated, though; this matters for /// types that can be `==` without being identical. See the [module-level /// documentation] for more. /// /// [`None`]: ../../std/option/enum.Option.html#variant.None /// [module-level documentation]: index.html#insert-and-complex-keys /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); /// assert_eq!(map.insert(37, "a"), None); /// assert_eq!(map.is_empty(), false); /// /// map.insert(37, "b"); /// assert_eq!(map.insert(37, "c"), Some("b")); /// assert_eq!(map[&37], "c"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn insert(&mut self, k: K, v: V) -> Option<V> { let hash = self.make_hash(&k); self.reserve(1); self.insert_hashed_nocheck(hash, k, v) } /// Removes a key from the map, returning the value at the key if the key /// was previously in the map. /// /// The key may be any borrowed form of the map's key type, but /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for /// the key type. 
/// /// [`Eq`]: ../../std/cmp/trait.Eq.html /// [`Hash`]: ../../std/hash/trait.Hash.html /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); /// map.insert(1, "a"); /// assert_eq!(map.remove(&1), Some("a")); /// assert_eq!(map.remove(&1), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V> where K: Borrow<Q>, Q: Hash + Eq { self.search_mut(k).map(|bucket| pop_internal(bucket).1) } /// Removes a key from the map, returning the stored key and value if the /// key was previously in the map. /// /// The key may be any borrowed form of the map's key type, but /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for /// the key type. /// /// [`Eq`]: ../../std/cmp/trait.Eq.html /// [`Hash`]: ../../std/hash/trait.Hash.html /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// # fn main() { /// let mut map = HashMap::new(); /// map.insert(1, "a"); /// assert_eq!(map.remove_entry(&1), Some((1, "a"))); /// assert_eq!(map.remove(&1), None); /// # } /// ``` #[stable(feature = "hash_map_remove_entry", since = "1.27.0")] pub fn remove_entry<Q: ?Sized>(&mut self, k: &Q) -> Option<(K, V)> where K: Borrow<Q>, Q: Hash + Eq { self.search_mut(k) .map(|bucket| { let (k, v, _) = pop_internal(bucket); (k, v) }) } /// Retains only the elements specified by the predicate. /// /// In other words, remove all pairs `(k, v)` such that `f(&k,&mut v)` returns `false`. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect(); /// map.retain(|&k, _| k % 2 == 0); /// assert_eq!(map.len(), 4); /// ``` #[stable(feature = "retain_hash_collection", since = "1.18.0")] pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&K, &mut V) -> bool { if self.table.size() == 0 { return; } let mut elems_left = self.table.size(); let mut bucket = Bucket::head_bucket(&mut self.table); bucket.prev(); let start_index = bucket.index(); while elems_left != 0 { bucket = match bucket.peek() { Full(mut full) => { elems_left -= 1; let should_remove = { let (k, v) = full.read_mut(); !f(k, v) }; if should_remove { let prev_raw = full.raw(); let (_, _, t) = pop_internal(full); Bucket::new_from(prev_raw, t) } else { full.into_bucket() } }, Empty(b) => { b.into_bucket() } }; bucket.prev(); // reverse iteration debug_assert!(elems_left == 0 || bucket.index() != start_index); } } } impl<K, V, S> HashMap<K, V, S> where K: Eq + Hash, S: BuildHasher { /// Creates a raw entry builder for the HashMap. /// /// Raw entries provide the lowest level of control for searching and /// manipulating a map. They must be manually initialized with a hash and /// then manually searched. After this, insertions into a vacant entry /// still require an owned key to be provided. /// /// Raw entries are useful for such exotic situations as: /// /// * Hash memoization /// * Deferring the creation of an owned key until it is known to be required /// * Using a search key that doesn't work with the Borrow trait /// * Using custom comparison logic without newtype wrappers /// /// Because raw entries provide much more low-level control, it's much easier /// to put the HashMap into an inconsistent state which, while memory-safe, /// will cause the map to produce seemingly random results. Higher-level and /// more foolproof APIs like `entry` should be preferred when possible. 
    ///
    /// In particular, the hash used to initialize the raw entry must still be
    /// consistent with the hash of the key that is ultimately stored in the entry.
    /// This is because implementations of HashMap may need to recompute hashes
    /// when resizing, at which point only the keys are available.
    ///
    /// Raw entries give mutable access to the keys. This must not be used
    /// to modify how the key would compare or hash, as the map will not re-evaluate
    /// where the key should go, meaning the keys may become "lost" if their
    /// location does not reflect their state. For instance, if you change a key
    /// so that the map now contains keys which compare equal, search may start
    /// acting erratically, with two keys randomly masking each other. Implementations
    /// are free to assume this doesn't happen (within the limits of memory-safety).
    #[inline(always)]
    #[unstable(feature = "hash_raw_entry", issue = "56167")]
    pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<K, V, S> {
        self.reserve(1);
        RawEntryBuilderMut { map: self }
    }

    /// Creates a raw immutable entry builder for the HashMap.
    ///
    /// Raw entries provide the lowest level of control for searching and
    /// manipulating a map. They must be manually initialized with a hash and
    /// then manually searched.
    ///
    /// This is useful for:
    /// * Hash memoization
    /// * Using a search key that doesn't work with the Borrow trait
    /// * Using custom comparison logic without newtype wrappers
    ///
    /// Unless you are in such a situation, higher-level and more foolproof APIs like
    /// `get` should be preferred.
    ///
    /// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`.
    #[unstable(feature = "hash_raw_entry", issue = "56167")]
    pub fn raw_entry(&self) -> RawEntryBuilder<K, V, S> {
        RawEntryBuilder { map: self }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, S> PartialEq for HashMap<K, V, S>
    where K: Eq + Hash,
          V: PartialEq,
          S: BuildHasher
{
    fn eq(&self, other: &HashMap<K, V, S>) -> bool {
        if self.len() != other.len() {
            return false;
        }

        self.iter().all(|(key, value)| other.get(key).map_or(false, |v| *value == *v))
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, S> Eq for HashMap<K, V, S>
    where K: Eq + Hash,
          V: Eq,
          S: BuildHasher
{
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, S> Debug for HashMap<K, V, S>
    where K: Eq + Hash + Debug,
          V: Debug,
          S: BuildHasher
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, S> Default for HashMap<K, V, S>
    where K: Eq + Hash,
          S: BuildHasher + Default
{
    /// Creates an empty `HashMap<K, V, S>`, with the `Default` value for the hasher.
    fn default() -> HashMap<K, V, S> {
        HashMap::with_hasher(Default::default())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, Q: ?Sized, V, S> Index<&'a Q> for HashMap<K, V, S>
    where K: Eq + Hash + Borrow<Q>,
          Q: Eq + Hash,
          S: BuildHasher
{
    type Output = V;

    /// Returns a reference to the value corresponding to the supplied key.
    ///
    /// # Panics
    ///
    /// Panics if the key is not present in the `HashMap`.
    #[inline]
    fn index(&self, key: &Q) -> &V {
        self.get(key).expect("no entry found for key")
    }
}

/// An iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
/// documentation for more.
/// /// [`iter`]: struct.HashMap.html#method.iter /// [`HashMap`]: struct.HashMap.html #[stable(feature = "rust1", since = "1.0.0")] pub struct Iter<'a, K: 'a, V: 'a> { inner: table::Iter<'a, K, V>, } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Clone for Iter<'a, K, V> { fn clone(&self) -> Iter<'a, K, V> { Iter { inner: self.inner.clone() } } } #[stable(feature = "std_debug", since = "1.16.0")] impl<'a, K: Debug, V: Debug> fmt::Debug for Iter<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list() .entries(self.clone()) .finish() } } /// A mutable iterator over the entries of a `HashMap`. /// /// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its /// documentation for more. /// /// [`iter_mut`]: struct.HashMap.html#method.iter_mut /// [`HashMap`]: struct.HashMap.html #[stable(feature = "rust1", since = "1.0.0")] pub struct IterMut<'a, K: 'a, V: 'a> { inner: table::IterMut<'a, K, V>, } /// An owning iterator over the entries of a `HashMap`. /// /// This `struct` is created by the [`into_iter`] method on [`HashMap`][`HashMap`] /// (provided by the `IntoIterator` trait). See its documentation for more. /// /// [`into_iter`]: struct.HashMap.html#method.into_iter /// [`HashMap`]: struct.HashMap.html #[stable(feature = "rust1", since = "1.0.0")] pub struct IntoIter<K, V> { pub(super) inner: table::IntoIter<K, V>, } /// An iterator over the keys of a `HashMap`. /// /// This `struct` is created by the [`keys`] method on [`HashMap`]. See its /// documentation for more. /// /// [`keys`]: struct.HashMap.html#method.keys /// [`HashMap`]: struct.HashMap.html #[stable(feature = "rust1", since = "1.0.0")] pub struct Keys<'a, K: 'a, V: 'a> { inner: Iter<'a, K, V>, } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Clone for Keys<'a, K, V> { fn clone(&self) -> Keys<'a, K, V> { Keys { inner: self.inner.clone() } } } #[stable(feature = "std_debug", since = "1.16.0")] impl<'a, K: Debug, V> fmt::Debug for Keys<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list() .entries(self.clone()) .finish() } } /// An iterator over the values of a `HashMap`. /// /// This `struct` is created by the [`values`] method on [`HashMap`]. See its /// documentation for more. /// /// [`values`]: struct.HashMap.html#method.values /// [`HashMap`]: struct.HashMap.html #[stable(feature = "rust1", since = "1.0.0")] pub struct Values<'a, K: 'a, V: 'a> { inner: Iter<'a, K, V>, } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Clone for Values<'a, K, V> { fn clone(&self) -> Values<'a, K, V> { Values { inner: self.inner.clone() } } } #[stable(feature = "std_debug", since = "1.16.0")] impl<'a, K, V: Debug> fmt::Debug for Values<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list() .entries(self.clone()) .finish() } } /// A draining iterator over the entries of a `HashMap`. /// /// This `struct` is created by the [`drain`] method on [`HashMap`]. See its /// documentation for more. /// /// [`drain`]: struct.HashMap.html#method.drain /// [`HashMap`]: struct.HashMap.html #[stable(feature = "drain", since = "1.6.0")] pub struct Drain<'a, K: 'a, V: 'a> { pub(super) inner: table::Drain<'a, K, V>, } /// A mutable iterator over the values of a `HashMap`. /// /// This `struct` is created by the [`values_mut`] method on [`HashMap`]. 
See its /// documentation for more. /// /// [`values_mut`]: struct.HashMap.html#method.values_mut /// [`HashMap`]: struct.HashMap.html #[stable(feature = "map_values_mut", since = "1.10.0")] pub struct ValuesMut<'a, K: 'a, V: 'a> { inner: IterMut<'a, K, V>, } enum InternalEntry<K, V, M> { Occupied { elem: FullBucket<K, V, M> }, Vacant { hash: SafeHash, elem: VacantEntryState<K, V, M>, }, TableIsEmpty, } impl<K, V, M> InternalEntry<K, V, M> { #[inline] fn into_occupied_bucket(self) -> Option<FullBucket<K, V, M>> { match self { InternalEntry::Occupied { elem } => Some(elem), _ => None, } } } impl<'a, K, V> InternalEntry<K, V, &'a mut RawTable<K, V>> { #[inline] fn into_entry(self, key: K) -> Option<Entry<'a, K, V>> { match self { InternalEntry::Occupied { elem } => { Some(Occupied(OccupiedEntry { key: Some(key), elem, })) } InternalEntry::Vacant { hash, elem } => { Some(Vacant(VacantEntry { hash, key, elem, })) } InternalEntry::TableIsEmpty => None, } } } /// A builder for computing where in a HashMap a key-value pair would be stored. /// /// See the [`HashMap::raw_entry_mut`] docs for usage examples. /// /// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut #[unstable(feature = "hash_raw_entry", issue = "56167")] pub struct RawEntryBuilderMut<'a, K: 'a, V: 'a, S: 'a> { map: &'a mut HashMap<K, V, S>, } /// A view into a single entry in a map, which may either be vacant or occupied. /// /// This is a lower-level version of [`Entry`]. /// /// This `enum` is constructed from the [`raw_entry`] method on [`HashMap`]. /// /// [`HashMap`]: struct.HashMap.html /// [`Entry`]: enum.Entry.html /// [`raw_entry`]: struct.HashMap.html#method.raw_entry #[unstable(feature = "hash_raw_entry", issue = "56167")] pub enum RawEntryMut<'a, K: 'a, V: 'a, S: 'a> { /// An occupied entry. Occupied(RawOccupiedEntryMut<'a, K, V>), /// A vacant entry. Vacant(RawVacantEntryMut<'a, K, V, S>), } /// A view into an occupied entry in a `HashMap`. /// It is part of the [`RawEntryMut`] enum. /// /// [`RawEntryMut`]: enum.RawEntryMut.html #[unstable(feature = "hash_raw_entry", issue = "56167")] pub struct RawOccupiedEntryMut<'a, K: 'a, V: 'a> { elem: FullBucket<K, V, &'a mut RawTable<K, V>>, } /// A view into a vacant entry in a `HashMap`. /// It is part of the [`RawEntryMut`] enum. /// /// [`RawEntryMut`]: enum.RawEntryMut.html #[unstable(feature = "hash_raw_entry", issue = "56167")] pub struct RawVacantEntryMut<'a, K: 'a, V: 'a, S: 'a> { elem: VacantEntryState<K, V, &'a mut RawTable<K, V>>, hash_builder: &'a S, } /// A builder for computing where in a HashMap a key-value pair would be stored. /// /// See the [`HashMap::raw_entry`] docs for usage examples. /// /// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry #[unstable(feature = "hash_raw_entry", issue = "56167")] pub struct RawEntryBuilder<'a, K: 'a, V: 'a, S: 'a> { map: &'a HashMap<K, V, S>, } impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> where S: BuildHasher, K: Eq + Hash, { /// Create a `RawEntryMut` from the given key. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn from_key<Q: ?Sized>(self, k: &Q) -> RawEntryMut<'a, K, V, S> where K: Borrow<Q>, Q: Hash + Eq { let mut hasher = self.map.hash_builder.build_hasher(); k.hash(&mut hasher); self.from_key_hashed_nocheck(hasher.finish(), k) } /// Create a `RawEntryMut` from the given key and its hash. 
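    ///
    /// A sketch of typical usage (nightly-only, behind the unstable
    /// `hash_raw_entry` feature); the hash is computed once with the map's own
    /// hasher and can then be reused for further lookups:
    ///
    /// ```
    /// #![feature(hash_raw_entry)]
    /// use std::collections::HashMap;
    /// use std::hash::{BuildHasher, Hash, Hasher};
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// let key = "poneyland";
    ///
    /// // Hash the key exactly the way the map itself would.
    /// let mut hasher = map.hasher().build_hasher();
    /// key.hash(&mut hasher);
    /// let hash = hasher.finish();
    ///
    /// map.raw_entry_mut().from_key_hashed_nocheck(hash, key).or_insert(key, 3);
    /// assert_eq!(map["poneyland"], 3);
    /// ```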
#[inline] #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S> where K: Borrow<Q>, Q: Eq { self.from_hash(hash, |q| q.borrow().eq(k)) } #[inline] fn search<F>(self, hash: u64, is_match: F, compare_hashes: bool) -> RawEntryMut<'a, K, V, S> where for<'b> F: FnMut(&'b K) -> bool, { match search_hashed_nonempty_mut(&mut self.map.table, SafeHash::new(hash), is_match, compare_hashes) { InternalEntry::Occupied { elem } => { RawEntryMut::Occupied(RawOccupiedEntryMut { elem }) } InternalEntry::Vacant { elem, .. } => { RawEntryMut::Vacant(RawVacantEntryMut { elem, hash_builder: &self.map.hash_builder, }) } InternalEntry::TableIsEmpty => { unreachable!() } } } /// Create a `RawEntryMut` from the given hash. #[inline] #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S> where for<'b> F: FnMut(&'b K) -> bool, { self.search(hash, is_match, true) } /// Search possible locations for an element with hash `hash` until `is_match` returns true for /// one of them. There is no guarantee that all keys passed to `is_match` will have the provided /// hash. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn search_bucket<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S> where for<'b> F: FnMut(&'b K) -> bool, { self.search(hash, is_match, false) } } impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S> where S: BuildHasher, { /// Access an entry by key. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn from_key<Q: ?Sized>(self, k: &Q) -> Option<(&'a K, &'a V)> where K: Borrow<Q>, Q: Hash + Eq { let mut hasher = self.map.hash_builder.build_hasher(); k.hash(&mut hasher); self.from_key_hashed_nocheck(hasher.finish(), k) } /// Access an entry by a key and its hash. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)> where K: Borrow<Q>, Q: Hash + Eq { self.from_hash(hash, |q| q.borrow().eq(k)) } fn search<F>(self, hash: u64, is_match: F, compare_hashes: bool) -> Option<(&'a K, &'a V)> where F: FnMut(&K) -> bool { if unsafe { unlikely(self.map.table.size() == 0) } { return None; } match search_hashed_nonempty(&self.map.table, SafeHash::new(hash), is_match, compare_hashes) { InternalEntry::Occupied { elem } => Some(elem.into_refs()), InternalEntry::Vacant { .. } => None, InternalEntry::TableIsEmpty => unreachable!(), } } /// Access an entry by hash. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn from_hash<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)> where F: FnMut(&K) -> bool { self.search(hash, is_match, true) } /// Search possible locations for an element with hash `hash` until `is_match` returns true for /// one of them. There is no guarantee that all keys passed to `is_match` will have the provided /// hash. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn search_bucket<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)> where F: FnMut(&K) -> bool { self.search(hash, is_match, false) } } impl<'a, K, V, S> RawEntryMut<'a, K, V, S> { /// Ensures a value is in the entry by inserting the default if empty, and returns /// mutable references to the key and value in the entry. 
/// /// # Examples /// /// ``` /// #![feature(hash_raw_entry)] /// use std::collections::HashMap; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// /// map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 3); /// assert_eq!(map["poneyland"], 3); /// /// *map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 10).1 *= 2; /// assert_eq!(map["poneyland"], 6); /// ``` #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V) where K: Hash, S: BuildHasher, { match self { RawEntryMut::Occupied(entry) => entry.into_key_value(), RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val), } } /// Ensures a value is in the entry by inserting the result of the default function if empty, /// and returns mutable references to the key and value in the entry. /// /// # Examples /// /// ``` /// #![feature(hash_raw_entry)] /// use std::collections::HashMap; /// /// let mut map: HashMap<&str, String> = HashMap::new(); /// /// map.raw_entry_mut().from_key("poneyland").or_insert_with(|| { /// ("poneyland", "hoho".to_string()) /// }); /// /// assert_eq!(map["poneyland"], "hoho".to_string()); /// ``` #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V) where F: FnOnce() -> (K, V), K: Hash, S: BuildHasher, { match self { RawEntryMut::Occupied(entry) => entry.into_key_value(), RawEntryMut::Vacant(entry) => { let (k, v) = default(); entry.insert(k, v) } } } /// Provides in-place mutable access to an occupied entry before any /// potential inserts into the map. /// /// # Examples /// /// ``` /// #![feature(hash_raw_entry)] /// use std::collections::HashMap; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// /// map.raw_entry_mut() /// .from_key("poneyland") /// .and_modify(|_k, v| { *v += 1 }) /// .or_insert("poneyland", 42); /// assert_eq!(map["poneyland"], 42); /// /// map.raw_entry_mut() /// .from_key("poneyland") /// .and_modify(|_k, v| { *v += 1 }) /// .or_insert("poneyland", 0); /// assert_eq!(map["poneyland"], 43); /// ``` #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn and_modify<F>(self, f: F) -> Self where F: FnOnce(&mut K, &mut V) { match self { RawEntryMut::Occupied(mut entry) => { { let (k, v) = entry.get_key_value_mut(); f(k, v); } RawEntryMut::Occupied(entry) }, RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry), } } } impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> { /// Gets a reference to the key in the entry. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn key(&self) -> &K { self.elem.read().0 } /// Gets a mutable reference to the key in the entry. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn key_mut(&mut self) -> &mut K { self.elem.read_mut().0 } /// Converts the entry into a mutable reference to the key in the entry /// with a lifetime bound to the map itself. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn into_key(self) -> &'a mut K { self.elem.into_mut_refs().0 } /// Gets a reference to the value in the entry. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn get(&self) -> &V { self.elem.read().1 } /// Converts the OccupiedEntry into a mutable reference to the value in the entry /// with a lifetime bound to the map itself. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn into_mut(self) -> &'a mut V { self.elem.into_mut_refs().1 } /// Gets a mutable reference to the value in the entry. 
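    ///
    /// A sketch of typical usage (nightly-only, behind the unstable
    /// `hash_raw_entry` feature):
    ///
    /// ```
    /// #![feature(hash_raw_entry)]
    /// use std::collections::HashMap;
    /// use std::collections::hash_map::RawEntryMut;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.insert("poneyland", 12);
    ///
    /// if let RawEntryMut::Occupied(mut o) = map.raw_entry_mut().from_key("poneyland") {
    ///     *o.get_mut() += 10;
    /// }
    /// assert_eq!(map["poneyland"], 22);
    /// ```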
#[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn get_mut(&mut self) -> &mut V { self.elem.read_mut().1 } /// Gets a reference to the key and value in the entry. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn get_key_value(&mut self) -> (&K, &V) { self.elem.read() } /// Gets a mutable reference to the key and value in the entry. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) { self.elem.read_mut() } /// Converts the OccupiedEntry into a mutable reference to the key and value in the entry /// with a lifetime bound to the map itself. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn into_key_value(self) -> (&'a mut K, &'a mut V) { self.elem.into_mut_refs() } /// Sets the value of the entry, and returns the entry's old value. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn insert(&mut self, value: V) -> V { mem::replace(self.get_mut(), value) } /// Sets the value of the entry, and returns the entry's old value. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn insert_key(&mut self, key: K) -> K { mem::replace(self.key_mut(), key) } /// Takes the value out of the entry, and returns it. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn remove(self) -> V { pop_internal(self.elem).1 } /// Take the ownership of the key and value from the map. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn remove_entry(self) -> (K, V) { let (k, v, _) = pop_internal(self.elem); (k, v) } } impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> { /// Sets the value of the entry with the VacantEntry's key, /// and returns a mutable reference to it. #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V) where K: Hash, S: BuildHasher, { let mut hasher = self.hash_builder.build_hasher(); key.hash(&mut hasher); self.insert_hashed_nocheck(hasher.finish(), key, value) } /// Sets the value of the entry with the VacantEntry's key, /// and returns a mutable reference to it. 
#[inline] #[unstable(feature = "hash_raw_entry", issue = "56167")] pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V) { let hash = SafeHash::new(hash); let b = match self.elem { NeqElem(mut bucket, disp) => { if disp >= DISPLACEMENT_THRESHOLD { bucket.table_mut().set_tag(true); } robin_hood(bucket, disp, hash, key, value) }, NoElem(mut bucket, disp) => { if disp >= DISPLACEMENT_THRESHOLD { bucket.table_mut().set_tag(true); } bucket.put(hash, key, value) }, }; b.into_mut_refs() } } #[unstable(feature = "hash_raw_entry", issue = "56167")] impl<'a, K, V, S> Debug for RawEntryBuilderMut<'a, K, V, S> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("RawEntryBuilder") .finish() } } #[unstable(feature = "hash_raw_entry", issue = "56167")] impl<'a, K: Debug, V: Debug, S> Debug for RawEntryMut<'a, K, V, S> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { RawEntryMut::Vacant(ref v) => { f.debug_tuple("RawEntry") .field(v) .finish() } RawEntryMut::Occupied(ref o) => { f.debug_tuple("RawEntry") .field(o) .finish() } } } } #[unstable(feature = "hash_raw_entry", issue = "56167")] impl<'a, K: Debug, V: Debug> Debug for RawOccupiedEntryMut<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("RawOccupiedEntryMut") .field("key", self.key()) .field("value", self.get()) .finish() } } #[unstable(feature = "hash_raw_entry", issue = "56167")] impl<'a, K, V, S> Debug for RawVacantEntryMut<'a, K, V, S> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("RawVacantEntryMut") .finish() } } #[unstable(feature = "hash_raw_entry", issue = "56167")] impl<'a, K, V, S> Debug for RawEntryBuilder<'a, K, V, S> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("RawEntryBuilder") .finish() } } /// A view into a single entry in a map, which may either be vacant or occupied. /// /// This `enum` is constructed from the [`entry`] method on [`HashMap`]. /// /// [`HashMap`]: struct.HashMap.html /// [`entry`]: struct.HashMap.html#method.entry #[stable(feature = "rust1", since = "1.0.0")] pub enum Entry<'a, K: 'a, V: 'a> { /// An occupied entry. #[stable(feature = "rust1", since = "1.0.0")] Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>), /// A vacant entry. #[stable(feature = "rust1", since = "1.0.0")] Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>), } #[stable(feature= "debug_hash_map", since = "1.12.0")] impl<'a, K: 'a + Debug, V: 'a + Debug> Debug for Entry<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Vacant(ref v) => { f.debug_tuple("Entry") .field(v) .finish() } Occupied(ref o) => { f.debug_tuple("Entry") .field(o) .finish() } } } } /// A view into an occupied entry in a `HashMap`. /// It is part of the [`Entry`] enum. /// /// [`Entry`]: enum.Entry.html #[stable(feature = "rust1", since = "1.0.0")] pub struct OccupiedEntry<'a, K: 'a, V: 'a> { key: Option<K>, elem: FullBucket<K, V, &'a mut RawTable<K, V>>, } #[stable(feature= "debug_hash_map", since = "1.12.0")] impl<'a, K: 'a + Debug, V: 'a + Debug> Debug for OccupiedEntry<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("OccupiedEntry") .field("key", self.key()) .field("value", self.get()) .finish() } } /// A view into a vacant entry in a `HashMap`. /// It is part of the [`Entry`] enum. 
/// /// [`Entry`]: enum.Entry.html #[stable(feature = "rust1", since = "1.0.0")] pub struct VacantEntry<'a, K: 'a, V: 'a> { hash: SafeHash, key: K, elem: VacantEntryState<K, V, &'a mut RawTable<K, V>>, } #[stable(feature= "debug_hash_map", since = "1.12.0")] impl<'a, K: 'a + Debug, V: 'a> Debug for VacantEntry<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("VacantEntry") .field(self.key()) .finish() } } /// Possible states of a VacantEntry. enum VacantEntryState<K, V, M> { /// The index is occupied, but the key to insert has precedence, /// and will kick the current one out on insertion. NeqElem(FullBucket<K, V, M>, usize), /// The index is genuinely vacant. NoElem(EmptyBucket<K, V, M>, usize), } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S> where K: Eq + Hash, S: BuildHasher { type Item = (&'a K, &'a V); type IntoIter = Iter<'a, K, V>; fn into_iter(self) -> Iter<'a, K, V> { self.iter() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V, S> IntoIterator for &'a mut HashMap<K, V, S> where K: Eq + Hash, S: BuildHasher { type Item = (&'a K, &'a mut V); type IntoIter = IterMut<'a, K, V>; fn into_iter(self) -> IterMut<'a, K, V> { self.iter_mut() } } #[stable(feature = "rust1", since = "1.0.0")] impl<K, V, S> IntoIterator for HashMap<K, V, S> where K: Eq + Hash, S: BuildHasher { type Item = (K, V); type IntoIter = IntoIter<K, V>; /// Creates a consuming iterator, that is, one that moves each key-value /// pair out of the map in arbitrary order. The map cannot be used after /// calling this. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); /// map.insert("a", 1); /// map.insert("b", 2); /// map.insert("c", 3); /// /// // Not possible with .iter() /// let vec: Vec<(&str, i32)> = map.into_iter().collect(); /// ``` fn into_iter(self) -> IntoIter<K, V> { IntoIter { inner: self.table.into_iter() } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Iterator for Iter<'a, K, V> { type Item = (&'a K, &'a V); #[inline] fn next(&mut self) -> Option<(&'a K, &'a V)> { self.inner.next() } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> { #[inline] fn len(&self) -> usize { self.inner.len() } } #[stable(feature = "fused", since = "1.26.0")] impl<'a, K, V> FusedIterator for Iter<'a, K, V> {} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Iterator for IterMut<'a, K, V> { type Item = (&'a K, &'a mut V); #[inline] fn next(&mut self) -> Option<(&'a K, &'a mut V)> { self.inner.next() } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> { #[inline] fn len(&self) -> usize { self.inner.len() } } #[stable(feature = "fused", since = "1.26.0")] impl<'a, K, V> FusedIterator for IterMut<'a, K, V> {} #[stable(feature = "std_debug", since = "1.16.0")] impl<'a, K, V> fmt::Debug for IterMut<'a, K, V> where K: fmt::Debug, V: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list() .entries(self.inner.iter()) .finish() } } #[stable(feature = "rust1", since = "1.0.0")] impl<K, V> Iterator for IntoIter<K, V> { type Item = (K, V); #[inline] fn next(&mut self) -> Option<(K, V)> { self.inner.next().map(|(_, k, v)| (k, v)) } #[inline] fn size_hint(&self) -> 
(usize, Option<usize>) { self.inner.size_hint() } } #[stable(feature = "rust1", since = "1.0.0")] impl<K, V> ExactSizeIterator for IntoIter<K, V> { #[inline] fn len(&self) -> usize { self.inner.len() } } #[stable(feature = "fused", since = "1.26.0")] impl<K, V> FusedIterator for IntoIter<K, V> {} #[stable(feature = "std_debug", since = "1.16.0")] impl<K: Debug, V: Debug> fmt::Debug for IntoIter<K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list() .entries(self.inner.iter()) .finish() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Iterator for Keys<'a, K, V> { type Item = &'a K; #[inline] fn next(&mut self) -> Option<(&'a K)> { self.inner.next().map(|(k, _)| k) } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> { #[inline] fn len(&self) -> usize { self.inner.len() } } #[stable(feature = "fused", since = "1.26.0")] impl<'a, K, V> FusedIterator for Keys<'a, K, V> {} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Iterator for Values<'a, K, V> { type Item = &'a V; #[inline] fn next(&mut self) -> Option<(&'a V)> { self.inner.next().map(|(_, v)| v) } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> { #[inline] fn len(&self) -> usize { self.inner.len() } } #[stable(feature = "fused", since = "1.26.0")] impl<'a, K, V> FusedIterator for Values<'a, K, V> {} #[stable(feature = "map_values_mut", since = "1.10.0")] impl<'a, K, V> Iterator for ValuesMut<'a, K, V> { type Item = &'a mut V; #[inline] fn next(&mut self) -> Option<(&'a mut V)> { self.inner.next().map(|(_, v)| v) } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } } #[stable(feature = "map_values_mut", since = "1.10.0")] impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> { #[inline] fn len(&self) -> usize { self.inner.len() } } #[stable(feature = "fused", since = "1.26.0")] impl<'a, K, V> FusedIterator for ValuesMut<'a, K, V> {} #[stable(feature = "std_debug", since = "1.16.0")] impl<'a, K, V> fmt::Debug for ValuesMut<'a, K, V> where K: fmt::Debug, V: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list() .entries(self.inner.inner.iter()) .finish() } } #[stable(feature = "drain", since = "1.6.0")] impl<'a, K, V> Iterator for Drain<'a, K, V> { type Item = (K, V); #[inline] fn next(&mut self) -> Option<(K, V)> { self.inner.next().map(|(_, k, v)| (k, v)) } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } } #[stable(feature = "drain", since = "1.6.0")] impl<'a, K, V> ExactSizeIterator for Drain<'a, K, V> { #[inline] fn len(&self) -> usize { self.inner.len() } } #[stable(feature = "fused", since = "1.26.0")] impl<'a, K, V> FusedIterator for Drain<'a, K, V> {} #[stable(feature = "std_debug", since = "1.16.0")] impl<'a, K, V> fmt::Debug for Drain<'a, K, V> where K: fmt::Debug, V: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_list() .entries(self.inner.iter()) .finish() } } impl<'a, K, V> Entry<'a, K, V> { #[stable(feature = "rust1", since = "1.0.0")] /// Ensures a value is in the entry by inserting the default if empty, and returns /// a mutable reference to the value in the entry. 
/// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// /// map.entry("poneyland").or_insert(3); /// assert_eq!(map["poneyland"], 3); /// /// *map.entry("poneyland").or_insert(10) *= 2; /// assert_eq!(map["poneyland"], 6); /// ``` pub fn or_insert(self, default: V) -> &'a mut V { match self { Occupied(entry) => entry.into_mut(), Vacant(entry) => entry.insert(default), } } #[stable(feature = "rust1", since = "1.0.0")] /// Ensures a value is in the entry by inserting the result of the default function if empty, /// and returns a mutable reference to the value in the entry. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map: HashMap<&str, String> = HashMap::new(); /// let s = "hoho".to_string(); /// /// map.entry("poneyland").or_insert_with(|| s); /// /// assert_eq!(map["poneyland"], "hoho".to_string()); /// ``` pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V { match self { Occupied(entry) => entry.into_mut(), Vacant(entry) => entry.insert(default()), } } /// Returns a reference to this entry's key. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); /// ``` #[stable(feature = "map_entry_keys", since = "1.10.0")] pub fn key(&self) -> &K { match *self { Occupied(ref entry) => entry.key(), Vacant(ref entry) => entry.key(), } } /// Provides in-place mutable access to an occupied entry before any /// potential inserts into the map. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// /// map.entry("poneyland") /// .and_modify(|e| { *e += 1 }) /// .or_insert(42); /// assert_eq!(map["poneyland"], 42); /// /// map.entry("poneyland") /// .and_modify(|e| { *e += 1 }) /// .or_insert(42); /// assert_eq!(map["poneyland"], 43); /// ``` #[stable(feature = "entry_and_modify", since = "1.26.0")] pub fn and_modify<F>(self, f: F) -> Self where F: FnOnce(&mut V) { match self { Occupied(mut entry) => { f(entry.get_mut()); Occupied(entry) }, Vacant(entry) => Vacant(entry), } } } impl<'a, K, V: Default> Entry<'a, K, V> { #[stable(feature = "entry_or_default", since = "1.28.0")] /// Ensures a value is in the entry by inserting the default value if empty, /// and returns a mutable reference to the value in the entry. /// /// # Examples /// /// ``` /// # fn main() { /// use std::collections::HashMap; /// /// let mut map: HashMap<&str, Option<u32>> = HashMap::new(); /// map.entry("poneyland").or_default(); /// /// assert_eq!(map["poneyland"], None); /// # } /// ``` pub fn or_default(self) -> &'a mut V { match self { Occupied(entry) => entry.into_mut(), Vacant(entry) => entry.insert(Default::default()), } } } impl<'a, K, V> OccupiedEntry<'a, K, V> { /// Gets a reference to the key in the entry. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// map.entry("poneyland").or_insert(12); /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); /// ``` #[stable(feature = "map_entry_keys", since = "1.10.0")] pub fn key(&self) -> &K { self.elem.read().0 } /// Take the ownership of the key and value from the map. 
/// /// # Examples /// /// ``` /// use std::collections::HashMap; /// use std::collections::hash_map::Entry; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// map.entry("poneyland").or_insert(12); /// /// if let Entry::Occupied(o) = map.entry("poneyland") { /// // We delete the entry from the map. /// o.remove_entry(); /// } /// /// assert_eq!(map.contains_key("poneyland"), false); /// ``` #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")] pub fn remove_entry(self) -> (K, V) { let (k, v, _) = pop_internal(self.elem); (k, v) } /// Gets a reference to the value in the entry. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// use std::collections::hash_map::Entry; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// map.entry("poneyland").or_insert(12); /// /// if let Entry::Occupied(o) = map.entry("poneyland") { /// assert_eq!(o.get(), &12); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get(&self) -> &V { self.elem.read().1 } /// Gets a mutable reference to the value in the entry. /// /// If you need a reference to the `OccupiedEntry` which may outlive the /// destruction of the `Entry` value, see [`into_mut`]. /// /// [`into_mut`]: #method.into_mut /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// use std::collections::hash_map::Entry; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// map.entry("poneyland").or_insert(12); /// /// assert_eq!(map["poneyland"], 12); /// if let Entry::Occupied(mut o) = map.entry("poneyland") { /// *o.get_mut() += 10; /// assert_eq!(*o.get(), 22); /// /// // We can use the same Entry multiple times. /// *o.get_mut() += 2; /// } /// /// assert_eq!(map["poneyland"], 24); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get_mut(&mut self) -> &mut V { self.elem.read_mut().1 } /// Converts the OccupiedEntry into a mutable reference to the value in the entry /// with a lifetime bound to the map itself. /// /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`]. /// /// [`get_mut`]: #method.get_mut /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// use std::collections::hash_map::Entry; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// map.entry("poneyland").or_insert(12); /// /// assert_eq!(map["poneyland"], 12); /// if let Entry::Occupied(o) = map.entry("poneyland") { /// *o.into_mut() += 10; /// } /// /// assert_eq!(map["poneyland"], 22); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn into_mut(self) -> &'a mut V { self.elem.into_mut_refs().1 } /// Sets the value of the entry, and returns the entry's old value. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// use std::collections::hash_map::Entry; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// map.entry("poneyland").or_insert(12); /// /// if let Entry::Occupied(mut o) = map.entry("poneyland") { /// assert_eq!(o.insert(15), 12); /// } /// /// assert_eq!(map["poneyland"], 15); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn insert(&mut self, mut value: V) -> V { let old_value = self.get_mut(); mem::swap(&mut value, old_value); value } /// Takes the value out of the entry, and returns it. 
/// /// # Examples /// /// ``` /// use std::collections::HashMap; /// use std::collections::hash_map::Entry; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// map.entry("poneyland").or_insert(12); /// /// if let Entry::Occupied(o) = map.entry("poneyland") { /// assert_eq!(o.remove(), 12); /// } /// /// assert_eq!(map.contains_key("poneyland"), false); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn remove(self) -> V { pop_internal(self.elem).1 } /// Returns a key that was used for search. /// /// The key was retained for further use. fn take_key(&mut self) -> Option<K> { self.key.take() } /// Replaces the entry, returning the old key and value. The new key in the hash map will be /// the key used to create this entry. /// /// # Examples /// /// ``` /// #![feature(map_entry_replace)] /// use std::collections::hash_map::{Entry, HashMap}; /// use std::rc::Rc; /// /// let mut map: HashMap<Rc<String>, u32> = HashMap::new(); /// map.insert(Rc::new("Stringthing".to_string()), 15); /// /// let my_key = Rc::new("Stringthing".to_string()); /// /// if let Entry::Occupied(entry) = map.entry(my_key) { /// // Also replace the key with a handle to our other key. /// let (old_key, old_value): (Rc<String>, u32) = entry.replace_entry(16); /// } /// /// ``` #[unstable(feature = "map_entry_replace", issue = "44286")] pub fn replace_entry(mut self, value: V) -> (K, V) { let (old_key, old_value) = self.elem.read_mut(); let old_key = mem::replace(old_key, self.key.unwrap()); let old_value = mem::replace(old_value, value); (old_key, old_value) } /// Replaces the key in the hash map with the key used to create this entry. /// /// # Examples /// /// ``` /// #![feature(map_entry_replace)] /// use std::collections::hash_map::{Entry, HashMap}; /// use std::rc::Rc; /// /// let mut map: HashMap<Rc<String>, u32> = HashMap::new(); /// let mut known_strings: Vec<Rc<String>> = Vec::new(); /// /// // Initialise known strings, run program, etc. /// /// reclaim_memory(&mut map, &known_strings); /// /// fn reclaim_memory(map: &mut HashMap<Rc<String>, u32>, known_strings: &[Rc<String>] ) { /// for s in known_strings { /// if let Entry::Occupied(entry) = map.entry(s.clone()) { /// // Replaces the entry's key with our version of it in `known_strings`. /// entry.replace_key(); /// } /// } /// } /// ``` #[unstable(feature = "map_entry_replace", issue = "44286")] pub fn replace_key(mut self) -> K { let (old_key, _) = self.elem.read_mut(); mem::replace(old_key, self.key.unwrap()) } } impl<'a, K: 'a, V: 'a> VacantEntry<'a, K, V> { /// Gets a reference to the key that would be used when inserting a value /// through the `VacantEntry`. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); /// ``` #[stable(feature = "map_entry_keys", since = "1.10.0")] pub fn key(&self) -> &K { &self.key } /// Take ownership of the key. /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// use std::collections::hash_map::Entry; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// /// if let Entry::Vacant(v) = map.entry("poneyland") { /// v.into_key(); /// } /// ``` #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")] pub fn into_key(self) -> K { self.key } /// Sets the value of the entry with the VacantEntry's key, /// and returns a mutable reference to it. 
/// /// # Examples /// /// ``` /// use std::collections::HashMap; /// use std::collections::hash_map::Entry; /// /// let mut map: HashMap<&str, u32> = HashMap::new(); /// /// if let Entry::Vacant(o) = map.entry("poneyland") { /// o.insert(37); /// } /// assert_eq!(map["poneyland"], 37); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn insert(self, value: V) -> &'a mut V { let b = match self.elem { NeqElem(mut bucket, disp) => { if disp >= DISPLACEMENT_THRESHOLD { bucket.table_mut().set_tag(true); } robin_hood(bucket, disp, self.hash, self.key, value) }, NoElem(mut bucket, disp) => { if disp >= DISPLACEMENT_THRESHOLD { bucket.table_mut().set_tag(true); } bucket.put(self.hash, self.key, value) }, }; b.into_mut_refs().1 } } #[stable(feature = "rust1", since = "1.0.0")] impl<K, V, S> FromIterator<(K, V)> for HashMap<K, V, S> where K: Eq + Hash, S: BuildHasher + Default { fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> HashMap<K, V, S> { let mut map = HashMap::with_hasher(Default::default()); map.extend(iter); map } } #[stable(feature = "rust1", since = "1.0.0")] impl<K, V, S> Extend<(K, V)> for HashMap<K, V, S> where K: Eq + Hash, S: BuildHasher { fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) { // Keys may be already present or show multiple times in the iterator. // Reserve the entire hint lower bound if the map is empty. // Otherwise reserve half the hint (rounded up), so the map // will only resize twice in the worst case. let iter = iter.into_iter(); let reserve = if self.is_empty() { iter.size_hint().0 } else { (iter.size_hint().0 + 1) / 2 }; self.reserve(reserve); for (k, v) in iter { self.insert(k, v); } } } #[stable(feature = "hash_extend_copy", since = "1.4.0")] impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap<K, V, S> where K: Eq + Hash + Copy, V: Copy, S: BuildHasher { fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) { self.extend(iter.into_iter().map(|(&key, &value)| (key, value))); } } /// `RandomState` is the default state for [`HashMap`] types. /// /// A particular instance `RandomState` will create the same instances of /// [`Hasher`], but the hashers created by two different `RandomState` /// instances are unlikely to produce the same result for the same values. /// /// [`HashMap`]: struct.HashMap.html /// [`Hasher`]: ../../hash/trait.Hasher.html /// /// # Examples /// /// ``` /// use std::collections::HashMap; /// use std::collections::hash_map::RandomState; /// /// let s = RandomState::new(); /// let mut map = HashMap::with_hasher(s); /// map.insert(1, 2); /// ``` #[derive(Clone)] #[stable(feature = "hashmap_build_hasher", since = "1.7.0")] pub struct RandomState { k0: u64, k1: u64, } impl RandomState { /// Constructs a new `RandomState` that is initialized with random keys. /// /// # Examples /// /// ``` /// use std::collections::hash_map::RandomState; /// /// let s = RandomState::new(); /// ``` #[inline] #[allow(deprecated)] // rand #[stable(feature = "hashmap_build_hasher", since = "1.7.0")] pub fn new() -> RandomState { // Historically this function did not cache keys from the OS and instead // simply always called `rand::thread_rng().gen()` twice. In #31356 it // was discovered, however, that because we re-seed the thread-local RNG // from the OS periodically that this can cause excessive slowdown when // many hash maps are created on a thread. To solve this performance // trap we cache the first set of randomly generated keys per-thread. 
// // Later in #36481 it was discovered that exposing a deterministic // iteration order allows a form of DOS attack. To counter that we // increment one of the seeds on every RandomState creation, giving // every corresponding HashMap a different iteration order. thread_local!(static KEYS: Cell<(u64, u64)> = { Cell::new(sys::hashmap_random_keys()) }); KEYS.with(|keys| { let (k0, k1) = keys.get(); keys.set((k0.wrapping_add(1), k1)); RandomState { k0: k0, k1: k1 } }) } } #[stable(feature = "hashmap_build_hasher", since = "1.7.0")] impl BuildHasher for RandomState { type Hasher = DefaultHasher; #[inline] #[allow(deprecated)] fn build_hasher(&self) -> DefaultHasher { DefaultHasher(SipHasher13::new_with_keys(self.k0, self.k1)) } } /// The default [`Hasher`] used by [`RandomState`]. /// /// The internal algorithm is not specified, and so it and its hashes should /// not be relied upon over releases. /// /// [`RandomState`]: struct.RandomState.html /// [`Hasher`]: ../../hash/trait.Hasher.html #[stable(feature = "hashmap_default_hasher", since = "1.13.0")] #[allow(deprecated)] #[derive(Clone, Debug)] pub struct DefaultHasher(SipHasher13); impl DefaultHasher { /// Creates a new `DefaultHasher`. /// /// This hasher is not guaranteed to be the same as all other /// `DefaultHasher` instances, but is the same as all other `DefaultHasher` /// instances created through `new` or `default`. #[stable(feature = "hashmap_default_hasher", since = "1.13.0")] #[allow(deprecated)] pub fn new() -> DefaultHasher { DefaultHasher(SipHasher13::new_with_keys(0, 0)) } } #[stable(feature = "hashmap_default_hasher", since = "1.13.0")] impl Default for DefaultHasher { /// Creates a new `DefaultHasher` using [`new`][DefaultHasher::new]. /// See its documentation for more. fn default() -> DefaultHasher { DefaultHasher::new() } } #[stable(feature = "hashmap_default_hasher", since = "1.13.0")] impl Hasher for DefaultHasher { #[inline] fn write(&mut self, msg: &[u8]) { self.0.write(msg) } #[inline] fn finish(&self) -> u64 { self.0.finish() } } #[stable(feature = "hashmap_build_hasher", since = "1.7.0")] impl Default for RandomState { /// Constructs a new `RandomState`. #[inline] fn default() -> RandomState { RandomState::new() } } #[stable(feature = "std_debug", since = "1.16.0")] impl fmt::Debug for RandomState { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.pad("RandomState { .. 
}") } } impl<K, S, Q: ?Sized> super::Recover<Q> for HashMap<K, (), S> where K: Eq + Hash + Borrow<Q>, S: BuildHasher, Q: Eq + Hash { type Key = K; #[inline] fn get(&self, key: &Q) -> Option<&K> { self.search(key).map(|bucket| bucket.into_refs().0) } fn take(&mut self, key: &Q) -> Option<K> { self.search_mut(key).map(|bucket| pop_internal(bucket).0) } #[inline] fn replace(&mut self, key: K) -> Option<K> { self.reserve(1); match self.entry(key) { Occupied(mut occupied) => { let key = occupied.take_key().unwrap(); Some(mem::replace(occupied.elem.read_mut().0, key)) } Vacant(vacant) => { vacant.insert(()); None } } } } #[allow(dead_code)] fn assert_covariance() { fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> { v } fn map_val<'new>(v: HashMap<u8, &'static str>) -> HashMap<u8, &'new str> { v } fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> { v } fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> { v } fn into_iter_key<'new>(v: IntoIter<&'static str, u8>) -> IntoIter<&'new str, u8> { v } fn into_iter_val<'new>(v: IntoIter<u8, &'static str>) -> IntoIter<u8, &'new str> { v } fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> { v } fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, &'new str> { v } fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> { v } fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> { v } fn drain<'new>(d: Drain<'static, &'static str, &'static str>) -> Drain<'new, &'new str, &'new str> { d } } #[cfg(test)] mod test_map { use super::HashMap; use super::Entry::{Occupied, Vacant}; use super::RandomState; use cell::RefCell; use rand::{thread_rng, Rng}; use realstd::collections::CollectionAllocErr::*; use realstd::mem::size_of; use realstd::usize; #[test] fn test_zero_capacities() { type HM = HashMap<i32, i32>; let m = HM::new(); assert_eq!(m.capacity(), 0); let m = HM::default(); assert_eq!(m.capacity(), 0); let m = HM::with_hasher(RandomState::new()); assert_eq!(m.capacity(), 0); let m = HM::with_capacity(0); assert_eq!(m.capacity(), 0); let m = HM::with_capacity_and_hasher(0, RandomState::new()); assert_eq!(m.capacity(), 0); let mut m = HM::new(); m.insert(1, 1); m.insert(2, 2); m.remove(&1); m.remove(&2); m.shrink_to_fit(); assert_eq!(m.capacity(), 0); let mut m = HM::new(); m.reserve(0); assert_eq!(m.capacity(), 0); } #[test] fn test_create_capacity_zero() { let mut m = HashMap::with_capacity(0); assert!(m.insert(1, 1).is_none()); assert!(m.contains_key(&1)); assert!(!m.contains_key(&0)); } #[test] fn test_insert() { let mut m = HashMap::new(); assert_eq!(m.len(), 0); assert!(m.insert(1, 2).is_none()); assert_eq!(m.len(), 1); assert!(m.insert(2, 4).is_none()); assert_eq!(m.len(), 2); assert_eq!(*m.get(&1).unwrap(), 2); assert_eq!(*m.get(&2).unwrap(), 4); } #[test] fn test_clone() { let mut m = HashMap::new(); assert_eq!(m.len(), 0); assert!(m.insert(1, 2).is_none()); assert_eq!(m.len(), 1); assert!(m.insert(2, 4).is_none()); assert_eq!(m.len(), 2); let m2 = m.clone(); assert_eq!(*m2.get(&1).unwrap(), 2); assert_eq!(*m2.get(&2).unwrap(), 4); assert_eq!(m2.len(), 2); } thread_local! 
{ static DROP_VECTOR: RefCell<Vec<i32>> = RefCell::new(Vec::new()) } #[derive(Hash, PartialEq, Eq)] struct Droppable { k: usize, } impl Droppable { fn new(k: usize) -> Droppable { DROP_VECTOR.with(|slot| { slot.borrow_mut()[k] += 1; }); Droppable { k } } } impl Drop for Droppable { fn drop(&mut self) { DROP_VECTOR.with(|slot| { slot.borrow_mut()[self.k] -= 1; }); } } impl Clone for Droppable { fn clone(&self) -> Droppable { Droppable::new(self.k) } } #[test] fn test_drops() { DROP_VECTOR.with(|slot| { *slot.borrow_mut() = vec![0; 200]; }); { let mut m = HashMap::new(); DROP_VECTOR.with(|v| { for i in 0..200 { assert_eq!(v.borrow()[i], 0); } }); for i in 0..100 { let d1 = Droppable::new(i); let d2 = Droppable::new(i + 100); m.insert(d1, d2); } DROP_VECTOR.with(|v| { for i in 0..200 { assert_eq!(v.borrow()[i], 1); } }); for i in 0..50 { let k = Droppable::new(i); let v = m.remove(&k); assert!(v.is_some()); DROP_VECTOR.with(|v| { assert_eq!(v.borrow()[i], 1); assert_eq!(v.borrow()[i+100], 1); }); } DROP_VECTOR.with(|v| { for i in 0..50 { assert_eq!(v.borrow()[i], 0); assert_eq!(v.borrow()[i+100], 0); } for i in 50..100 { assert_eq!(v.borrow()[i], 1); assert_eq!(v.borrow()[i+100], 1); } }); } DROP_VECTOR.with(|v| { for i in 0..200 { assert_eq!(v.borrow()[i], 0); } }); } #[test] fn test_into_iter_drops() { DROP_VECTOR.with(|v| { *v.borrow_mut() = vec![0; 200]; }); let hm = { let mut hm = HashMap::new(); DROP_VECTOR.with(|v| { for i in 0..200 { assert_eq!(v.borrow()[i], 0); } }); for i in 0..100 { let d1 = Droppable::new(i); let d2 = Droppable::new(i + 100); hm.insert(d1, d2); } DROP_VECTOR.with(|v| { for i in 0..200 { assert_eq!(v.borrow()[i], 1); } }); hm }; // By the way, ensure that cloning doesn't screw up the dropping. drop(hm.clone()); { let mut half = hm.into_iter().take(50); DROP_VECTOR.with(|v| { for i in 0..200 { assert_eq!(v.borrow()[i], 1); } }); for _ in half.by_ref() {} DROP_VECTOR.with(|v| { let nk = (0..100) .filter(|&i| v.borrow()[i] == 1) .count(); let nv = (0..100) .filter(|&i| v.borrow()[i + 100] == 1) .count(); assert_eq!(nk, 50); assert_eq!(nv, 50); }); }; DROP_VECTOR.with(|v| { for i in 0..200 { assert_eq!(v.borrow()[i], 0); } }); } #[test] fn test_empty_remove() { let mut m: HashMap<i32, bool> = HashMap::new(); assert_eq!(m.remove(&0), None); } #[test] fn test_empty_entry() { let mut m: HashMap<i32, bool> = HashMap::new(); match m.entry(0) { Occupied(_) => panic!(), Vacant(_) => {} } assert!(*m.entry(0).or_insert(true)); assert_eq!(m.len(), 1); } #[test] fn test_empty_iter() { let mut m: HashMap<i32, bool> = HashMap::new(); assert_eq!(m.drain().next(), None); assert_eq!(m.keys().next(), None); assert_eq!(m.values().next(), None); assert_eq!(m.values_mut().next(), None); assert_eq!(m.iter().next(), None); assert_eq!(m.iter_mut().next(), None); assert_eq!(m.len(), 0); assert!(m.is_empty()); assert_eq!(m.into_iter().next(), None); } #[test] fn test_lots_of_insertions() { let mut m = HashMap::new(); // Try this a few times to make sure we never screw up the hashmap's // internal state. 
for _ in 0..10 { assert!(m.is_empty()); for i in 1..1001 { assert!(m.insert(i, i).is_none()); for j in 1..=i { let r = m.get(&j); assert_eq!(r, Some(&j)); } for j in i + 1..1001 { let r = m.get(&j); assert_eq!(r, None); } } for i in 1001..2001 { assert!(!m.contains_key(&i)); } // remove forwards for i in 1..1001 { assert!(m.remove(&i).is_some()); for j in 1..=i { assert!(!m.contains_key(&j)); } for j in i + 1..1001 { assert!(m.contains_key(&j)); } } for i in 1..1001 { assert!(!m.contains_key(&i)); } for i in 1..1001 { assert!(m.insert(i, i).is_none()); } // remove backwards for i in (1..1001).rev() { assert!(m.remove(&i).is_some()); for j in i..1001 { assert!(!m.contains_key(&j)); } for j in 1..i { assert!(m.contains_key(&j)); } } } } #[test] fn test_find_mut() { let mut m = HashMap::new(); assert!(m.insert(1, 12).is_none()); assert!(m.insert(2, 8).is_none()); assert!(m.insert(5, 14).is_none()); let new = 100; match m.get_mut(&5) { None => panic!(), Some(x) => *x = new, } assert_eq!(m.get(&5), Some(&new)); } #[test] fn test_insert_overwrite() { let mut m = HashMap::new(); assert!(m.insert(1, 2).is_none()); assert_eq!(*m.get(&1).unwrap(), 2); assert!(!m.insert(1, 3).is_none()); assert_eq!(*m.get(&1).unwrap(), 3); } #[test] fn test_insert_conflicts() { let mut m = HashMap::with_capacity(4); assert!(m.insert(1, 2).is_none()); assert!(m.insert(5, 3).is_none()); assert!(m.insert(9, 4).is_none()); assert_eq!(*m.get(&9).unwrap(), 4); assert_eq!(*m.get(&5).unwrap(), 3); assert_eq!(*m.get(&1).unwrap(), 2); } #[test] fn test_conflict_remove() { let mut m = HashMap::with_capacity(4); assert!(m.insert(1, 2).is_none()); assert_eq!(*m.get(&1).unwrap(), 2); assert!(m.insert(5, 3).is_none()); assert_eq!(*m.get(&1).unwrap(), 2); assert_eq!(*m.get(&5).unwrap(), 3); assert!(m.insert(9, 4).is_none()); assert_eq!(*m.get(&1).unwrap(), 2); assert_eq!(*m.get(&5).unwrap(), 3); assert_eq!(*m.get(&9).unwrap(), 4); assert!(m.remove(&1).is_some()); assert_eq!(*m.get(&9).unwrap(), 4); assert_eq!(*m.get(&5).unwrap(), 3); } #[test] fn test_is_empty() { let mut m = HashMap::with_capacity(4); assert!(m.insert(1, 2).is_none()); assert!(!m.is_empty()); assert!(m.remove(&1).is_some()); assert!(m.is_empty()); } #[test] fn test_remove() { let mut m = HashMap::new(); m.insert(1, 2); assert_eq!(m.remove(&1), Some(2)); assert_eq!(m.remove(&1), None); } #[test] fn test_remove_entry() { let mut m = HashMap::new(); m.insert(1, 2); assert_eq!(m.remove_entry(&1), Some((1, 2))); assert_eq!(m.remove(&1), None); } #[test] fn test_iterate() { let mut m = HashMap::with_capacity(4); for i in 0..32 { assert!(m.insert(i, i*2).is_none()); } assert_eq!(m.len(), 32); let mut observed: u32 = 0; for (k, v) in &m { assert_eq!(*v, *k * 2); observed |= 1 << *k; } assert_eq!(observed, 0xFFFF_FFFF); } #[test] fn test_keys() { let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')]; let map: HashMap<_, _> = vec.into_iter().collect(); let keys: Vec<_> = map.keys().cloned().collect(); assert_eq!(keys.len(), 3); assert!(keys.contains(&1)); assert!(keys.contains(&2)); assert!(keys.contains(&3)); } #[test] fn test_values() { let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')]; let map: HashMap<_, _> = vec.into_iter().collect(); let values: Vec<_> = map.values().cloned().collect(); assert_eq!(values.len(), 3); assert!(values.contains(&'a')); assert!(values.contains(&'b')); assert!(values.contains(&'c')); } #[test] fn test_values_mut() { let vec = vec![(1, 1), (2, 2), (3, 3)]; let mut map: HashMap<_, _> = vec.into_iter().collect(); for value in map.values_mut() { *value = 
(*value) * 2 } let values: Vec<_> = map.values().cloned().collect(); assert_eq!(values.len(), 3); assert!(values.contains(&2)); assert!(values.contains(&4)); assert!(values.contains(&6)); } #[test] fn test_find() { let mut m = HashMap::new(); assert!(m.get(&1).is_none()); m.insert(1, 2); match m.get(&1) { None => panic!(), Some(v) => assert_eq!(*v, 2), } } #[test] fn test_eq() { let mut m1 = HashMap::new(); m1.insert(1, 2); m1.insert(2, 3); m1.insert(3, 4); let mut m2 = HashMap::new(); m2.insert(1, 2); m2.insert(2, 3); assert!(m1 != m2); m2.insert(3, 4); assert_eq!(m1, m2); } #[test] fn test_show() { let mut map = HashMap::new(); let empty: HashMap<i32, i32> = HashMap::new(); map.insert(1, 2); map.insert(3, 4); let map_str = format!("{:?}", map); assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}"); assert_eq!(format!("{:?}", empty), "{}"); } #[test] fn test_expand() { let mut m = HashMap::new(); assert_eq!(m.len(), 0); assert!(m.is_empty()); let mut i = 0; let old_raw_cap = m.raw_capacity(); while old_raw_cap == m.raw_capacity() { m.insert(i, i); i += 1; } assert_eq!(m.len(), i); assert!(!m.is_empty()); } #[test] fn test_behavior_resize_policy() { let mut m = HashMap::new(); assert_eq!(m.len(), 0); assert_eq!(m.raw_capacity(), 0); assert!(m.is_empty()); m.insert(0, 0); m.remove(&0); assert!(m.is_empty()); let initial_raw_cap = m.raw_capacity(); m.reserve(initial_raw_cap); let raw_cap = m.raw_capacity(); assert_eq!(raw_cap, initial_raw_cap * 2); let mut i = 0; for _ in 0..raw_cap * 3 / 4 { m.insert(i, i); i += 1; } // three quarters full assert_eq!(m.len(), i); assert_eq!(m.raw_capacity(), raw_cap); for _ in 0..raw_cap / 4 { m.insert(i, i); i += 1; } // half full let new_raw_cap = m.raw_capacity(); assert_eq!(new_raw_cap, raw_cap * 2); for _ in 0..raw_cap / 2 - 1 { i -= 1; m.remove(&i); assert_eq!(m.raw_capacity(), new_raw_cap); } // A little more than one quarter full. 
m.shrink_to_fit(); assert_eq!(m.raw_capacity(), raw_cap); // again, a little more than half full for _ in 0..raw_cap / 2 - 1 { i -= 1; m.remove(&i); } m.shrink_to_fit(); assert_eq!(m.len(), i); assert!(!m.is_empty()); assert_eq!(m.raw_capacity(), initial_raw_cap); } #[test] fn test_reserve_shrink_to_fit() { let mut m = HashMap::new(); m.insert(0, 0); m.remove(&0); assert!(m.capacity() >= m.len()); for i in 0..128 { m.insert(i, i); } m.reserve(256); let usable_cap = m.capacity(); for i in 128..(128 + 256) { m.insert(i, i); assert_eq!(m.capacity(), usable_cap); } for i in 100..(128 + 256) { assert_eq!(m.remove(&i), Some(i)); } m.shrink_to_fit(); assert_eq!(m.len(), 100); assert!(!m.is_empty()); assert!(m.capacity() >= m.len()); for i in 0..100 { assert_eq!(m.remove(&i), Some(i)); } m.shrink_to_fit(); m.insert(0, 0); assert_eq!(m.len(), 1); assert!(m.capacity() >= m.len()); assert_eq!(m.remove(&0), Some(0)); } #[test] fn test_from_iter() { let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; let map: HashMap<_, _> = xs.iter().cloned().collect(); for &(k, v) in &xs { assert_eq!(map.get(&k), Some(&v)); } } #[test] fn test_size_hint() { let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; let map: HashMap<_, _> = xs.iter().cloned().collect(); let mut iter = map.iter(); for _ in iter.by_ref().take(3) {} assert_eq!(iter.size_hint(), (3, Some(3))); } #[test] fn test_iter_len() { let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; let map: HashMap<_, _> = xs.iter().cloned().collect(); let mut iter = map.iter(); for _ in iter.by_ref().take(3) {} assert_eq!(iter.len(), 3); } #[test] fn test_mut_size_hint() { let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; let mut map: HashMap<_, _> = xs.iter().cloned().collect(); let mut iter = map.iter_mut(); for _ in iter.by_ref().take(3) {} assert_eq!(iter.size_hint(), (3, Some(3))); } #[test] fn test_iter_mut_len() { let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; let mut map: HashMap<_, _> = xs.iter().cloned().collect(); let mut iter = map.iter_mut(); for _ in iter.by_ref().take(3) {} assert_eq!(iter.len(), 3); } #[test] fn test_index() { let mut map = HashMap::new(); map.insert(1, 2); map.insert(2, 1); map.insert(3, 4); assert_eq!(map[&2], 1); } #[test] #[should_panic] fn test_index_nonexistent() { let mut map = HashMap::new(); map.insert(1, 2); map.insert(2, 1); map.insert(3, 4); map[&4]; } #[test] fn test_entry() { let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)]; let mut map: HashMap<_, _> = xs.iter().cloned().collect(); // Existing key (insert) match map.entry(1) { Vacant(_) => unreachable!(), Occupied(mut view) => { assert_eq!(view.get(), &10); assert_eq!(view.insert(100), 10); } } assert_eq!(map.get(&1).unwrap(), &100); assert_eq!(map.len(), 6); // Existing key (update) match map.entry(2) { Vacant(_) => unreachable!(), Occupied(mut view) => { let v = view.get_mut(); let new_v = (*v) * 10; *v = new_v; } } assert_eq!(map.get(&2).unwrap(), &200); assert_eq!(map.len(), 6); // Existing key (take) match map.entry(3) { Vacant(_) => unreachable!(), Occupied(view) => { assert_eq!(view.remove(), 30); } } assert_eq!(map.get(&3), None); assert_eq!(map.len(), 5); // Inexistent key (insert) match map.entry(10) { Occupied(_) => unreachable!(), Vacant(view) => { assert_eq!(*view.insert(1000), 1000); } } assert_eq!(map.get(&10).unwrap(), &1000); assert_eq!(map.len(), 6); } #[test] fn test_entry_take_doesnt_corrupt() { #![allow(deprecated)] //rand // Test for #19292 fn check(m: &HashMap<i32, ()>) { for k in m.keys() { 
assert!(m.contains_key(k), "{} is in keys() but not in the map?", k); } } let mut m = HashMap::new(); let mut rng = thread_rng(); // Populate the map with some items. for _ in 0..50 { let x = rng.gen_range(-10, 10); m.insert(x, ()); } for _ in 0..1000 { let x = rng.gen_range(-10, 10); match m.entry(x) { Vacant(_) => {} Occupied(e) => { e.remove(); } } check(&m); } } #[test] fn test_extend_ref() { let mut a = HashMap::new(); a.insert(1, "one"); let mut b = HashMap::new(); b.insert(2, "two"); b.insert(3, "three"); a.extend(&b); assert_eq!(a.len(), 3); assert_eq!(a[&1], "one"); assert_eq!(a[&2], "two"); assert_eq!(a[&3], "three"); } #[test] fn test_capacity_not_less_than_len() { let mut a = HashMap::new(); let mut item = 0; for _ in 0..116 { a.insert(item, 0); item += 1; } assert!(a.capacity() > a.len()); let free = a.capacity() - a.len(); for _ in 0..free { a.insert(item, 0); item += 1; } assert_eq!(a.len(), a.capacity()); // Insert at capacity should cause allocation. a.insert(item, 0); assert!(a.capacity() > a.len()); } #[test] fn test_occupied_entry_key() { let mut a = HashMap::new(); let key = "hello there"; let value = "value goes here"; assert!(a.is_empty()); a.insert(key.clone(), value.clone()); assert_eq!(a.len(), 1); assert_eq!(a[key], value); match a.entry(key.clone()) { Vacant(_) => panic!(), Occupied(e) => assert_eq!(key, *e.key()), } assert_eq!(a.len(), 1); assert_eq!(a[key], value); } #[test] fn test_vacant_entry_key() { let mut a = HashMap::new(); let key = "hello there"; let value = "value goes here"; assert!(a.is_empty()); match a.entry(key.clone()) { Occupied(_) => panic!(), Vacant(e) => { assert_eq!(key, *e.key()); e.insert(value.clone()); } } assert_eq!(a.len(), 1); assert_eq!(a[key], value); } #[test] fn test_retain() { let mut map: HashMap<i32, i32> = (0..100).map(|x|(x, x*10)).collect(); map.retain(|&k, _| k % 2 == 0); assert_eq!(map.len(), 50); assert_eq!(map[&2], 20); assert_eq!(map[&4], 40); assert_eq!(map[&6], 60); } #[test] fn test_adaptive() { const TEST_LEN: usize = 5000; // by cloning we get maps with the same hasher seed let mut first = HashMap::new(); let mut second = first.clone(); first.extend((0..TEST_LEN).map(|i| (i, i))); second.extend((TEST_LEN..TEST_LEN * 2).map(|i| (i, i))); for (&k, &v) in &second { let prev_cap = first.capacity(); let expect_grow = first.len() == prev_cap; first.insert(k, v); if !expect_grow && first.capacity() != prev_cap { return; } } panic!("Adaptive early resize failed"); } #[test] fn test_try_reserve() { let mut empty_bytes: HashMap<u8,u8> = HashMap::new(); const MAX_USIZE: usize = usize::MAX; // HashMap and RawTables use complicated size calculations // hashes_size is sizeof(HashUint) * capacity; // pairs_size is sizeof((K. 
V)) * capacity; // alignment_hashes_size is 8 // alignment_pairs size is 4 let size_of_multiplier = (size_of::<usize>() + size_of::<(u8, u8)>()).next_power_of_two(); // The following formula is used to calculate the new capacity let max_no_ovf = ((MAX_USIZE / 11) * 10) / size_of_multiplier - 1; if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) { } else { panic!("usize::MAX should trigger an overflow!"); } if size_of::<usize>() < 8 { if let Err(CapacityOverflow) = empty_bytes.try_reserve(max_no_ovf) { } else { panic!("isize::MAX + 1 should trigger a CapacityOverflow!") } } else { if let Err(AllocErr) = empty_bytes.try_reserve(max_no_ovf) { } else { panic!("isize::MAX + 1 should trigger an OOM!") } } } #[test] fn test_raw_entry() { use super::RawEntryMut::{Occupied, Vacant}; let xs = [(1i32, 10i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)]; let mut map: HashMap<_, _> = xs.iter().cloned().collect(); let compute_hash = |map: &HashMap<i32, i32>, k: i32| -> u64 { use core::hash::{BuildHasher, Hash, Hasher}; let mut hasher = map.hasher().build_hasher(); k.hash(&mut hasher); hasher.finish() }; // Existing key (insert) match map.raw_entry_mut().from_key(&1) { Vacant(_) => unreachable!(), Occupied(mut view) => { assert_eq!(view.get(), &10); assert_eq!(view.insert(100), 10); } } let hash1 = compute_hash(&map, 1); assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100)); assert_eq!(map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(), (&1, &100)); assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(), (&1, &100)); assert_eq!(map.raw_entry().search_bucket(hash1, |k| *k == 1).unwrap(), (&1, &100)); assert_eq!(map.len(), 6); // Existing key (update) match map.raw_entry_mut().from_key(&2) { Vacant(_) => unreachable!(), Occupied(mut view) => { let v = view.get_mut(); let new_v = (*v) * 10; *v = new_v; } } let hash2 = compute_hash(&map, 2); assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200)); assert_eq!(map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(), (&2, &200)); assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(), (&2, &200)); assert_eq!(map.raw_entry().search_bucket(hash2, |k| *k == 2).unwrap(), (&2, &200)); assert_eq!(map.len(), 6); // Existing key (take) let hash3 = compute_hash(&map, 3); match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) { Vacant(_) => unreachable!(), Occupied(view) => { assert_eq!(view.remove_entry(), (3, 30)); } } assert_eq!(map.raw_entry().from_key(&3), None); assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None); assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None); assert_eq!(map.raw_entry().search_bucket(hash3, |k| *k == 3), None); assert_eq!(map.len(), 5); // Nonexistent key (insert) match map.raw_entry_mut().from_key(&10) { Occupied(_) => unreachable!(), Vacant(view) => { assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000)); } } assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000)); assert_eq!(map.len(), 6); // Ensure all lookup methods produce equivalent results. 
for k in 0..12 { let hash = compute_hash(&map, k); let v = map.get(&k).cloned(); let kv = v.as_ref().map(|v| (&k, v)); assert_eq!(map.raw_entry().from_key(&k), kv); assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv); assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv); assert_eq!(map.raw_entry().search_bucket(hash, |q| *q == k), kv); match map.raw_entry_mut().from_key(&k) { Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv), Vacant(_) => assert_eq!(v, None), } match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) { Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv), Vacant(_) => assert_eq!(v, None), } match map.raw_entry_mut().from_hash(hash, |q| *q == k) { Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv), Vacant(_) => assert_eq!(v, None), } match map.raw_entry_mut().search_bucket(hash, |q| *q == k) { Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv), Vacant(_) => assert_eq!(v, None), } } } }
31.706327
100
0.53659
790d9243fbaec874db7488ff817c615d5068cb1e
42,399
//! Propagates assignment destinations backwards in the CFG to eliminate redundant assignments. //! //! # Motivation //! //! MIR building can insert a lot of redundant copies, and Rust code in general often tends to move //! values around a lot. The result is a lot of assignments of the form `dest = {move} src;` in MIR. //! MIR building for constants in particular tends to create additional locals that are only used //! inside a single block to shuffle a value around unnecessarily. //! //! LLVM by itself is not good enough at eliminating these redundant copies (eg. see //! <https://github.com/rust-lang/rust/issues/32966>), so this leaves some performance on the table //! that we can regain by implementing an optimization for removing these assign statements in rustc //! itself. When this optimization runs fast enough, it can also speed up the constant evaluation //! and code generation phases of rustc due to the reduced number of statements and locals. //! //! # The Optimization //! //! Conceptually, this optimization is "destination propagation". It is similar to the Named Return //! Value Optimization, or NRVO, known from the C++ world, except that it isn't limited to return //! values or the return place `_0`. On a very high level, independent of the actual implementation //! details, it does the following: //! //! 1) Identify `dest = src;` statements that can be soundly eliminated. //! 2) Replace all mentions of `src` with `dest` ("unifying" them and propagating the destination //! backwards). //! 3) Delete the `dest = src;` statement (by making it a `nop`). //! //! Step 1) is by far the hardest, so it is explained in more detail below. //! //! ## Soundness //! //! Given an `Assign` statement `dest = src;`, where `dest` is a `Place` and `src` is an `Rvalue`, //! there are a few requirements that must hold for the optimization to be sound: //! //! * `dest` must not contain any *indirection* through a pointer. It must access part of the base //! local. Otherwise it might point to arbitrary memory that is hard to track. //! //! It must also not contain any indexing projections, since those take an arbitrary `Local` as //! the index, and that local might only be initialized shortly before `dest` is used. //! //! Subtle case: If `dest` is a, or projects through a union, then we have to make sure that there //! remains an assignment to it, since that sets the "active field" of the union. But if `src` is //! a ZST, it might not be initialized, so there might not be any use of it before the assignment, //! and performing the optimization would simply delete the assignment, leaving `dest` //! uninitialized. //! //! * `src` must be a bare `Local` without any indirections or field projections (FIXME: Is this a //! fundamental restriction or just current impl state?). It can be copied or moved by the //! assignment. //! //! * The `dest` and `src` locals must never be [*live*][liveness] at the same time. If they are, it //! means that they both hold a (potentially different) value that is needed by a future use of //! the locals. Unifying them would overwrite one of the values. //! //! Note that computing liveness of locals that have had their address taken is more difficult: //! Short of doing full escape analysis on the address/pointer/reference, the pass would need to //! assume that any operation that can potentially involve opaque user code (such as function //! calls, destructors, and inline assembly) may access any local that had its address taken //! before that point. //! //! 
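As a small, purely illustrative sketch (hypothetical MIR, not taken from a real compilation), suppose a body contains `_2 = f();`, then the candidate assignment `_3 = move _2;`, and finally `_0 = move _3;`. //! Provided `_2` and `_3` are never live at the same time, step 2) rewrites every mention of `_2` into `_3` (turning the candidate assignment into a self-assignment) and step 3) replaces that self-assignment with a `nop`, leaving just `_3 = f();` and `_0 = move _3;`. //! //!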
Here, the first two conditions are simple structural requirements on the `Assign` statements //! that can be trivially checked. The liveness requirement however is more difficult and costly to //! check. //! //! ## Previous Work //! //! A [previous attempt] at implementing an optimization like this turned out to be a significant //! regression in compiler performance. Fixing the regressions introduced a lot of undesirable //! complexity to the implementation. //! //! A [subsequent approach] tried to avoid the costly computation by limiting itself to acyclic //! CFGs, but still turned out to be far too costly to run due to suboptimal performance within //! individual basic blocks, requiring a walk across the entire block for every assignment found //! within the block. For the `tuple-stress` benchmark, which has 458745 statements in a single //! block, this proved to be far too costly. //! //! Since the first attempt at this, the compiler has improved dramatically, and new analysis //! frameworks have been added that should make this approach viable without requiring a limited //! approach that only works for some classes of CFGs: //! - rustc now has a powerful dataflow analysis framework that can handle forwards and backwards //! analyses efficiently. //! - Layout optimizations for generators have been added to improve code generation for //! async/await, which are very similar in spirit to what this optimization does. Both walk the //! MIR and record conflicting uses of locals in a `BitMatrix`. //! //! Also, rustc now has a simple NRVO pass (see `nrvo.rs`), which handles a subset of the cases that //! this destination propagation pass handles, proving that similar optimizations can be performed //! on MIR. //! //! ## Pre/Post Optimization //! //! It is recommended to run `SimplifyCfg` and then `SimplifyLocals` some time after this pass, as //! it replaces the eliminated assign statements with `nop`s and leaves unused locals behind. //! //! [liveness]: https://en.wikipedia.org/wiki/Live_variable_analysis //! [previous attempt]: https://github.com/rust-lang/rust/pull/47954 //! [subsequent approach]: https://github.com/rust-lang/rust/pull/71003 use crate::MirPass; use itertools::Itertools; use rustc_data_structures::unify::{InPlaceUnificationTable, UnifyKey}; use rustc_index::{ bit_set::{BitMatrix, BitSet}, vec::IndexVec, }; use rustc_middle::mir::tcx::PlaceTy; use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; use rustc_middle::mir::{dump_mir, PassWhere}; use rustc_middle::mir::{ traversal, Body, InlineAsmOperand, Local, LocalKind, Location, Operand, Place, PlaceElem, Rvalue, Statement, StatementKind, Terminator, TerminatorKind, }; use rustc_middle::ty::TyCtxt; use rustc_mir_dataflow::impls::{MaybeInitializedLocals, MaybeLiveLocals}; use rustc_mir_dataflow::Analysis; // Empirical measurements have resulted in some observations: // - Running on a body with a single block and 500 locals takes barely any time // - Running on a body with ~400 blocks and ~300 relevant locals takes "too long" // ...so we just limit both to somewhat reasonable-ish looking values. const MAX_LOCALS: usize = 500; const MAX_BLOCKS: usize = 250; pub struct DestinationPropagation; impl<'tcx> MirPass<'tcx> for DestinationPropagation { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { // FIXME(#79191, #82678) if !tcx.sess.opts.debugging_opts.unsound_mir_opts { return; } // Only run at mir-opt-level=3 or higher for now (we don't fix up debuginfo and remove // storage statements at the moment). 
if tcx.sess.mir_opt_level() < 3 { return; } let def_id = body.source.def_id(); let candidates = find_candidates(tcx, body); if candidates.is_empty() { debug!("{:?}: no dest prop candidates, done", def_id); return; } // Collect all locals we care about. We only compute conflicts for these to save time. let mut relevant_locals = BitSet::new_empty(body.local_decls.len()); for CandidateAssignment { dest, src, loc: _ } in &candidates { relevant_locals.insert(dest.local); relevant_locals.insert(*src); } // This pass unfortunately has `O(l² * s)` performance, where `l` is the number of locals // and `s` is the number of statements and terminators in the function. // To prevent blowing up compile times too much, we bail out when there are too many locals. let relevant = relevant_locals.count(); debug!( "{:?}: {} locals ({} relevant), {} blocks", def_id, body.local_decls.len(), relevant, body.basic_blocks().len() ); if relevant > MAX_LOCALS { warn!( "too many candidate locals in {:?} ({}, max is {}), not optimizing", def_id, relevant, MAX_LOCALS ); return; } if body.basic_blocks().len() > MAX_BLOCKS { warn!( "too many blocks in {:?} ({}, max is {}), not optimizing", def_id, body.basic_blocks().len(), MAX_BLOCKS ); return; } let mut conflicts = Conflicts::build(tcx, body, &relevant_locals); let mut replacements = Replacements::new(body.local_decls.len()); for candidate @ CandidateAssignment { dest, src, loc } in candidates { // Merge locals that don't conflict. if !conflicts.can_unify(dest.local, src) { debug!("at assignment {:?}, conflict {:?} vs. {:?}", loc, dest.local, src); continue; } if replacements.for_src(candidate.src).is_some() { debug!("src {:?} already has replacement", candidate.src); continue; } if !tcx.consider_optimizing(|| { format!("DestinationPropagation {:?} {:?}", def_id, candidate) }) { break; } replacements.push(candidate); conflicts.unify(candidate.src, candidate.dest.local); } replacements.flatten(tcx); debug!("replacements {:?}", replacements.map); Replacer { tcx, replacements, place_elem_cache: Vec::new() }.visit_body(body); // FIXME fix debug info } } #[derive(Debug, Eq, PartialEq, Copy, Clone)] struct UnifyLocal(Local); impl From<Local> for UnifyLocal { fn from(l: Local) -> Self { Self(l) } } impl UnifyKey for UnifyLocal { type Value = (); fn index(&self) -> u32 { self.0.as_u32() } fn from_index(u: u32) -> Self { Self(Local::from_u32(u)) } fn tag() -> &'static str { "UnifyLocal" } } struct Replacements<'tcx> { /// Maps locals to their replacement. map: IndexVec<Local, Option<Place<'tcx>>>, /// Whose locals' live ranges to kill. kill: BitSet<Local>, } impl Replacements<'tcx> { fn new(locals: usize) -> Self { Self { map: IndexVec::from_elem_n(None, locals), kill: BitSet::new_empty(locals) } } fn push(&mut self, candidate: CandidateAssignment<'tcx>) { trace!("Replacements::push({:?})", candidate); let entry = &mut self.map[candidate.src]; assert!(entry.is_none()); *entry = Some(candidate.dest); self.kill.insert(candidate.src); self.kill.insert(candidate.dest.local); } /// Applies the stored replacements to all replacements, until no replacements would result in /// locals that need further replacements when applied. fn flatten(&mut self, tcx: TyCtxt<'tcx>) { // Note: This assumes that there are no cycles in the replacements, which is enforced via // `self.unified_locals`. Otherwise this can cause an infinite loop. for local in self.map.indices() { if let Some(replacement) = self.map[local] { // Substitute the base local of `replacement` until fixpoint. 
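// For example (purely illustrative): if `_3` is mapped to `_2.0` and `_2` is mapped to `_1`, the loop below chases the base down to `_1` and re-applies the recorded projections, so the flattened replacement for `_3` becomes `_1.0`.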
let mut base = replacement.local; let mut reversed_projection_slices = Vec::with_capacity(1); while let Some(replacement_for_replacement) = self.map[base] { base = replacement_for_replacement.local; reversed_projection_slices.push(replacement_for_replacement.projection); } let projection: Vec<_> = reversed_projection_slices .iter() .rev() .flat_map(|projs| projs.iter()) .chain(replacement.projection.iter()) .collect(); let projection = tcx.intern_place_elems(&projection); // Replace with the final `Place`. self.map[local] = Some(Place { local: base, projection }); } } } fn for_src(&self, src: Local) -> Option<Place<'tcx>> { self.map[src] } } struct Replacer<'tcx> { tcx: TyCtxt<'tcx>, replacements: Replacements<'tcx>, place_elem_cache: Vec<PlaceElem<'tcx>>, } impl<'tcx> MutVisitor<'tcx> for Replacer<'tcx> { fn tcx<'a>(&'a self) -> TyCtxt<'tcx> { self.tcx } fn visit_local(&mut self, local: &mut Local, context: PlaceContext, location: Location) { if context.is_use() && self.replacements.for_src(*local).is_some() { bug!( "use of local {:?} should have been replaced by visit_place; context={:?}, loc={:?}", local, context, location, ); } } fn process_projection_elem( &mut self, elem: PlaceElem<'tcx>, _: Location, ) -> Option<PlaceElem<'tcx>> { match elem { PlaceElem::Index(local) => { if let Some(replacement) = self.replacements.for_src(local) { bug!( "cannot replace {:?} with {:?} in index projection {:?}", local, replacement, elem, ); } else { None } } _ => None, } } fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) { if let Some(replacement) = self.replacements.for_src(place.local) { // Rebase `place`s projections onto `replacement`'s. self.place_elem_cache.clear(); self.place_elem_cache.extend(replacement.projection.iter().chain(place.projection)); let projection = self.tcx.intern_place_elems(&self.place_elem_cache); let new_place = Place { local: replacement.local, projection }; debug!("Replacer: {:?} -> {:?}", place, new_place); *place = new_place; } self.super_place(place, context, location); } fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) { self.super_statement(statement, location); match &statement.kind { // FIXME: Don't delete storage statements, merge the live ranges instead StatementKind::StorageDead(local) | StatementKind::StorageLive(local) if self.replacements.kill.contains(*local) => { statement.make_nop() } StatementKind::Assign(box (dest, rvalue)) => { match rvalue { Rvalue::Use(Operand::Copy(place) | Operand::Move(place)) => { // These might've been turned into self-assignments by the replacement // (this includes the original statement we wanted to eliminate). if dest == place { debug!("{:?} turned into self-assignment, deleting", location); statement.make_nop(); } } _ => {} } } _ => {} } } } struct Conflicts<'a> { relevant_locals: &'a BitSet<Local>, /// The conflict matrix. It is always symmetric and the adjacency matrix of the corresponding /// conflict graph. matrix: BitMatrix<Local, Local>, /// Preallocated `BitSet` used by `unify`. unify_cache: BitSet<Local>, /// Tracks locals that have been merged together to prevent cycles and propagate conflicts. unified_locals: InPlaceUnificationTable<UnifyLocal>, } impl Conflicts<'a> { fn build<'tcx>( tcx: TyCtxt<'tcx>, body: &'_ Body<'tcx>, relevant_locals: &'a BitSet<Local>, ) -> Self { // We don't have to look out for locals that have their address taken, since // `find_candidates` already takes care of that. 
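// Start with an empty (and therefore trivially symmetric) conflict matrix over all locals; rows are filled in below as dataflow and intra-statement conflicts are discovered.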
let conflicts = BitMatrix::from_row_n( &BitSet::new_empty(body.local_decls.len()), body.local_decls.len(), ); let mut init = MaybeInitializedLocals .into_engine(tcx, body) .iterate_to_fixpoint() .into_results_cursor(body); let mut live = MaybeLiveLocals.into_engine(tcx, body).iterate_to_fixpoint().into_results_cursor(body); let mut reachable = None; dump_mir(tcx, None, "DestinationPropagation-dataflow", &"", body, |pass_where, w| { let reachable = reachable.get_or_insert_with(|| traversal::reachable_as_bitset(body)); match pass_where { PassWhere::BeforeLocation(loc) if reachable.contains(loc.block) => { init.seek_before_primary_effect(loc); live.seek_after_primary_effect(loc); writeln!(w, " // init: {:?}", init.get())?; writeln!(w, " // live: {:?}", live.get())?; } PassWhere::AfterTerminator(bb) if reachable.contains(bb) => { let loc = body.terminator_loc(bb); init.seek_after_primary_effect(loc); live.seek_before_primary_effect(loc); writeln!(w, " // init: {:?}", init.get())?; writeln!(w, " // live: {:?}", live.get())?; } PassWhere::BeforeBlock(bb) if reachable.contains(bb) => { init.seek_to_block_start(bb); live.seek_to_block_start(bb); writeln!(w, " // init: {:?}", init.get())?; writeln!(w, " // live: {:?}", live.get())?; } PassWhere::BeforeCFG | PassWhere::AfterCFG | PassWhere::AfterLocation(_) => {} PassWhere::BeforeLocation(_) | PassWhere::AfterTerminator(_) => { writeln!(w, " // init: <unreachable>")?; writeln!(w, " // live: <unreachable>")?; } PassWhere::BeforeBlock(_) => { writeln!(w, " // init: <unreachable>")?; writeln!(w, " // live: <unreachable>")?; } } Ok(()) }); let mut this = Self { relevant_locals, matrix: conflicts, unify_cache: BitSet::new_empty(body.local_decls.len()), unified_locals: { let mut table = InPlaceUnificationTable::new(); // Pre-fill table with all locals (this creates N nodes / "connected" components, // "graph"-ically speaking). for local in 0..body.local_decls.len() { assert_eq!(table.new_key(()), UnifyLocal(Local::from_usize(local))); } table }, }; let mut live_and_init_locals = Vec::new(); // Visit only reachable basic blocks. The exact order is not important. for (block, data) in traversal::preorder(body) { // We need to observe the dataflow state *before* all possible locations (statement or // terminator) in each basic block, and then observe the state *after* the terminator // effect is applied. As long as neither `init` nor `borrowed` has a "before" effect, // we will observe all possible dataflow states. // Since liveness is a backwards analysis, we need to walk the results backwards. To do // that, we first collect in the `MaybeInitializedLocals` results in a forwards // traversal. live_and_init_locals.resize_with(data.statements.len() + 1, || { BitSet::new_empty(body.local_decls.len()) }); // First, go forwards for `MaybeInitializedLocals` and apply intra-statement/terminator // conflicts. for (i, statement) in data.statements.iter().enumerate() { this.record_statement_conflicts(statement); let loc = Location { block, statement_index: i }; init.seek_before_primary_effect(loc); live_and_init_locals[i].clone_from(init.get()); } this.record_terminator_conflicts(data.terminator()); let term_loc = Location { block, statement_index: data.statements.len() }; init.seek_before_primary_effect(term_loc); live_and_init_locals[term_loc.statement_index].clone_from(init.get()); // Now, go backwards and union with the liveness results. 
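// (Despite the wording above, the per-location combination below is an intersection: only locals that are both initialized and live at a point are recorded as conflicting with each other there.)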
for statement_index in (0..=data.statements.len()).rev() { let loc = Location { block, statement_index }; live.seek_after_primary_effect(loc); live_and_init_locals[statement_index].intersect(live.get()); trace!("record conflicts at {:?}", loc); this.record_dataflow_conflicts(&mut live_and_init_locals[statement_index]); } init.seek_to_block_end(block); live.seek_to_block_end(block); let mut conflicts = init.get().clone(); conflicts.intersect(live.get()); trace!("record conflicts at end of {:?}", block); this.record_dataflow_conflicts(&mut conflicts); } this } fn record_dataflow_conflicts(&mut self, new_conflicts: &mut BitSet<Local>) { // Remove all locals that are not candidates. new_conflicts.intersect(self.relevant_locals); for local in new_conflicts.iter() { self.matrix.union_row_with(&new_conflicts, local); } } fn record_local_conflict(&mut self, a: Local, b: Local, why: &str) { trace!("conflict {:?} <-> {:?} due to {}", a, b, why); self.matrix.insert(a, b); self.matrix.insert(b, a); } /// Records locals that must not overlap during the evaluation of `stmt`. These locals conflict /// and must not be merged. fn record_statement_conflicts(&mut self, stmt: &Statement<'_>) { match &stmt.kind { // While the left and right sides of an assignment must not overlap, we do not mark // conflicts here as that would make this optimization useless. When we optimize, we // eliminate the resulting self-assignments automatically. StatementKind::Assign(_) => {} StatementKind::LlvmInlineAsm(asm) => { // Inputs and outputs must not overlap. for (_, input) in &*asm.inputs { if let Some(in_place) = input.place() { if !in_place.is_indirect() { for out_place in &*asm.outputs { if !out_place.is_indirect() && !in_place.is_indirect() { self.record_local_conflict( in_place.local, out_place.local, "aliasing llvm_asm! operands", ); } } } } } } StatementKind::SetDiscriminant { .. } | StatementKind::StorageLive(..) | StatementKind::StorageDead(..) | StatementKind::Retag(..) | StatementKind::FakeRead(..) | StatementKind::AscribeUserType(..) | StatementKind::Coverage(..) | StatementKind::CopyNonOverlapping(..) | StatementKind::Nop => {} } } fn record_terminator_conflicts(&mut self, term: &Terminator<'_>) { match &term.kind { TerminatorKind::DropAndReplace { place: dropped_place, value, target: _, unwind: _, } => { if let Some(place) = value.place() { if !place.is_indirect() && !dropped_place.is_indirect() { self.record_local_conflict( place.local, dropped_place.local, "DropAndReplace operand overlap", ); } } } TerminatorKind::Yield { value, resume: _, resume_arg, drop: _ } => { if let Some(place) = value.place() { if !place.is_indirect() && !resume_arg.is_indirect() { self.record_local_conflict( place.local, resume_arg.local, "Yield operand overlap", ); } } } TerminatorKind::Call { func, args, destination: Some((dest_place, _)), cleanup: _, from_hir_call: _, fn_span: _, } => { // No arguments may overlap with the destination. for arg in args.iter().chain(Some(func)) { if let Some(place) = arg.place() { if !place.is_indirect() && !dest_place.is_indirect() { self.record_local_conflict( dest_place.local, place.local, "call dest/arg overlap", ); } } } } TerminatorKind::InlineAsm { template: _, operands, options: _, line_spans: _, destination: _, } => { // The intended semantics here aren't documented, we just assume that nothing that // could be written to by the assembly may overlap with any other operands. 
for op in operands { match op { InlineAsmOperand::Out { reg: _, late: _, place: Some(dest_place) } | InlineAsmOperand::InOut { reg: _, late: _, in_value: _, out_place: Some(dest_place), } => { // For output place `place`, add all places accessed by the inline asm. for op in operands { match op { InlineAsmOperand::In { reg: _, value } => { if let Some(p) = value.place() { if !p.is_indirect() && !dest_place.is_indirect() { self.record_local_conflict( p.local, dest_place.local, "asm! operand overlap", ); } } } InlineAsmOperand::Out { reg: _, late: _, place: Some(place), } => { if !place.is_indirect() && !dest_place.is_indirect() { self.record_local_conflict( place.local, dest_place.local, "asm! operand overlap", ); } } InlineAsmOperand::InOut { reg: _, late: _, in_value, out_place, } => { if let Some(place) = in_value.place() { if !place.is_indirect() && !dest_place.is_indirect() { self.record_local_conflict( place.local, dest_place.local, "asm! operand overlap", ); } } if let Some(place) = out_place { if !place.is_indirect() && !dest_place.is_indirect() { self.record_local_conflict( place.local, dest_place.local, "asm! operand overlap", ); } } } InlineAsmOperand::Out { reg: _, late: _, place: None } | InlineAsmOperand::Const { value: _ } | InlineAsmOperand::SymFn { value: _ } | InlineAsmOperand::SymStatic { def_id: _ } => {} } } } InlineAsmOperand::InOut { reg: _, late: _, in_value: _, out_place: None, } | InlineAsmOperand::In { reg: _, value: _ } | InlineAsmOperand::Out { reg: _, late: _, place: None } | InlineAsmOperand::Const { value: _ } | InlineAsmOperand::SymFn { value: _ } | InlineAsmOperand::SymStatic { def_id: _ } => {} } } } TerminatorKind::Goto { .. } | TerminatorKind::Call { destination: None, .. } | TerminatorKind::SwitchInt { .. } | TerminatorKind::Resume | TerminatorKind::Abort | TerminatorKind::Return | TerminatorKind::Unreachable | TerminatorKind::Drop { .. } | TerminatorKind::Assert { .. } | TerminatorKind::GeneratorDrop | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } => {} } } /// Checks whether `a` and `b` may be merged. Returns `false` if there's a conflict. fn can_unify(&mut self, a: Local, b: Local) -> bool { // After some locals have been unified, their conflicts are only tracked in the root key, // so look that up. let a = self.unified_locals.find(a).0; let b = self.unified_locals.find(b).0; if a == b { // Already merged (part of the same connected component). return false; } if self.matrix.contains(a, b) { // Conflict (derived via dataflow, intra-statement conflicts, or inherited from another // local during unification). return false; } true } /// Merges the conflicts of `a` and `b`, so that each one inherits all conflicts of the other. /// /// `can_unify` must have returned `true` for the same locals, or this may panic or lead to /// miscompiles. /// /// This is called when the pass makes the decision to unify `a` and `b` (or parts of `a` and /// `b`) and is needed to ensure that future unification decisions take potentially newly /// introduced conflicts into account. /// /// For an example, assume we have locals `_0`, `_1`, `_2`, and `_3`. There are these conflicts: /// /// * `_0` <-> `_1` /// * `_1` <-> `_2` /// * `_3` <-> `_0` /// /// We then decide to merge `_2` with `_3` since they don't conflict. Then we decide to merge /// `_2` with `_0`, which also doesn't have a conflict in the above list. However `_2` is now /// `_3`, which does conflict with `_0`. 
fn unify(&mut self, a: Local, b: Local) { trace!("unify({:?}, {:?})", a, b); // Get the root local of the connected components. The root local stores the conflicts of // all locals in the connected component (and *is stored* as the conflicting local of other // locals). let a = self.unified_locals.find(a).0; let b = self.unified_locals.find(b).0; assert_ne!(a, b); trace!("roots: a={:?}, b={:?}", a, b); trace!("{:?} conflicts: {:?}", a, self.matrix.iter(a).format(", ")); trace!("{:?} conflicts: {:?}", b, self.matrix.iter(b).format(", ")); self.unified_locals.union(a, b); let root = self.unified_locals.find(a).0; assert!(root == a || root == b); // Make all locals that conflict with `a` also conflict with `b`, and vice versa. self.unify_cache.clear(); for conflicts_with_a in self.matrix.iter(a) { self.unify_cache.insert(conflicts_with_a); } for conflicts_with_b in self.matrix.iter(b) { self.unify_cache.insert(conflicts_with_b); } for conflicts_with_a_or_b in self.unify_cache.iter() { // Set both `a` and `b` for this local's row. self.matrix.insert(conflicts_with_a_or_b, a); self.matrix.insert(conflicts_with_a_or_b, b); } // Write the locals `a` conflicts with to `b`'s row. self.matrix.union_rows(a, b); // Write the locals `b` conflicts with to `a`'s row. self.matrix.union_rows(b, a); } } /// A `dest = {move} src;` statement at `loc`. /// /// We want to consider merging `dest` and `src` due to this assignment. #[derive(Debug, Copy, Clone)] struct CandidateAssignment<'tcx> { /// Does not contain indirection or indexing (so the only local it contains is the place base). dest: Place<'tcx>, src: Local, loc: Location, } /// Scans the MIR for assignments between locals that we might want to consider merging. /// /// This will filter out assignments that do not match the right form (as described in the top-level /// comment) and also throw out assignments that involve a local that has its address taken or is /// otherwise ineligible (eg. locals used as array indices are ignored because we cannot propagate /// arbitrary places into array indices). fn find_candidates<'a, 'tcx>( tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, ) -> Vec<CandidateAssignment<'tcx>> { let mut visitor = FindAssignments { tcx, body, candidates: Vec::new(), ever_borrowed_locals: ever_borrowed_locals(body), locals_used_as_array_index: locals_used_as_array_index(body), }; visitor.visit_body(body); visitor.candidates } struct FindAssignments<'a, 'tcx> { tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, candidates: Vec<CandidateAssignment<'tcx>>, ever_borrowed_locals: BitSet<Local>, locals_used_as_array_index: BitSet<Local>, } impl<'a, 'tcx> Visitor<'tcx> for FindAssignments<'a, 'tcx> { fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { if let StatementKind::Assign(box ( dest, Rvalue::Use(Operand::Copy(src) | Operand::Move(src)), )) = &statement.kind { // `dest` must not have pointer indirection. if dest.is_indirect() { return; } // `src` must be a plain local. if !src.projection.is_empty() { return; } // Since we want to replace `src` with `dest`, `src` must not be required. if is_local_required(src.local, self.body) { return; } // Can't optimize if both locals ever have their address taken (can introduce // aliasing). // FIXME: This can be smarter and take `StorageDead` into account (which // invalidates borrows). 
if self.ever_borrowed_locals.contains(dest.local) || self.ever_borrowed_locals.contains(src.local) { return; } assert_ne!(dest.local, src.local, "self-assignments are UB"); // We can't replace locals occurring in `PlaceElem::Index` for now. if self.locals_used_as_array_index.contains(src.local) { return; } // Handle the "subtle case" described above by rejecting any `dest` that is or // projects through a union. let mut place_ty = PlaceTy::from_ty(self.body.local_decls[dest.local].ty); if place_ty.ty.is_union() { return; } for elem in dest.projection { if let PlaceElem::Index(_) = elem { // `dest` contains an indexing projection. return; } place_ty = place_ty.projection_ty(self.tcx, elem); if place_ty.ty.is_union() { return; } } self.candidates.push(CandidateAssignment { dest: *dest, src: src.local, loc: location, }); } } } /// Some locals are part of the function's interface and can not be removed. /// /// Note that these locals *can* still be merged with non-required locals by removing that other /// local. fn is_local_required(local: Local, body: &Body<'_>) -> bool { match body.local_kind(local) { LocalKind::Arg | LocalKind::ReturnPointer => true, LocalKind::Var | LocalKind::Temp => false, } } /// Walks MIR to find all locals that have their address taken anywhere. fn ever_borrowed_locals(body: &Body<'_>) -> BitSet<Local> { let mut visitor = BorrowCollector { locals: BitSet::new_empty(body.local_decls.len()) }; visitor.visit_body(body); visitor.locals } struct BorrowCollector { locals: BitSet<Local>, } impl<'tcx> Visitor<'tcx> for BorrowCollector { fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { self.super_rvalue(rvalue, location); match rvalue { Rvalue::AddressOf(_, borrowed_place) | Rvalue::Ref(_, _, borrowed_place) => { if !borrowed_place.is_indirect() { self.locals.insert(borrowed_place.local); } } Rvalue::Cast(..) | Rvalue::ShallowInitBox(..) | Rvalue::Use(..) | Rvalue::Repeat(..) | Rvalue::Len(..) | Rvalue::BinaryOp(..) | Rvalue::CheckedBinaryOp(..) | Rvalue::NullaryOp(..) | Rvalue::UnaryOp(..) | Rvalue::Discriminant(..) | Rvalue::Aggregate(..) | Rvalue::ThreadLocalRef(..) => {} } } fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { self.super_terminator(terminator, location); match terminator.kind { TerminatorKind::Drop { place: dropped_place, .. } | TerminatorKind::DropAndReplace { place: dropped_place, .. } => { self.locals.insert(dropped_place.local); } TerminatorKind::Abort | TerminatorKind::Assert { .. } | TerminatorKind::Call { .. } | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } | TerminatorKind::GeneratorDrop | TerminatorKind::Goto { .. } | TerminatorKind::Resume | TerminatorKind::Return | TerminatorKind::SwitchInt { .. } | TerminatorKind::Unreachable | TerminatorKind::Yield { .. } | TerminatorKind::InlineAsm { .. } => {} } } } /// `PlaceElem::Index` only stores a `Local`, so we can't replace that with a full `Place`. /// /// Collect locals used as indices so we don't generate candidates that are impossible to apply /// later. 
fn locals_used_as_array_index(body: &Body<'_>) -> BitSet<Local> { let mut visitor = IndexCollector { locals: BitSet::new_empty(body.local_decls.len()) }; visitor.visit_body(body); visitor.locals } struct IndexCollector { locals: BitSet<Local>, } impl<'tcx> Visitor<'tcx> for IndexCollector { fn visit_projection_elem( &mut self, local: Local, proj_base: &[PlaceElem<'tcx>], elem: PlaceElem<'tcx>, context: PlaceContext, location: Location, ) { if let PlaceElem::Index(i) = elem { self.locals.insert(i); } self.super_projection_elem(local, proj_base, elem, context, location); } }
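// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the rustc source file above: a toy version
// of the conflict-matrix + union-find bookkeeping that `Conflicts::can_unify`
// and `Conflicts::unify` implement. Locals are plain `usize`s and the matrix
// is a `Vec<Vec<bool>>`; the real pass uses `BitMatrix` and
// `InPlaceUnificationTable`, so the names and representation here are
// assumptions made only for this example. It reproduces the scenario from the
// doc comment on `unify`: after merging `_2` with `_3`, a later merge of `_2`
// with `_0` must be rejected because `_3` conflicts with `_0`.
// ---------------------------------------------------------------------------
struct ToyConflicts {
    // conflict[a][b] == true means locals a and b must not share storage.
    conflict: Vec<Vec<bool>>,
    // parent[x] is the union-find parent; roots carry the merged conflict rows.
    parent: Vec<usize>,
}

impl ToyConflicts {
    fn new(n: usize) -> Self {
        Self { conflict: vec![vec![false; n]; n], parent: (0..n).collect() }
    }

    // Union-find lookup with path compression.
    fn find(&mut self, x: usize) -> usize {
        if self.parent[x] == x {
            x
        } else {
            let root = self.find(self.parent[x]);
            self.parent[x] = root;
            root
        }
    }

    fn record_conflict(&mut self, a: usize, b: usize) {
        self.conflict[a][b] = true;
        self.conflict[b][a] = true;
    }

    // Mirrors `can_unify`: resolve both locals to their roots, then consult the matrix.
    fn can_unify(&mut self, a: usize, b: usize) -> bool {
        let (a, b) = (self.find(a), self.find(b));
        a != b && !self.conflict[a][b]
    }

    // Mirrors `unify`: each root inherits the other's conflicts, so later
    // `can_unify` queries see conflicts introduced by earlier merges.
    fn unify(&mut self, a: usize, b: usize) {
        let (ra, rb) = (self.find(a), self.find(b));
        assert!(ra != rb && !self.conflict[ra][rb]);
        for other in 0..self.parent.len() {
            let merged = self.conflict[ra][other] || self.conflict[rb][other];
            self.conflict[ra][other] = merged;
            self.conflict[other][ra] = merged;
            self.conflict[rb][other] = merged;
            self.conflict[other][rb] = merged;
        }
        self.parent[rb] = ra;
    }
}

#[test]
fn toy_conflicts_demo() {
    // Conflicts from the `unify` doc comment: _0 <-> _1, _1 <-> _2, _3 <-> _0.
    let mut c = ToyConflicts::new(4);
    c.record_conflict(0, 1);
    c.record_conflict(1, 2);
    c.record_conflict(3, 0);
    assert!(c.can_unify(2, 3)); // no direct conflict recorded
    c.unify(2, 3);
    // _2 now shares storage with _3, which conflicts with _0, so merging _2
    // with _0 must be rejected even though _2 <-> _0 was never recorded.
    assert!(!c.can_unify(2, 0));
}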
40.807507
101
0.536994
09399eb90168d3769e05c819548f7b2bebf81f09
5,692
mod test_runnel { use runnel::medium::stringio::{StringErr, StringIn, StringOut}; use runnel::{RunnelIoe, RunnelIoeBuilder}; // #[test] fn test_size() { assert_eq!(std::mem::size_of::<RunnelIoe>(), 48); assert_eq!(std::mem::size_of::<RunnelIoeBuilder>(), 48); } #[test] fn test_debug_runnel_ioe() { let sioe = RunnelIoe::new( Box::new(StringIn::with_str("ABCDE\nefgh\n")), Box::new(StringOut::default()), Box::new(StringErr::default()), ); let s = format!("{:?}", sioe); // #[cfg(has_fmt_dbg_mutex_poisoned)] let t = concat!( "RunnelIoe {", " pin: StringIn(LockableStringIn {", " inner: Mutex { data: BufReader { reader: RawStringIn {", " buf: \"ABCDE\\nefgh\\n\", pos: 0, amt: 0 }, buffer: 0/1024 },", " poisoned: false, .. } }),", " pout: StringOut(LockableStringOut {", " inner: Mutex { data: RawStringOut { buf: \"\" },", " poisoned: false, .. } }),", " perr: StringErr(LockableStringOut {", " inner: Mutex { data: RawStringOut { buf: \"\" },", " poisoned: false, .. } }) }", ); #[cfg(not(has_fmt_dbg_mutex_poisoned))] let t = concat!( "RunnelIoe {", " pin: StringIn(LockableStringIn {", " inner: Mutex { data: BufReader { reader: RawStringIn {", " buf: \"ABCDE\\nefgh\\n\", pos: 0, amt: 0 }, buffer: 0/1024 } } }),", " pout: StringOut(LockableStringOut {", " inner: Mutex { data: RawStringOut { buf: \"\" } } }),", " perr: StringErr(LockableStringOut {", " inner: Mutex { data: RawStringOut { buf: \"\" } } })", " }" ); assert_eq!(s, t); } #[test] fn test_debug_runnel_ioe_builder() { let sioe = RunnelIoeBuilder::new() .pin(StringIn::with_str("ABCDE\nefgh\n")) .build(); let s = format!("{:?}", sioe); #[cfg(has_fmt_dbg_mutex_poisoned)] let t = concat!( "RunnelIoe {", " pin: StringIn(LockableStringIn {", " inner: Mutex { data: BufReader {", " reader: RawStringIn {", " buf: \"ABCDE\\nefgh\\n\", pos: 0, amt: 0 },", " buffer: 0/1024 },", " poisoned: false, .. } }),", " pout: StdOut(Stdout { .. }),", " perr: StdErr(Stderr { .. }) }", ); #[cfg(not(has_fmt_dbg_mutex_poisoned))] let t = concat!( "RunnelIoe {", " pin: StringIn(LockableStringIn {", " inner: Mutex { data: BufReader {", " reader: RawStringIn {", " buf: \"ABCDE\\nefgh\\n\", pos: 0, amt: 0 },", " buffer: 0/1024 } } }),", " pout: StdOut(Stdout { .. }),", " perr: StdErr(Stderr { .. })", " }", ); assert_eq!(s, t); } #[test] fn test_stdio() { let sioe = RunnelIoeBuilder::new().build(); let s = format!("{:?}", sioe); assert_eq!( s, concat!( "RunnelIoe {", " pin: StdIn(Stdin { .. }),", " pout: StdOut(Stdout { .. }),", " perr: StdErr(Stderr { .. 
}) }", ) ); } #[test] fn test_stringio() { use std::io::{BufRead, Write}; // #[rustfmt::skip] let sioe = RunnelIoeBuilder::new().fill_stringio_with_str("ABCDE\nefgh\n").build(); // pluggable stream in let mut lines_iter = sioe.pin().lock().lines().map(|l| l.unwrap()); assert_eq!(lines_iter.next(), Some(String::from("ABCDE"))); assert_eq!(lines_iter.next(), Some(String::from("efgh"))); assert_eq!(lines_iter.next(), None); // // pluggable stream out #[rustfmt::skip] let res = sioe.pout().lock() .write_fmt(format_args!("{}\nACBDE\nefgh\n", 1234)); assert!(res.is_ok()); assert_eq!(sioe.pout().lock().buffer_str(), "1234\nACBDE\nefgh\n"); // // pluggable stream err #[rustfmt::skip] let res = sioe.perr().lock() .write_fmt(format_args!("{}\nACBDE\nefgh\n", 1234)); assert!(res.is_ok()); assert_eq!(sioe.perr().lock().buffer_str(), "1234\nACBDE\nefgh\n"); } #[test] fn test_pipeio() { use runnel::medium::pipeio::pipe; use std::io::{BufRead, Write}; // create in memory pipe let (a_out, a_in) = pipe(1); // // a working thread #[rustfmt::skip] let sioe = RunnelIoeBuilder::new().fill_stringio_with_str("ABCDE\nefgh\n") .pout(a_out) // pluggable pipe out .build(); let handler = std::thread::spawn(move || { for line in sioe.pin().lock().lines().map(|l| l.unwrap()) { let mut out = sioe.pout().lock(); out.write_fmt(format_args!("{}\n", line)).unwrap(); out.flush().unwrap(); } }); // // a main thread #[rustfmt::skip] let sioe = RunnelIoeBuilder::new().fill_stringio_with_str("") .pin(a_in) // pluggable pipe out .build(); let mut lines_iter = sioe.pin().lock().lines().map(|l| l.unwrap()); assert_eq!(lines_iter.next(), Some(String::from("ABCDE"))); assert_eq!(lines_iter.next(), Some(String::from("efgh"))); assert_eq!(lines_iter.next(), None); // assert!(handler.join().is_ok()); } }
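// ---------------------------------------------------------------------------
// Usage sketch, not part of the test file above: the pluggable streams these
// tests exercise let ordinary functions stay testable by taking a `RunnelIoe`
// instead of touching std::io directly. Everything below uses only APIs that
// already appear in the tests above (`RunnelIoeBuilder`,
// `fill_stringio_with_str`, `pin()`/`pout()`, `lock()`, `lines()`,
// `write_fmt`, `buffer_str()`); the module and function names are made up for
// the example.
// ---------------------------------------------------------------------------
mod usage_sketch {
    use runnel::{RunnelIoe, RunnelIoeBuilder};
    use std::io::{BufRead, Write};

    // Reads every line from the pluggable stdin and writes it back upper-cased
    // to the pluggable stdout.
    fn echo_upper(sioe: &RunnelIoe) -> std::io::Result<()> {
        for line in sioe.pin().lock().lines() {
            let line = line?;
            sioe.pout()
                .lock()
                .write_fmt(format_args!("{}\n", line.to_uppercase()))?;
        }
        Ok(())
    }

    #[test]
    fn test_echo_upper_with_stringio() {
        // String-backed streams, exactly as in test_stringio() above.
        let sioe = RunnelIoeBuilder::new()
            .fill_stringio_with_str("abc\ndef\n")
            .build();
        echo_upper(&sioe).unwrap();
        assert_eq!(sioe.pout().lock().buffer_str(), "ABC\nDEF\n");
    }
}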
37.447368
91
0.478566
9bbfabd885d98b24903aced501ab331253143ff1
82,459
#![cfg(any(feature = "bpf_c", feature = "bpf_rust"))] #[macro_use] extern crate solana_bpf_loader_program; use itertools::izip; use log::{log_enabled, trace, Level::Trace}; use solana_account_decoder::parse_bpf_loader::{ parse_bpf_upgradeable_loader, BpfUpgradeableLoaderAccountType, }; use solana_bpf_loader_program::{ BpfError, create_vm, serialization::{deserialize_parameters, serialize_parameters}, syscalls::register_syscalls, ThisInstructionMeter, }; use solana_cli_output::display::println_transaction; use solana_rbpf::vm::{Config, Executable, Tracer}; use solana_runtime::{ bank::{Bank, ExecuteTimings, NonceRollbackInfo, TransactionBalancesSet, TransactionResults}, bank_client::BankClient, genesis_utils::{create_genesis_config, GenesisConfigInfo}, loader_utils::{ load_buffer_account, load_program, load_upgradeable_program, set_upgrade_authority, upgrade_program, }, }; use solana_sdk::{ account::{AccountSharedData, ReadableAccount}, bpf_loader, bpf_loader_deprecated, bpf_loader_upgradeable, client::SyncClient, clock::MAX_PROCESSING_AGE, entrypoint::{MAX_PERMITTED_DATA_INCREASE, SUCCESS}, instruction::{AccountMeta, CompiledInstruction, Instruction, InstructionError}, keyed_account::KeyedAccount, message::Message, process_instruction::{InvokeContext, MockInvokeContext}, pubkey::Pubkey, signature::{keypair_from_seed, Keypair, Signer}, system_instruction, sysvar::{clock, fees, rent}, transaction::{Transaction, TransactionError}, }; use solana_transaction_status::{ token_balances::collect_token_balances, ConfirmedTransaction, InnerInstructions, TransactionStatusMeta, TransactionWithStatusMeta, UiTransactionEncoding, }; use std::{ cell::RefCell, collections::HashMap, env, fs::File, io::Read, path::PathBuf, str::FromStr, sync::Arc, }; /// BPF program file extension const PLATFORM_FILE_EXTENSION_BPF: &str = "so"; /// Create a BPF program file name fn create_bpf_path(name: &str) -> PathBuf { let mut pathbuf = { let current_exe = env::current_exe().unwrap(); PathBuf::from(current_exe.parent().unwrap().parent().unwrap()) }; pathbuf.push("bpf/"); pathbuf.push(name); pathbuf.set_extension(PLATFORM_FILE_EXTENSION_BPF); pathbuf } fn load_bpf_program( bank_client: &BankClient, loader_id: &Pubkey, payer_keypair: &Keypair, name: &str, ) -> Pubkey { let elf = read_bpf_program(name); load_program(bank_client, payer_keypair, loader_id, elf) } fn read_bpf_program(name: &str) -> Vec<u8> { let path = create_bpf_path(name); let mut file = File::open(&path).unwrap_or_else(|err| { panic!("Failed to open {}: {}", path.display(), err); }); let mut elf = Vec::new(); file.read_to_end(&mut elf).unwrap(); elf } #[cfg(feature = "bpf_rust")] fn write_bpf_program( bank_client: &BankClient, loader_id: &Pubkey, payer_keypair: &Keypair, program_keypair: &Keypair, elf: &[u8], ) { use solana_sdk::loader_instruction; let chunk_size = 256; // Size of chunk just needs to fit into tx let mut offset = 0; for chunk in elf.chunks(chunk_size) { let instruction = loader_instruction::write(&program_keypair.pubkey(), loader_id, offset, chunk.to_vec()); let message = Message::new(&[instruction], Some(&payer_keypair.pubkey())); bank_client .send_and_confirm_message(&[payer_keypair, &program_keypair], message) .unwrap(); offset += chunk_size as u32; } } fn load_upgradeable_bpf_program( bank_client: &BankClient, payer_keypair: &Keypair, buffer_keypair: &Keypair, executable_keypair: &Keypair, authority_keypair: &Keypair, name: &str, ) { let path = create_bpf_path(name); let mut file = File::open(&path).unwrap_or_else(|err| { panic!("Failed to 
open {}: {}", path.display(), err); }); let mut elf = Vec::new(); file.read_to_end(&mut elf).unwrap(); load_upgradeable_program( bank_client, payer_keypair, buffer_keypair, executable_keypair, authority_keypair, elf, ); } fn load_upgradeable_buffer( bank_client: &BankClient, payer_keypair: &Keypair, buffer_keypair: &Keypair, buffer_authority_keypair: &Keypair, name: &str, ) { let path = create_bpf_path(name); let mut file = File::open(&path).unwrap_or_else(|err| { panic!("Failed to open {}: {}", path.display(), err); }); let mut elf = Vec::new(); file.read_to_end(&mut elf).unwrap(); load_buffer_account( bank_client, payer_keypair, &buffer_keypair, buffer_authority_keypair, &elf, ); } fn upgrade_bpf_program( bank_client: &BankClient, payer_keypair: &Keypair, buffer_keypair: &Keypair, executable_pubkey: &Pubkey, authority_keypair: &Keypair, name: &str, ) { load_upgradeable_buffer( bank_client, payer_keypair, buffer_keypair, authority_keypair, name, ); upgrade_program( bank_client, payer_keypair, executable_pubkey, &buffer_keypair.pubkey(), &authority_keypair, &payer_keypair.pubkey(), ); } fn run_program( name: &str, program_id: &Pubkey, parameter_accounts: Vec<KeyedAccount>, instruction_data: &[u8], ) -> Result<u64, InstructionError> { let path = create_bpf_path(name); let mut file = File::open(path).unwrap(); let mut data = vec![]; file.read_to_end(&mut data).unwrap(); let loader_id = bpf_loader::id(); let parameter_bytes = serialize_parameters( &bpf_loader::id(), program_id, &parameter_accounts, &instruction_data, ) .unwrap(); let mut invoke_context = MockInvokeContext::new(parameter_accounts); let compute_meter = invoke_context.get_compute_meter(); let mut instruction_meter = ThisInstructionMeter { compute_meter }; let config = Config { max_call_depth: 20, stack_frame_size: 4096, enable_instruction_meter: true, enable_instruction_tracing: true, }; let mut executable = <dyn Executable::<BpfError, ThisInstructionMeter>>::from_elf(&data, None, config).unwrap(); executable.set_syscall_registry(register_syscalls(&mut invoke_context).unwrap()); executable.jit_compile().unwrap(); let mut instruction_count = 0; let mut tracer = None; for i in 0..2 { let mut parameter_bytes = parameter_bytes.clone(); { let mut vm = create_vm( &loader_id, executable.as_ref(), parameter_bytes.as_slice_mut(), &mut invoke_context, ) .unwrap(); let result = if i == 0 { vm.execute_program_interpreted(&mut instruction_meter) } else { vm.execute_program_jit(&mut instruction_meter) }; assert_eq!(SUCCESS, result.unwrap()); if i == 1 { assert_eq!(instruction_count, vm.get_total_instruction_count()); } instruction_count = vm.get_total_instruction_count(); if config.enable_instruction_tracing { if i == 1 { if !Tracer::compare(tracer.as_ref().unwrap(), vm.get_tracer()) { let mut tracer_display = String::new(); tracer .as_ref() .unwrap() .write(&mut tracer_display, vm.get_program()) .unwrap(); println!("TRACE (interpreted): {}", tracer_display); let mut tracer_display = String::new(); vm.get_tracer() .write(&mut tracer_display, vm.get_program()) .unwrap(); println!("TRACE (jit): {}", tracer_display); assert!(false); } else if log_enabled!(Trace) { let mut trace_buffer = String::new(); tracer .as_ref() .unwrap() .write(&mut trace_buffer, vm.get_program()) .unwrap(); trace!("BPF Program Instruction Trace:\n{}", trace_buffer); } } tracer = Some(vm.get_tracer().clone()); } } let parameter_accounts = invoke_context.get_keyed_accounts().unwrap(); deserialize_parameters( &bpf_loader::id(), parameter_accounts, 
parameter_bytes.as_slice(), true, ) .unwrap(); } Ok(instruction_count) } fn process_transaction_and_record_inner( bank: &Bank, tx: Transaction, ) -> (Result<(), TransactionError>, Vec<Vec<CompiledInstruction>>) { let signature = tx.signatures.get(0).unwrap().clone(); let txs = vec![tx]; let tx_batch = bank.prepare_batch(txs.iter()); let (mut results, _, mut inner, _transaction_logs) = bank.load_execute_and_commit_transactions( &tx_batch, MAX_PROCESSING_AGE, false, true, false, &mut ExecuteTimings::default(), ); let inner_instructions = if inner.is_empty() { Some(vec![vec![]]) } else { inner.swap_remove(0) }; let result = results .fee_collection_results .swap_remove(0) .and_then(|_| bank.get_signature_status(&signature).unwrap()); ( result, inner_instructions.expect("cpi recording should be enabled"), ) } fn execute_transactions(bank: &Bank, txs: &[Transaction]) -> Vec<ConfirmedTransaction> { let batch = bank.prepare_batch(txs.iter()); let mut timings = ExecuteTimings::default(); let mut mint_decimals = HashMap::new(); let tx_pre_token_balances = collect_token_balances(&bank, &batch, &mut mint_decimals); let ( TransactionResults { execution_results, .. }, TransactionBalancesSet { pre_balances, post_balances, .. }, mut inner_instructions, mut transaction_logs, ) = bank.load_execute_and_commit_transactions( &batch, std::usize::MAX, true, true, true, &mut timings, ); let tx_post_token_balances = collect_token_balances(&bank, &batch, &mut mint_decimals); for _ in 0..(txs.len() - transaction_logs.len()) { transaction_logs.push(vec![]); } for _ in 0..(txs.len() - inner_instructions.len()) { inner_instructions.push(None); } izip!( txs.iter(), execution_results.into_iter(), inner_instructions.into_iter(), pre_balances.into_iter(), post_balances.into_iter(), tx_pre_token_balances.into_iter(), tx_post_token_balances.into_iter(), transaction_logs.into_iter(), ) .map( |( tx, (execute_result, nonce_rollback), inner_instructions, pre_balances, post_balances, pre_token_balances, post_token_balances, log_messages, )| { let fee_calculator = nonce_rollback .map(|nonce_rollback| nonce_rollback.fee_calculator()) .unwrap_or_else(|| bank.get_fee_calculator(&tx.message().recent_blockhash)) .expect("FeeCalculator must exist"); let fee = fee_calculator.calculate_fee(tx.message()); let inner_instructions = inner_instructions.map(|inner_instructions| { inner_instructions .into_iter() .enumerate() .map(|(index, instructions)| InnerInstructions { index: index as u8, instructions, }) .filter(|i| !i.instructions.is_empty()) .collect() }); let tx_status_meta = TransactionStatusMeta { status: execute_result, fee, pre_balances, post_balances, pre_token_balances: Some(pre_token_balances), post_token_balances: Some(post_token_balances), inner_instructions, log_messages: Some(log_messages), }; ConfirmedTransaction { slot: bank.slot(), transaction: TransactionWithStatusMeta { transaction: tx.clone(), meta: Some(tx_status_meta), }, block_time: None, } }, ) .collect() } fn print_confirmed_tx(name: &str, confirmed_tx: ConfirmedTransaction) { let block_time = confirmed_tx.block_time; let tx = confirmed_tx.transaction.transaction.clone(); let encoded = confirmed_tx.encode(UiTransactionEncoding::JsonParsed); println!("EXECUTE {} (slot {})", name, encoded.slot); println_transaction(&tx, &encoded.transaction.meta, " ", None, block_time); } #[test] #[cfg(any(feature = "bpf_c", feature = "bpf_rust"))] fn test_program_bpf_sanity() { solana_logger::setup(); let mut programs = Vec::new(); #[cfg(feature = "bpf_c")] { 
programs.extend_from_slice(&[ ("alloc", true), ("bpf_to_bpf", true), ("multiple_static", true), ("noop", true), ("noop++", true), ("panic", false), ("relative_call", true), ("sanity", true), ("sanity++", true), ("sha", true), ("struct_pass", true), ("struct_ret", true), ]); } #[cfg(feature = "bpf_rust")] { programs.extend_from_slice(&[ ("solana_bpf_rust_128bit", true), ("solana_bpf_rust_alloc", true), ("solana_bpf_rust_custom_heap", true), ("solana_bpf_rust_dep_crate", true), ("solana_bpf_rust_external_spend", false), ("solana_bpf_rust_iter", true), ("solana_bpf_rust_many_args", true), ("solana_bpf_rust_mem", true), ("solana_bpf_rust_noop", true), ("solana_bpf_rust_panic", false), ("solana_bpf_rust_param_passing", true), ("solana_bpf_rust_rand", true), ("solana_bpf_rust_sanity", true), ("solana_bpf_rust_sha", true), ]); } for program in programs.iter() { println!("Test program: {:?}", program.0); let GenesisConfigInfo { genesis_config, mint_keypair, .. } = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let bank_client = BankClient::new(bank); // Call user program let program_id = load_bpf_program(&bank_client, &bpf_loader::id(), &mint_keypair, program.0); let account_metas = vec![ AccountMeta::new(mint_keypair.pubkey(), true), AccountMeta::new(Keypair::new().pubkey(), false), ]; let instruction = Instruction::new_with_bytes(program_id, &[1], account_metas); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); if program.1 { assert!(result.is_ok()); } else { assert!(result.is_err()); } } } #[test] #[cfg(any(feature = "bpf_c", feature = "bpf_rust"))] fn test_program_bpf_loader_deprecated() { solana_logger::setup(); let mut programs = Vec::new(); #[cfg(feature = "bpf_c")] { programs.extend_from_slice(&[("deprecated_loader")]); } #[cfg(feature = "bpf_rust")] { programs.extend_from_slice(&[("solana_bpf_rust_deprecated_loader")]); } for program in programs.iter() { println!("Test program: {:?}", program); let GenesisConfigInfo { genesis_config, mint_keypair, .. } = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_deprecated_program!(); bank.add_builtin(&name, id, entrypoint); let bank_client = BankClient::new(bank); let program_id = load_bpf_program( &bank_client, &bpf_loader_deprecated::id(), &mint_keypair, program, ); let account_metas = vec![AccountMeta::new(mint_keypair.pubkey(), true)]; let instruction = Instruction::new_with_bytes(program_id, &[1], account_metas); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert!(result.is_ok()); } } #[test] fn test_program_bpf_duplicate_accounts() { solana_logger::setup(); let mut programs = Vec::new(); #[cfg(feature = "bpf_c")] { programs.extend_from_slice(&[("dup_accounts")]); } #[cfg(feature = "bpf_rust")] { programs.extend_from_slice(&[("solana_bpf_rust_dup_accounts")]); } for program in programs.iter() { println!("Test program: {:?}", program); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let bank = Arc::new(bank); let bank_client = BankClient::new_shared(&bank); let program_id = load_bpf_program(&bank_client, &bpf_loader::id(), &mint_keypair, program); let payee_account = AccountSharedData::new(10, 1, &program_id); let payee_pubkey = solana_sdk::pubkey::new_rand(); bank.store_account(&payee_pubkey, &payee_account); let account = AccountSharedData::new(10, 1, &program_id); let pubkey = solana_sdk::pubkey::new_rand(); let account_metas = vec![ AccountMeta::new(mint_keypair.pubkey(), true), AccountMeta::new(payee_pubkey, false), AccountMeta::new(pubkey, false), AccountMeta::new(pubkey, false), ]; bank.store_account(&pubkey, &account); let instruction = Instruction::new_with_bytes(program_id, &[1], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); let data = bank_client.get_account_data(&pubkey).unwrap().unwrap(); assert!(result.is_ok()); assert_eq!(data[0], 1); bank.store_account(&pubkey, &account); let instruction = Instruction::new_with_bytes(program_id, &[2], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); let data = bank_client.get_account_data(&pubkey).unwrap().unwrap(); assert!(result.is_ok()); assert_eq!(data[0], 2); bank.store_account(&pubkey, &account); let instruction = Instruction::new_with_bytes(program_id, &[3], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); let data = bank_client.get_account_data(&pubkey).unwrap().unwrap(); assert!(result.is_ok()); assert_eq!(data[0], 3); bank.store_account(&pubkey, &account); let instruction = Instruction::new_with_bytes(program_id, &[4], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); let lamports = bank_client.get_balance(&pubkey).unwrap(); assert!(result.is_ok()); assert_eq!(lamports, 11); bank.store_account(&pubkey, &account); let instruction = Instruction::new_with_bytes(program_id, &[5], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); let lamports = bank_client.get_balance(&pubkey).unwrap(); assert!(result.is_ok()); assert_eq!(lamports, 12); bank.store_account(&pubkey, &account); let instruction = Instruction::new_with_bytes(program_id, &[6], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); let lamports = bank_client.get_balance(&pubkey).unwrap(); assert!(result.is_ok()); assert_eq!(lamports, 13); let keypair = Keypair::new(); let pubkey = keypair.pubkey(); let account_metas = vec![ AccountMeta::new(mint_keypair.pubkey(), true), AccountMeta::new(payee_pubkey, false), AccountMeta::new(pubkey, false), AccountMeta::new_readonly(pubkey, true), AccountMeta::new_readonly(program_id, false), ]; bank.store_account(&pubkey, &account); let instruction = Instruction::new_with_bytes(program_id, &[7], account_metas.clone()); let message = Message::new(&[instruction], Some(&mint_keypair.pubkey())); let result = bank_client.send_and_confirm_message(&[&mint_keypair, &keypair], message); assert!(result.is_ok()); } } #[test] fn test_program_bpf_error_handling() { solana_logger::setup(); let mut programs = Vec::new(); #[cfg(feature = "bpf_c")] { programs.extend_from_slice(&[("error_handling")]); } #[cfg(feature = 
"bpf_rust")] { programs.extend_from_slice(&[("solana_bpf_rust_error_handling")]); } for program in programs.iter() { println!("Test program: {:?}", program); let GenesisConfigInfo { genesis_config, mint_keypair, .. } = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let bank_client = BankClient::new(bank); let program_id = load_bpf_program(&bank_client, &bpf_loader::id(), &mint_keypair, program); let account_metas = vec![AccountMeta::new(mint_keypair.pubkey(), true)]; let instruction = Instruction::new_with_bytes(program_id, &[1], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert!(result.is_ok()); let instruction = Instruction::new_with_bytes(program_id, &[2], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::InvalidAccountData) ); let instruction = Instruction::new_with_bytes(program_id, &[3], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::Custom(0)) ); let instruction = Instruction::new_with_bytes(program_id, &[4], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::Custom(42)) ); let instruction = Instruction::new_with_bytes(program_id, &[5], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); let result = result.unwrap_err().unwrap(); if TransactionError::InstructionError(0, InstructionError::InvalidInstructionData) != result { assert_eq!( result, TransactionError::InstructionError(0, InstructionError::InvalidError) ); } let instruction = Instruction::new_with_bytes(program_id, &[6], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); let result = result.unwrap_err().unwrap(); if TransactionError::InstructionError(0, InstructionError::InvalidInstructionData) != result { assert_eq!( result, TransactionError::InstructionError(0, InstructionError::InvalidError) ); } let instruction = Instruction::new_with_bytes(program_id, &[7], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); let result = result.unwrap_err().unwrap(); if TransactionError::InstructionError(0, InstructionError::InvalidInstructionData) != result { assert_eq!( result, TransactionError::InstructionError(0, InstructionError::AccountBorrowFailed) ); } let instruction = Instruction::new_with_bytes(program_id, &[8], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::InvalidInstructionData) ); let instruction = Instruction::new_with_bytes(program_id, &[9], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::MaxSeedLengthExceeded) ); } } #[test] fn test_program_bpf_invoke_sanity() { solana_logger::setup(); const TEST_SUCCESS: 
u8 = 1; const TEST_PRIVILEGE_ESCALATION_SIGNER: u8 = 2; const TEST_PRIVILEGE_ESCALATION_WRITABLE: u8 = 3; const TEST_PPROGRAM_NOT_EXECUTABLE: u8 = 4; const TEST_EMPTY_ACCOUNTS_SLICE: u8 = 5; const TEST_CAP_SEEDS: u8 = 6; const TEST_CAP_SIGNERS: u8 = 7; const TEST_ALLOC_ACCESS_VIOLATION: u8 = 8; const TEST_INSTRUCTION_DATA_TOO_LARGE: u8 = 9; const TEST_INSTRUCTION_META_TOO_LARGE: u8 = 10; const TEST_RETURN_ERROR: u8 = 11; const TEST_PRIVILEGE_DEESCALATION_ESCALATION_SIGNER: u8 = 12; const TEST_PRIVILEGE_DEESCALATION_ESCALATION_WRITABLE: u8 = 13; #[allow(dead_code)] #[derive(Debug)] enum Languages { C, Rust, } let mut programs = Vec::new(); #[cfg(feature = "bpf_c")] { programs.push((Languages::C, "invoke", "invoked", "noop")); } #[cfg(feature = "bpf_rust")] { programs.push(( Languages::Rust, "solana_bpf_rust_invoke", "solana_bpf_rust_invoked", "solana_bpf_rust_noop", )); } for program in programs.iter() { println!("Test program: {:?}", program); let GenesisConfigInfo { genesis_config, mint_keypair, .. } = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let bank = Arc::new(bank); let bank_client = BankClient::new_shared(&bank); let invoke_program_id = load_bpf_program(&bank_client, &bpf_loader::id(), &mint_keypair, program.1); let invoked_program_id = load_bpf_program(&bank_client, &bpf_loader::id(), &mint_keypair, program.2); let noop_program_id = load_bpf_program(&bank_client, &bpf_loader::id(), &mint_keypair, program.3); let argument_keypair = Keypair::new(); let account = AccountSharedData::new(42, 100, &invoke_program_id); bank.store_account(&argument_keypair.pubkey(), &account); let invoked_argument_keypair = Keypair::new(); let account = AccountSharedData::new(10, 10, &invoked_program_id); bank.store_account(&invoked_argument_keypair.pubkey(), &account); let from_keypair = Keypair::new(); let account = AccountSharedData::new(84, 0, &solana_sdk::system_program::id()); bank.store_account(&from_keypair.pubkey(), &account); let (derived_key1, bump_seed1) = Pubkey::find_program_address(&[b"You pass butter"], &invoke_program_id); let (derived_key2, bump_seed2) = Pubkey::find_program_address(&[b"Lil'", b"Bits"], &invoked_program_id); let (derived_key3, bump_seed3) = Pubkey::find_program_address(&[derived_key2.as_ref()], &invoked_program_id); let mint_pubkey = mint_keypair.pubkey(); let account_metas = vec![ AccountMeta::new(mint_pubkey, true), AccountMeta::new(argument_keypair.pubkey(), true), AccountMeta::new_readonly(invoked_program_id, false), AccountMeta::new(invoked_argument_keypair.pubkey(), true), AccountMeta::new_readonly(invoked_program_id, false), AccountMeta::new(argument_keypair.pubkey(), true), AccountMeta::new(derived_key1, false), AccountMeta::new(derived_key2, false), AccountMeta::new_readonly(derived_key3, false), AccountMeta::new_readonly(solana_sdk::system_program::id(), false), AccountMeta::new(from_keypair.pubkey(), true), ]; // success cases let instruction = Instruction::new_with_bytes( invoke_program_id, &[TEST_SUCCESS, bump_seed1, bump_seed2, bump_seed3], account_metas.clone(), ); let noop_instruction = Instruction::new_with_bytes(noop_program_id, &[], vec![]); let message = Message::new(&[instruction, noop_instruction], Some(&mint_pubkey)); let tx = Transaction::new( &[ &mint_keypair, &argument_keypair, &invoked_argument_keypair, &from_keypair, ], message.clone(), bank.last_blockhash(), ); let (result, inner_instructions) = 
process_transaction_and_record_inner(&bank, tx); assert!(result.is_ok()); let invoked_programs: Vec<Pubkey> = inner_instructions[0] .iter() .map(|ix| message.account_keys[ix.program_id_index as usize].clone()) .collect(); let expected_invoked_programs = match program.0 { Languages::C => vec![ solana_sdk::system_program::id(), solana_sdk::system_program::id(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), ], Languages::Rust => vec![ solana_sdk::system_program::id(), solana_sdk::system_program::id(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), invoked_program_id.clone(), solana_sdk::system_program::id(), ], }; assert_eq!(invoked_programs.len(), expected_invoked_programs.len()); assert_eq!(invoked_programs, expected_invoked_programs); let no_invoked_programs: Vec<Pubkey> = inner_instructions[1] .iter() .map(|ix| message.account_keys[ix.program_id_index as usize].clone()) .collect(); assert_eq!(no_invoked_programs.len(), 0); // failure cases let do_invoke_failure_test_local = |test: u8, expected_error: TransactionError, expected_invoked_programs: &[Pubkey]| { println!("Running failure test #{:?}", test); let instruction_data = &[test, bump_seed1, bump_seed2, bump_seed3]; let signers = vec![ &mint_keypair, &argument_keypair, &invoked_argument_keypair, &from_keypair, ]; let instruction = Instruction::new_with_bytes( invoke_program_id, instruction_data, account_metas.clone(), ); let message = Message::new(&[instruction], Some(&mint_pubkey)); let tx = Transaction::new(&signers, message.clone(), bank.last_blockhash()); let (result, inner_instructions) = process_transaction_and_record_inner(&bank, tx); let invoked_programs: Vec<Pubkey> = inner_instructions[0] .iter() .map(|ix| message.account_keys[ix.program_id_index as usize].clone()) .collect(); assert_eq!(result.unwrap_err(), expected_error); assert_eq!(invoked_programs, expected_invoked_programs); }; do_invoke_failure_test_local( TEST_PRIVILEGE_ESCALATION_SIGNER, TransactionError::InstructionError(0, InstructionError::PrivilegeEscalation), &[invoked_program_id.clone()], ); do_invoke_failure_test_local( TEST_PRIVILEGE_ESCALATION_WRITABLE, TransactionError::InstructionError(0, InstructionError::PrivilegeEscalation), &[invoked_program_id.clone()], ); do_invoke_failure_test_local( TEST_PPROGRAM_NOT_EXECUTABLE, TransactionError::InstructionError(0, InstructionError::AccountNotExecutable), &[], ); do_invoke_failure_test_local( TEST_EMPTY_ACCOUNTS_SLICE, TransactionError::InstructionError(0, InstructionError::MissingAccount), &[], ); do_invoke_failure_test_local( TEST_CAP_SEEDS, TransactionError::InstructionError(0, InstructionError::MaxSeedLengthExceeded), &[], ); do_invoke_failure_test_local( TEST_CAP_SIGNERS, TransactionError::InstructionError(0, InstructionError::ProgramFailedToComplete), &[], ); do_invoke_failure_test_local( TEST_INSTRUCTION_DATA_TOO_LARGE, TransactionError::InstructionError(0, 
InstructionError::ProgramFailedToComplete), &[], ); do_invoke_failure_test_local( TEST_INSTRUCTION_META_TOO_LARGE, TransactionError::InstructionError(0, InstructionError::ProgramFailedToComplete), &[], ); do_invoke_failure_test_local( TEST_RETURN_ERROR, TransactionError::InstructionError(0, InstructionError::Custom(42)), &[invoked_program_id.clone()], ); do_invoke_failure_test_local( TEST_PRIVILEGE_DEESCALATION_ESCALATION_SIGNER, TransactionError::InstructionError(0, InstructionError::PrivilegeEscalation), &[invoked_program_id.clone()], ); do_invoke_failure_test_local( TEST_PRIVILEGE_DEESCALATION_ESCALATION_WRITABLE, TransactionError::InstructionError(0, InstructionError::PrivilegeEscalation), &[invoked_program_id.clone()], ); // Check resulting state assert_eq!(43, bank.get_balance(&derived_key1)); let account = bank.get_account(&derived_key1).unwrap(); assert_eq!(&invoke_program_id, account.owner()); assert_eq!( MAX_PERMITTED_DATA_INCREASE, bank.get_account(&derived_key1).unwrap().data().len() ); for i in 0..20 { assert_eq!(i as u8, account.data()[i]); } // Attempt to realloc into unauthorized address space let account = AccountSharedData::new(84, 0, &solana_sdk::system_program::id()); bank.store_account(&from_keypair.pubkey(), &account); bank.store_account(&derived_key1, &AccountSharedData::default()); let instruction = Instruction::new_with_bytes( invoke_program_id, &[ TEST_ALLOC_ACCESS_VIOLATION, bump_seed1, bump_seed2, bump_seed3, ], account_metas.clone(), ); let message = Message::new(&[instruction], Some(&mint_pubkey)); let tx = Transaction::new( &[ &mint_keypair, &argument_keypair, &invoked_argument_keypair, &from_keypair, ], message.clone(), bank.last_blockhash(), ); let (result, inner_instructions) = process_transaction_and_record_inner(&bank, tx); let invoked_programs: Vec<Pubkey> = inner_instructions[0] .iter() .map(|ix| message.account_keys[ix.program_id_index as usize].clone()) .collect(); assert_eq!(invoked_programs, vec![solana_sdk::system_program::id()]); assert_eq!( result.unwrap_err(), TransactionError::InstructionError(0, InstructionError::ProgramFailedToComplete) ); } } #[cfg(feature = "bpf_rust")] #[test] fn test_program_bpf_program_id_spoofing() { let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let bank = Arc::new(bank); let bank_client = BankClient::new_shared(&bank); let malicious_swap_pubkey = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_spoof1", ); let malicious_system_pubkey = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_spoof1_system", ); let from_pubkey = Pubkey::new_unique(); let account = AccountSharedData::new(10, 0, &solana_sdk::system_program::id()); bank.store_account(&from_pubkey, &account); let to_pubkey = Pubkey::new_unique(); let account = AccountSharedData::new(0, 0, &solana_sdk::system_program::id()); bank.store_account(&to_pubkey, &account); let account_metas = vec![ AccountMeta::new_readonly(solana_sdk::system_program::id(), false), AccountMeta::new_readonly(malicious_system_pubkey, false), AccountMeta::new(from_pubkey, false), AccountMeta::new(to_pubkey, false), ]; let instruction = Instruction::new_with_bytes(malicious_swap_pubkey, &[], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::MissingRequiredSignature) ); assert_eq!(10, bank.get_balance(&from_pubkey)); assert_eq!(0, bank.get_balance(&to_pubkey)); } #[cfg(feature = "bpf_rust")] #[test] fn test_program_bpf_caller_has_access_to_cpi_program() { let GenesisConfigInfo { genesis_config, mint_keypair, .. } = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let bank = Arc::new(bank); let bank_client = BankClient::new_shared(&bank); let caller_pubkey = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_caller_access", ); let caller2_pubkey = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_caller_access", ); let account_metas = vec![ AccountMeta::new_readonly(caller_pubkey, false), AccountMeta::new_readonly(caller2_pubkey, false), ]; let instruction = Instruction::new_with_bytes(caller_pubkey, &[1], account_metas.clone()); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::MissingAccount) ); } #[cfg(feature = "bpf_rust")] #[test] fn test_program_bpf_ro_modify() { solana_logger::setup(); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let bank = Arc::new(bank); let bank_client = BankClient::new_shared(&bank); let program_pubkey = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_ro_modify", ); let test_keypair = Keypair::new(); let account = AccountSharedData::new(10, 0, &solana_sdk::system_program::id()); bank.store_account(&test_keypair.pubkey(), &account); let account_metas = vec![ AccountMeta::new_readonly(solana_sdk::system_program::id(), false), AccountMeta::new(test_keypair.pubkey(), true), ]; let instruction = Instruction::new_with_bytes(program_pubkey, &[1], account_metas.clone()); let message = Message::new(&[instruction], Some(&mint_keypair.pubkey())); let result = bank_client.send_and_confirm_message(&[&mint_keypair, &test_keypair], message); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::ProgramFailedToComplete) ); let instruction = Instruction::new_with_bytes(program_pubkey, &[3], account_metas.clone()); let message = Message::new(&[instruction], Some(&mint_keypair.pubkey())); let result = bank_client.send_and_confirm_message(&[&mint_keypair, &test_keypair], message); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::ProgramFailedToComplete) ); let instruction = Instruction::new_with_bytes(program_pubkey, &[4], account_metas.clone()); let message = Message::new(&[instruction], Some(&mint_keypair.pubkey())); let result = bank_client.send_and_confirm_message(&[&mint_keypair, &test_keypair], message); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::ProgramFailedToComplete) ); } #[cfg(feature = "bpf_rust")] #[test] fn test_program_bpf_call_depth() { use solana_sdk::process_instruction::BpfComputeBudget; solana_logger::setup(); println!("Test program: solana_bpf_rust_call_depth"); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let bank_client = BankClient::new(bank); let program_id = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_call_depth", ); let instruction = Instruction::new_with_bincode( program_id, &(BpfComputeBudget::default().max_call_depth - 1), vec![], ); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert!(result.is_ok()); let instruction = Instruction::new_with_bincode( program_id, &BpfComputeBudget::default().max_call_depth, vec![], ); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert!(result.is_err()); } #[test] fn assert_instruction_count() { solana_logger::setup(); let mut programs = Vec::new(); #[cfg(feature = "bpf_c")] { programs.extend_from_slice(&[ ("alloc", 1137), ("bpf_to_bpf", 13), ("multiple_static", 8), ("noop", 5), ("noop++", 5), ("relative_call", 10), ("sanity", 169), ("sanity++", 168), ("sha", 694), ("struct_pass", 8), ("struct_ret", 22), ]); } #[cfg(feature = "bpf_rust")] { programs.extend_from_slice(&[ ("solana_bpf_rust_128bit", 584), ("solana_bpf_rust_alloc", 4967), ("solana_bpf_rust_custom_heap", 365), ("solana_bpf_rust_dep_crate", 2), ("solana_bpf_rust_external_spend", 334), ("solana_bpf_rust_iter", 8), ("solana_bpf_rust_many_args", 189), ("solana_bpf_rust_mem", 1665), ("solana_bpf_rust_noop", 322), ("solana_bpf_rust_param_passing", 46), ("solana_bpf_rust_rand", 325), ("solana_bpf_rust_sanity", 587), ("solana_bpf_rust_sha", 22417), ]); } let mut passed = true; println!("\n {:30} expected actual diff", "BPF program"); for program in programs.iter() { let program_id = solana_sdk::pubkey::new_rand(); let key = solana_sdk::pubkey::new_rand(); let mut account = RefCell::new(AccountSharedData::default()); let parameter_accounts = vec![KeyedAccount::new(&key, false, &mut account)]; let count = run_program(program.0, &program_id, parameter_accounts, &[]).unwrap(); let diff: i64 = count as i64 - program.1 as i64; println!( " {:30} {:8} {:6} {:+5} ({:+3.0}%)", program.0, program.1, count, diff, 100.0_f64 * count as f64 / program.1 as f64 - 100.0_f64, ); if count > program.1 { passed = false; } } assert!(passed); } #[cfg(any(feature = "bpf_rust"))] #[test] fn test_program_bpf_instruction_introspection() { solana_logger::setup(); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(50_000); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let bank = Arc::new(bank); let bank_client = BankClient::new_shared(&bank); let program_id = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_instruction_introspection", ); // Passing transaction let account_metas = vec![AccountMeta::new_readonly( solana_sdk::sysvar::instructions::id(), false, )]; let instruction0 = Instruction::new_with_bytes(program_id, &[0u8, 0u8], account_metas.clone()); let instruction1 = Instruction::new_with_bytes(program_id, &[0u8, 1u8], account_metas.clone()); let instruction2 = Instruction::new_with_bytes(program_id, &[0u8, 2u8], account_metas); let message = Message::new( &[instruction0, instruction1, instruction2], Some(&mint_keypair.pubkey()), ); let result = bank_client.send_and_confirm_message(&[&mint_keypair], message); assert!(result.is_ok()); // writable special instructions11111 key, should not be allowed let account_metas = vec![AccountMeta::new( solana_sdk::sysvar::instructions::id(), false, )]; let instruction = Instruction::new_with_bytes(program_id, &[0], account_metas); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert_eq!( result.unwrap_err().unwrap(), // sysvar write locks are demoted to read only. So this will no longer // cause InvalidAccountIndex error. TransactionError::InstructionError(0, InstructionError::ProgramFailedToComplete), ); // No accounts, should error let instruction = Instruction::new_with_bytes(program_id, &[0], vec![]); let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert!(result.is_err()); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError( 0, solana_sdk::instruction::InstructionError::NotEnoughAccountKeys ) ); assert!(bank .get_account(&solana_sdk::sysvar::instructions::id()) .is_none()); } #[cfg(feature = "bpf_rust")] #[test] fn test_program_bpf_test_use_latest_executor() { use solana_sdk::{loader_instruction, system_instruction}; solana_logger::setup(); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let bank_client = BankClient::new(bank); let panic_id = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_panic", ); let program_keypair = Keypair::new(); // Write the panic program into the program account let elf = read_bpf_program("solana_bpf_rust_panic"); let message = Message::new( &[system_instruction::create_account( &mint_keypair.pubkey(), &program_keypair.pubkey(), 1, elf.len() as u64 * 2, &bpf_loader::id(), )], Some(&mint_keypair.pubkey()), ); assert!(bank_client .send_and_confirm_message(&[&mint_keypair, &program_keypair], message) .is_ok()); write_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, &program_keypair, &elf, ); // Finalize the panic program, but fail the tx let message = Message::new( &[ loader_instruction::finalize(&program_keypair.pubkey(), &bpf_loader::id()), Instruction::new_with_bytes(panic_id, &[0], vec![]), ], Some(&mint_keypair.pubkey()), ); assert!(bank_client .send_and_confirm_message(&[&mint_keypair, &program_keypair], message) .is_err()); // Write the noop program into the same program account let elf = read_bpf_program("solana_bpf_rust_noop"); write_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, &program_keypair, &elf, ); // Finalize the noop program let message = Message::new( &[loader_instruction::finalize( &program_keypair.pubkey(), &bpf_loader::id(), )], Some(&mint_keypair.pubkey()), ); assert!(bank_client .send_and_confirm_message(&[&mint_keypair, &program_keypair], message) .is_ok()); // Call the noop program, should get noop not panic let message = Message::new( &[Instruction::new_with_bytes( program_keypair.pubkey(), &[0], vec![], )], Some(&mint_keypair.pubkey()), ); assert!(bank_client .send_and_confirm_message(&[&mint_keypair], message) .is_ok()); } #[ignore] // Invoking BPF loaders from CPI not allowed #[cfg(feature = "bpf_rust")] #[test] fn test_program_bpf_test_use_latest_executor2() { use solana_sdk::{loader_instruction, system_instruction}; solana_logger::setup(); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let bank_client = BankClient::new(bank); let invoke_and_error = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_invoke_and_error", ); let invoke_and_ok = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_invoke_and_ok", ); let program_keypair = Keypair::new(); // Write the panic program into the program account let elf = read_bpf_program("solana_bpf_rust_panic"); let message = Message::new( &[system_instruction::create_account( &mint_keypair.pubkey(), &program_keypair.pubkey(), 1, elf.len() as u64 * 2, &bpf_loader::id(), )], Some(&mint_keypair.pubkey()), ); assert!(bank_client .send_and_confirm_message(&[&mint_keypair, &program_keypair], message) .is_ok()); write_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, &program_keypair, &elf, ); // - invoke finalize and return error, swallow error let mut instruction = loader_instruction::finalize(&program_keypair.pubkey(), &bpf_loader::id()); instruction.accounts.insert( 0, AccountMeta { is_signer: false, is_writable: false, pubkey: instruction.program_id, }, ); instruction.program_id = invoke_and_ok; instruction.accounts.insert( 0, AccountMeta { is_signer: false, is_writable: false, pubkey: invoke_and_error, }, ); let message = Message::new(&[instruction], Some(&mint_keypair.pubkey())); assert!(bank_client .send_and_confirm_message(&[&mint_keypair, &program_keypair], message) .is_ok()); // invoke program, verify not found let message = Message::new( &[Instruction::new_with_bytes( program_keypair.pubkey(), &[0], vec![], )], Some(&mint_keypair.pubkey()), ); assert_eq!( bank_client .send_and_confirm_message(&[&mint_keypair], message) .unwrap_err() .unwrap(), TransactionError::InvalidProgramForExecution ); // Write the noop program into the same program account let elf = read_bpf_program("solana_bpf_rust_noop"); write_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, &program_keypair, &elf, ); // Finalize the noop program let message = Message::new( &[loader_instruction::finalize( &program_keypair.pubkey(), &bpf_loader::id(), )], Some(&mint_keypair.pubkey()), ); assert!(bank_client .send_and_confirm_message(&[&mint_keypair, &program_keypair], message) .is_ok()); // Call the program, should get noop, not panic let message = Message::new( &[Instruction::new_with_bytes( program_keypair.pubkey(), &[0], vec![], )], Some(&mint_keypair.pubkey()), ); assert!(bank_client .send_and_confirm_message(&[&mint_keypair], message) .is_ok()); } #[cfg(feature = "bpf_rust")] #[test] fn test_program_bpf_upgrade() { solana_logger::setup(); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_upgradeable_program!(); bank.add_builtin(&name, id, entrypoint); let bank_client = BankClient::new(bank); // Deploy upgrade program let buffer_keypair = Keypair::new(); let program_keypair = Keypair::new(); let program_id = program_keypair.pubkey(); let authority_keypair = Keypair::new(); load_upgradeable_bpf_program( &bank_client, &mint_keypair, &buffer_keypair, &program_keypair, &authority_keypair, "solana_bpf_rust_upgradeable", ); let mut instruction = Instruction::new_with_bytes( program_id, &[0], vec![ AccountMeta::new(program_id.clone(), false), AccountMeta::new(clock::id(), false), AccountMeta::new(fees::id(), false), ], ); // Call upgrade program let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction.clone()); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::Custom(42)) ); // Upgrade program let buffer_keypair = Keypair::new(); upgrade_bpf_program( &bank_client, &mint_keypair, &buffer_keypair, &program_id, &authority_keypair, "solana_bpf_rust_upgraded", ); // Call upgraded program instruction.data[0] += 1; let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction.clone()); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::Custom(43)) ); // Set a new authority let new_authority_keypair = Keypair::new(); set_upgrade_authority( &bank_client, &mint_keypair, &program_id, &authority_keypair, Some(&new_authority_keypair.pubkey()), ); // Upgrade back to the original program let buffer_keypair = Keypair::new(); upgrade_bpf_program( &bank_client, &mint_keypair, &buffer_keypair, &program_id, &new_authority_keypair, "solana_bpf_rust_upgradeable", ); // Call original program instruction.data[0] += 1; let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::Custom(42)) ); } #[cfg(feature = "bpf_rust")] #[test] fn test_program_bpf_upgrade_and_invoke_in_same_tx() { solana_logger::setup(); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_upgradeable_program!(); bank.add_builtin(&name, id, entrypoint); let bank = Arc::new(bank); let bank_client = BankClient::new_shared(&bank); // Deploy upgrade program let buffer_keypair = Keypair::new(); let program_keypair = Keypair::new(); let program_id = program_keypair.pubkey(); let authority_keypair = Keypair::new(); load_upgradeable_bpf_program( &bank_client, &mint_keypair, &buffer_keypair, &program_keypair, &authority_keypair, "solana_bpf_rust_noop", ); let invoke_instruction = Instruction::new_with_bytes( program_id, &[0], vec![ AccountMeta::new(program_id.clone(), false), AccountMeta::new(clock::id(), false), AccountMeta::new(fees::id(), false), ], ); // Call upgradeable program let result = bank_client.send_and_confirm_instruction(&mint_keypair, invoke_instruction.clone()); assert!(result.is_ok()); // Prepare for upgrade let buffer_keypair = Keypair::new(); load_upgradeable_buffer( &bank_client, &mint_keypair, &buffer_keypair, &authority_keypair, "solana_bpf_rust_panic", ); // Invoke, then upgrade the program, and then invoke again in same tx let message = Message::new( &[ invoke_instruction.clone(), bpf_loader_upgradeable::upgrade( &program_id, &buffer_keypair.pubkey(), &authority_keypair.pubkey(), &mint_keypair.pubkey(), ), invoke_instruction, ], Some(&mint_keypair.pubkey()), ); let tx = Transaction::new( &[&mint_keypair, &authority_keypair], message.clone(), bank.last_blockhash(), ); let (result, _) = process_transaction_and_record_inner(&bank, tx); assert_eq!( result.unwrap_err(), TransactionError::InstructionError(2, InstructionError::ProgramFailedToComplete) ); } #[cfg(feature = "bpf_rust")] #[test] fn test_program_bpf_invoke_upgradeable_via_cpi() { solana_logger::setup(); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let (name, id, entrypoint) = solana_bpf_loader_upgradeable_program!(); bank.add_builtin(&name, id, entrypoint); let bank_client = BankClient::new(bank); let invoke_and_return = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_invoke_and_return", ); // Deploy upgradeable program let buffer_keypair = Keypair::new(); let program_keypair = Keypair::new(); let program_id = program_keypair.pubkey(); let authority_keypair = Keypair::new(); load_upgradeable_bpf_program( &bank_client, &mint_keypair, &buffer_keypair, &program_keypair, &authority_keypair, "solana_bpf_rust_upgradeable", ); let mut instruction = Instruction::new_with_bytes( invoke_and_return, &[0], vec![ AccountMeta::new(program_id, false), AccountMeta::new(program_id, false), AccountMeta::new(clock::id(), false), AccountMeta::new(fees::id(), false), ], ); // Call invoker program to invoke the upgradeable program instruction.data[0] += 1; let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction.clone()); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::Custom(42)) ); // Upgrade program let buffer_keypair = Keypair::new(); upgrade_bpf_program( &bank_client, &mint_keypair, &buffer_keypair, &program_id, &authority_keypair, "solana_bpf_rust_upgraded", ); // Call the upgraded program instruction.data[0] += 1; let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction.clone()); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::Custom(43)) ); // Set a new authority let new_authority_keypair = Keypair::new(); set_upgrade_authority( &bank_client, &mint_keypair, &program_id, &authority_keypair, Some(&new_authority_keypair.pubkey()), ); // Upgrade back to the original program let buffer_keypair = Keypair::new(); upgrade_bpf_program( &bank_client, &mint_keypair, &buffer_keypair, &program_id, &new_authority_keypair, "solana_bpf_rust_upgradeable", ); // Call original program instruction.data[0] += 1; let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction.clone()); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::Custom(42)) ); } #[test] #[cfg(any(feature = "bpf_c", feature = "bpf_rust"))] fn test_program_bpf_disguised_as_bpf_loader() { solana_logger::setup(); let mut programs = Vec::new(); #[cfg(feature = "bpf_c")] { programs.extend_from_slice(&[("noop")]); } #[cfg(feature = "bpf_rust")] { programs.extend_from_slice(&[("solana_bpf_rust_noop")]); } for program in programs.iter() { let GenesisConfigInfo { genesis_config, mint_keypair, .. 
        } = create_genesis_config(50);
        let mut bank = Bank::new(&genesis_config);
        let (name, id, entrypoint) = solana_bpf_loader_deprecated_program!();
        bank.add_builtin(&name, id, entrypoint);
        let bank_client = BankClient::new(bank);

        let program_id = load_bpf_program(
            &bank_client,
            &bpf_loader_deprecated::id(),
            &mint_keypair,
            program,
        );

        let account_metas = vec![AccountMeta::new_readonly(program_id, false)];
        let instruction =
            Instruction::new_with_bytes(bpf_loader_deprecated::id(), &[1], account_metas);
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        assert_eq!(
            result.unwrap_err().unwrap(),
            TransactionError::InstructionError(0, InstructionError::IncorrectProgramId)
        );
    }
}

#[test]
#[cfg(feature = "bpf_c")]
fn test_program_bpf_c_dup() {
    solana_logger::setup();

    let GenesisConfigInfo {
        genesis_config,
        mint_keypair,
        ..
    } = create_genesis_config(50);
    let mut bank = Bank::new(&genesis_config);
    let (name, id, entrypoint) = solana_bpf_loader_program!();
    bank.add_builtin(&name, id, entrypoint);

    let account_address = Pubkey::new_unique();
    let account =
        AccountSharedData::new_data(42, &[1_u8, 2, 3], &solana_sdk::system_program::id()).unwrap();
    bank.store_account(&account_address, &account);

    let bank_client = BankClient::new(bank);
    let program_id = load_bpf_program(&bank_client, &bpf_loader::id(), &mint_keypair, "ser");

    let account_metas = vec![
        AccountMeta::new_readonly(account_address, false),
        AccountMeta::new_readonly(account_address, false),
    ];
    let instruction = Instruction::new_with_bytes(program_id, &[4, 5, 6, 7], account_metas);
    bank_client
        .send_and_confirm_instruction(&mint_keypair, instruction)
        .unwrap();
}

#[cfg(feature = "bpf_rust")]
#[test]
fn test_program_bpf_upgrade_via_cpi() {
    solana_logger::setup();

    let GenesisConfigInfo {
        genesis_config,
        mint_keypair,
        ..
} = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let (name, id, entrypoint) = solana_bpf_loader_upgradeable_program!(); bank.add_builtin(&name, id, entrypoint); let bank_client = BankClient::new(bank); let invoke_and_return = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_invoke_and_return", ); // Deploy upgradeable program let buffer_keypair = Keypair::new(); let program_keypair = Keypair::new(); let program_id = program_keypair.pubkey(); let authority_keypair = Keypair::new(); load_upgradeable_bpf_program( &bank_client, &mint_keypair, &buffer_keypair, &program_keypair, &authority_keypair, "solana_bpf_rust_upgradeable", ); let mut instruction = Instruction::new_with_bytes( invoke_and_return, &[0], vec![ AccountMeta::new(program_id, false), AccountMeta::new(program_id, false), AccountMeta::new(clock::id(), false), AccountMeta::new(fees::id(), false), ], ); // Call the upgraded program instruction.data[0] += 1; let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction.clone()); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::Custom(42)) ); // Load the buffer account let path = create_bpf_path("solana_bpf_rust_upgraded"); let mut file = File::open(&path).unwrap_or_else(|err| { panic!("Failed to open {}: {}", path.display(), err); }); let mut elf = Vec::new(); file.read_to_end(&mut elf).unwrap(); let buffer_keypair = Keypair::new(); load_buffer_account( &bank_client, &mint_keypair, &buffer_keypair, &authority_keypair, &elf, ); // Upgrade program via CPI let mut upgrade_instruction = bpf_loader_upgradeable::upgrade( &program_id, &buffer_keypair.pubkey(), &authority_keypair.pubkey(), &mint_keypair.pubkey(), ); upgrade_instruction.program_id = invoke_and_return; upgrade_instruction .accounts .insert(0, AccountMeta::new(bpf_loader_upgradeable::id(), false)); let message = Message::new(&[upgrade_instruction], Some(&mint_keypair.pubkey())); bank_client .send_and_confirm_message(&[&mint_keypair, &authority_keypair], message) .unwrap(); // Call the upgraded program instruction.data[0] += 1; let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction.clone()); assert_eq!( result.unwrap_err().unwrap(), TransactionError::InstructionError(0, InstructionError::Custom(43)) ); } #[cfg(feature = "bpf_rust")] #[test] fn test_program_bpf_upgrade_self_via_cpi() { solana_logger::setup(); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let (name, id, entrypoint) = solana_bpf_loader_upgradeable_program!(); bank.add_builtin(&name, id, entrypoint); let bank = Arc::new(bank); let bank_client = BankClient::new_shared(&bank); let noop_program_id = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_noop", ); // Deploy upgradeable program let buffer_keypair = Keypair::new(); let program_keypair = Keypair::new(); let program_id = program_keypair.pubkey(); let authority_keypair = Keypair::new(); load_upgradeable_bpf_program( &bank_client, &mint_keypair, &buffer_keypair, &program_keypair, &authority_keypair, "solana_bpf_rust_invoke_and_return", ); let mut invoke_instruction = Instruction::new_with_bytes( program_id, &[0], vec![ AccountMeta::new(noop_program_id, false), AccountMeta::new(noop_program_id, false), AccountMeta::new(clock::id(), false), AccountMeta::new(fees::id(), false), ], ); // Call the upgraded program invoke_instruction.data[0] += 1; let result = bank_client.send_and_confirm_instruction(&mint_keypair, invoke_instruction.clone()); assert!(result.is_ok()); // Prepare for upgrade let buffer_keypair = Keypair::new(); load_upgradeable_buffer( &bank_client, &mint_keypair, &buffer_keypair, &authority_keypair, "solana_bpf_rust_panic", ); // Invoke, then upgrade the program, and then invoke again in same tx let message = Message::new( &[ invoke_instruction.clone(), bpf_loader_upgradeable::upgrade( &program_id, &buffer_keypair.pubkey(), &authority_keypair.pubkey(), &mint_keypair.pubkey(), ), invoke_instruction, ], Some(&mint_keypair.pubkey()), ); let tx = Transaction::new( &[&mint_keypair, &authority_keypair], message.clone(), bank.last_blockhash(), ); let (result, _) = process_transaction_and_record_inner(&bank, tx); assert_eq!( result.unwrap_err(), TransactionError::InstructionError(2, InstructionError::ProgramFailedToComplete) ); } #[cfg(feature = "bpf_rust")] #[test] fn test_program_bpf_set_upgrade_authority_via_cpi() { solana_logger::setup(); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(50); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_program!(); bank.add_builtin(&name, id, entrypoint); let (name, id, entrypoint) = solana_bpf_loader_upgradeable_program!(); bank.add_builtin(&name, id, entrypoint); let bank_client = BankClient::new(bank); // Deploy CPI invoker program let invoke_and_return = load_bpf_program( &bank_client, &bpf_loader::id(), &mint_keypair, "solana_bpf_rust_invoke_and_return", ); // Deploy upgradeable program let buffer_keypair = Keypair::new(); let program_keypair = Keypair::new(); let program_id = program_keypair.pubkey(); let authority_keypair = Keypair::new(); load_upgradeable_bpf_program( &bank_client, &mint_keypair, &buffer_keypair, &program_keypair, &authority_keypair, "solana_bpf_rust_upgradeable", ); // Set program upgrade authority instruction to invoke via CPI let new_upgrade_authority_key = Keypair::new().pubkey(); let mut set_upgrade_authority_instruction = bpf_loader_upgradeable::set_upgrade_authority( &program_id, &authority_keypair.pubkey(), Some(&new_upgrade_authority_key), ); // Invoke set_upgrade_authority via CPI invoker program set_upgrade_authority_instruction.program_id = invoke_and_return; set_upgrade_authority_instruction .accounts .insert(0, AccountMeta::new(bpf_loader_upgradeable::id(), false)); let message = Message::new( &[set_upgrade_authority_instruction], Some(&mint_keypair.pubkey()), ); bank_client .send_and_confirm_message(&[&mint_keypair, &authority_keypair], message) .unwrap(); // Assert upgrade authority was changed let program_account_data = bank_client.get_account_data(&program_id).unwrap().unwrap(); let program_account = parse_bpf_upgradeable_loader(&program_account_data).unwrap(); let upgrade_authority_key = match program_account { BpfUpgradeableLoaderAccountType::Program(ui_program) => { let program_data_account_key = Pubkey::from_str(&ui_program.program_data).unwrap(); let program_data_account_data = bank_client .get_account_data(&program_data_account_key) .unwrap() .unwrap(); let program_data_account = parse_bpf_upgradeable_loader(&program_data_account_data).unwrap(); match program_data_account { BpfUpgradeableLoaderAccountType::ProgramData(ui_program_data) => ui_program_data .authority .map(|a| Pubkey::from_str(&a).unwrap()), _ => None, } } _ => None, }; assert_eq!(Some(new_upgrade_authority_key), upgrade_authority_key); } #[cfg(feature = "bpf_rust")] #[test] fn test_program_upgradeable_locks() { fn setup_program_upgradeable_locks( payer_keypair: &Keypair, buffer_keypair: &Keypair, program_keypair: &Keypair, ) -> (Arc<Bank>, Transaction, Transaction) { solana_logger::setup(); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
} = create_genesis_config(2_000_000_000); let mut bank = Bank::new(&genesis_config); let (name, id, entrypoint) = solana_bpf_loader_upgradeable_program!(); bank.add_builtin(&name, id, entrypoint); let bank = Arc::new(bank); let bank_client = BankClient::new_shared(&bank); load_upgradeable_bpf_program( &bank_client, &mint_keypair, buffer_keypair, program_keypair, payer_keypair, "solana_bpf_rust_panic", ); // Load the buffer account let path = create_bpf_path("solana_bpf_rust_noop"); let mut file = File::open(&path).unwrap_or_else(|err| { panic!("Failed to open {}: {}", path.display(), err); }); let mut elf = Vec::new(); file.read_to_end(&mut elf).unwrap(); load_buffer_account( &bank_client, &mint_keypair, buffer_keypair, &payer_keypair, &elf, ); bank_client .send_and_confirm_instruction( &mint_keypair, system_instruction::transfer( &mint_keypair.pubkey(), &payer_keypair.pubkey(), 1_000_000_000, ), ) .unwrap(); let invoke_tx = Transaction::new( &[payer_keypair], Message::new( &[Instruction::new_with_bytes( program_keypair.pubkey(), &[0; 0], vec![], )], Some(&payer_keypair.pubkey()), ), bank.last_blockhash(), ); let upgrade_tx = Transaction::new( &[payer_keypair], Message::new( &[bpf_loader_upgradeable::upgrade( &program_keypair.pubkey(), &buffer_keypair.pubkey(), &payer_keypair.pubkey(), &payer_keypair.pubkey(), )], Some(&payer_keypair.pubkey()), ), bank.last_blockhash(), ); (bank, invoke_tx, upgrade_tx) } let payer_keypair = keypair_from_seed(&[56u8; 32]).unwrap(); let buffer_keypair = keypair_from_seed(&[11; 32]).unwrap(); let program_keypair = keypair_from_seed(&[77u8; 32]).unwrap(); let results1 = { let (bank, invoke_tx, upgrade_tx) = setup_program_upgradeable_locks(&payer_keypair, &buffer_keypair, &program_keypair); execute_transactions(&bank, &[upgrade_tx, invoke_tx]) }; let results2 = { let (bank, invoke_tx, upgrade_tx) = setup_program_upgradeable_locks(&payer_keypair, &buffer_keypair, &program_keypair); execute_transactions(&bank, &[invoke_tx, upgrade_tx]) }; if false { println!("upgrade and invoke"); for result in &results1 { print_confirmed_tx("result", result.clone()); } println!("invoke and upgrade"); for result in &results2 { print_confirmed_tx("result", result.clone()); } } if let Some(ref meta) = results1[0].transaction.meta { assert_eq!(meta.status, Ok(())); } else { panic!("no meta"); } if let Some(ref meta) = results1[1].transaction.meta { assert_eq!(meta.status, Err(TransactionError::AccountInUse)); } else { panic!("no meta"); } if let Some(ref meta) = results2[0].transaction.meta { assert_eq!( meta.status, Err(TransactionError::InstructionError( 0, InstructionError::ProgramFailedToComplete )) ); } else { panic!("no meta"); } if let Some(ref meta) = results2[1].transaction.meta { assert_eq!(meta.status, Err(TransactionError::AccountInUse)); } else { panic!("no meta"); } } #[cfg(feature = "bpf_rust")] #[test] fn test_program_bpf_finalize() { solana_logger::setup(); let GenesisConfigInfo { genesis_config, mint_keypair, .. 
    } = create_genesis_config(50);
    let mut bank = Bank::new(&genesis_config);
    let (name, id, entrypoint) = solana_bpf_loader_program!();
    bank.add_builtin(&name, id, entrypoint);
    let bank = Arc::new(bank);
    let bank_client = BankClient::new_shared(&bank);

    let program_pubkey = load_bpf_program(
        &bank_client,
        &bpf_loader::id(),
        &mint_keypair,
        "solana_bpf_rust_finalize",
    );

    let noop_keypair = Keypair::new();

    // Write the noop program into the same program account
    let elf = read_bpf_program("solana_bpf_rust_noop");
    let message = Message::new(
        &[system_instruction::create_account(
            &mint_keypair.pubkey(),
            &noop_keypair.pubkey(),
            1,
            elf.len() as u64 * 2,
            &bpf_loader::id(),
        )],
        Some(&mint_keypair.pubkey()),
    );
    assert!(bank_client
        .send_and_confirm_message(&[&mint_keypair, &noop_keypair], message)
        .is_ok());
    write_bpf_program(
        &bank_client,
        &bpf_loader::id(),
        &mint_keypair,
        &noop_keypair,
        &elf,
    );

    let account_metas = vec![
        AccountMeta::new(noop_keypair.pubkey(), true),
        AccountMeta::new_readonly(bpf_loader::id(), false),
        AccountMeta::new(rent::id(), false),
    ];
    let instruction = Instruction::new_with_bytes(program_pubkey, &[], account_metas.clone());
    let message = Message::new(&[instruction], Some(&mint_keypair.pubkey()));
    let result = bank_client.send_and_confirm_message(&[&mint_keypair, &noop_keypair], message);
    assert_eq!(
        result.unwrap_err().unwrap(),
        TransactionError::InstructionError(0, InstructionError::ProgramFailedToComplete)
    );
}
33.919786
116
0.610691
90c979553bc9ba9c6f85bdd169c7c62e893c066c
2,151
use async_trait::async_trait;
use tracing::instrument;

use super::{MigrationStep, MigrationStepResult};
use crate::github::{GitHubRepo, GithubApiClient, PullRequestDescription};
use crate::migration::{MigrationError, MigrationTask};
use crate::models::CreatedPullRequest;
use crate::workspace::Workspace;

pub struct UpdateGithubStep<'a> {
    github_api: &'a GithubApiClient,
    repo: &'a GitHubRepo,
    existing_pr: Option<CreatedPullRequest>,
    branch: &'a str,
    title: &'a str,
    body: &'a str,
}

#[async_trait]
impl<'a> MigrationStep<CreatedPullRequest> for UpdateGithubStep<'a> {
    #[instrument(name = "pull-request", skip(self, _workspace), fields(workspace_name = %_workspace.workspace_name, repo = %self.repo))]
    async fn execute_step(
        &self,
        _workspace: &mut Workspace,
    ) -> MigrationStepResult<CreatedPullRequest> {
        match self
            .github_api
            .sync_pull_request(
                self.repo,
                PullRequestDescription {
                    branch: self.branch,
                    title: self.title,
                    body: self.body,
                },
                self.existing_pr.as_ref().map(|it| it.pr_number),
            )
            .await
        {
            Err(e) => MigrationStepResult::failure(
                "pull-request",
                MigrationError::UnableToCreatePullRequest { source: e },
            ),
            Ok(new_pr) => {
                let pr = CreatedPullRequest {
                    pr_number: new_pr.number,
                    url: new_pr.permalink,
                };
                MigrationStepResult::success_with_result("pull-request", pr)
            }
        }
    }
}

impl<'a> From<&'a MigrationTask<'a>> for UpdateGithubStep<'a> {
    fn from(task: &'a MigrationTask) -> Self {
        Self {
            github_api: task.exec_opts.github_client,
            repo: &task.repo,
            existing_pr: task.pull_request.clone(),
            branch: &task.definition.checkout.branch_name,
            title: &task.definition.pr.title,
            body: &task.definition.pr.description,
        }
    }
}
32.590909
136
0.574152
08ee8fc984ddf3f3b1fa45abd761a21db8e64ac3
37,654
#![crate_name = "compiletest"] // The `test` crate is the only unstable feature // allowed here, just to share similar code. #![feature(test)] extern crate test; use crate::common::{ expected_output_path, output_base_dir, output_relative_path, PanicStrategy, UI_EXTENSIONS, }; use crate::common::{CompareMode, Config, Debugger, Mode, PassMode, Pretty, TestPaths}; use crate::util::logv; use getopts::Options; use std::env; use std::ffi::OsString; use std::fs; use std::io::{self, ErrorKind}; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::time::SystemTime; use test::ColorConfig; use tracing::*; use walkdir::WalkDir; use self::header::EarlyProps; #[cfg(test)] mod tests; pub mod common; pub mod errors; pub mod header; mod json; mod raise_fd_limit; mod read2; pub mod runtest; pub mod util; fn main() { tracing_subscriber::fmt::init(); let config = parse_config(env::args().collect()); if config.valgrind_path.is_none() && config.force_valgrind { panic!("Can't find Valgrind to run Valgrind tests"); } if !config.has_tidy && config.mode == Mode::Rustdoc { eprintln!("warning: `tidy` is not installed; diffs will not be generated"); } log_config(&config); run_tests(config); } pub fn parse_config(args: Vec<String>) -> Config { let mut opts = Options::new(); opts.reqopt("", "compile-lib-path", "path to host shared libraries", "PATH") .reqopt("", "run-lib-path", "path to target shared libraries", "PATH") .reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH") .optopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH") .optopt("", "rust-demangler-path", "path to rust-demangler to use in tests", "PATH") .reqopt("", "lldb-python", "path to python to use for doc tests", "PATH") .reqopt("", "docck-python", "path to python to use for doc tests", "PATH") .optopt("", "jsondocck-path", "path to jsondocck to use for doc tests", "PATH") .optopt("", "valgrind-path", "path to Valgrind executable for Valgrind tests", "PROGRAM") .optflag("", "force-valgrind", "fail if Valgrind tests cannot be run under Valgrind") .optopt("", "run-clang-based-tests-with", "path to Clang executable", "PATH") .optopt("", "llvm-filecheck", "path to LLVM's FileCheck binary", "DIR") .reqopt("", "src-base", "directory to scan for test files", "PATH") .reqopt("", "build-base", "directory to deposit test outputs", "PATH") .reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET") .reqopt( "", "mode", "which sort of compile tests to run", "run-pass-valgrind | pretty | debug-info | codegen | rustdoc \ | rustdoc-json | codegen-units | incremental | run-make | ui | js-doc-test | mir-opt | assembly", ) .reqopt( "", "suite", "which suite of compile tests to run. used for nicer error reporting.", "SUITE", ) .optopt( "", "pass", "force {check,build,run}-pass tests to this mode.", "check | build | run", ) .optopt("", "run", "whether to execute run-* tests", "auto | always | never") .optflag("", "ignored", "run tests marked as ignored") .optflag("", "exact", "filters match exactly") .optopt( "", "runtool", "supervisor program to run tests under \ (eg. 
emulator, valgrind)", "PROGRAM", ) .optmulti("", "host-rustcflags", "flags to pass to rustc for host", "FLAGS") .optmulti("", "target-rustcflags", "flags to pass to rustc for target", "FLAGS") .optopt("", "target-panic", "what panic strategy the target supports", "unwind | abort") .optflag("", "verbose", "run tests verbosely, showing all output") .optflag( "", "bless", "overwrite stderr/stdout files instead of complaining about a mismatch", ) .optflag("", "quiet", "print one character per test instead of one line") .optopt("", "color", "coloring: auto, always, never", "WHEN") .optopt("", "logfile", "file to log test execution to", "FILE") .optopt("", "target", "the target to build for", "TARGET") .optopt("", "host", "the host to build for", "HOST") .optopt("", "cdb", "path to CDB to use for CDB debuginfo tests", "PATH") .optopt("", "gdb", "path to GDB to use for GDB debuginfo tests", "PATH") .optopt("", "lldb-version", "the version of LLDB used", "VERSION STRING") .optopt("", "llvm-version", "the version of LLVM used", "VERSION STRING") .optflag("", "system-llvm", "is LLVM the system LLVM") .optopt("", "android-cross-path", "Android NDK standalone path", "PATH") .optopt("", "adb-path", "path to the android debugger", "PATH") .optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH") .optopt("", "lldb-python-dir", "directory containing LLDB's python module", "PATH") .reqopt("", "cc", "path to a C compiler", "PATH") .reqopt("", "cxx", "path to a C++ compiler", "PATH") .reqopt("", "cflags", "flags for the C compiler", "FLAGS") .optopt("", "ar", "path to an archiver", "PATH") .optopt("", "linker", "path to a linker", "PATH") .reqopt("", "llvm-components", "list of LLVM components built in", "LIST") .optopt("", "llvm-bin-dir", "Path to LLVM's `bin` directory", "PATH") .optopt("", "nodejs", "the name of nodejs", "PATH") .optopt("", "npm", "the name of npm", "PATH") .optopt("", "remote-test-client", "path to the remote test client", "PATH") .optopt( "", "compare-mode", "mode describing what file the actual ui output will be compared to", "COMPARE MODE", ) .optflag( "", "rustfix-coverage", "enable this to generate a Rustfix coverage file, which is saved in \ `./<build_base>/rustfix_missing_coverage.txt`", ) .optflag("h", "help", "show this message"); let (argv0, args_) = args.split_first().unwrap(); if args.len() == 1 || args[1] == "-h" || args[1] == "--help" { let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0); println!("{}", opts.usage(&message)); println!(); panic!() } let matches = &match opts.parse(args_) { Ok(m) => m, Err(f) => panic!("{:?}", f), }; if matches.opt_present("h") || matches.opt_present("help") { let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0); println!("{}", opts.usage(&message)); println!(); panic!() } fn opt_path(m: &getopts::Matches, nm: &str) -> PathBuf { match m.opt_str(nm) { Some(s) => PathBuf::from(&s), None => panic!("no option (=path) found for {}", nm), } } fn make_absolute(path: PathBuf) -> PathBuf { if path.is_relative() { env::current_dir().unwrap().join(path) } else { path } } let target = opt_str2(matches.opt_str("target")); let android_cross_path = opt_path(matches, "android-cross-path"); let (cdb, cdb_version) = analyze_cdb(matches.opt_str("cdb"), &target); let (gdb, gdb_version, gdb_native_rust) = analyze_gdb(matches.opt_str("gdb"), &target, &android_cross_path); let (lldb_version, lldb_native_rust) = matches .opt_str("lldb-version") .as_deref() .and_then(extract_lldb_version) .map(|(v, b)| (Some(v), b)) 
.unwrap_or((None, false)); let color = match matches.opt_str("color").as_deref() { Some("auto") | None => ColorConfig::AutoColor, Some("always") => ColorConfig::AlwaysColor, Some("never") => ColorConfig::NeverColor, Some(x) => panic!("argument for --color must be auto, always, or never, but found `{}`", x), }; let llvm_version = matches.opt_str("llvm-version").as_deref().and_then(header::extract_llvm_version); let src_base = opt_path(matches, "src-base"); let run_ignored = matches.opt_present("ignored"); let mode = matches.opt_str("mode").unwrap().parse().expect("invalid mode"); let has_tidy = if mode == Mode::Rustdoc { Command::new("tidy") .arg("--version") .stdout(Stdio::null()) .status() .map_or(false, |status| status.success()) } else { // Avoid spawning an external command when we know tidy won't be used. false }; Config { bless: matches.opt_present("bless"), compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")), run_lib_path: make_absolute(opt_path(matches, "run-lib-path")), rustc_path: opt_path(matches, "rustc-path"), rustdoc_path: matches.opt_str("rustdoc-path").map(PathBuf::from), rust_demangler_path: matches.opt_str("rust-demangler-path").map(PathBuf::from), lldb_python: matches.opt_str("lldb-python").unwrap(), docck_python: matches.opt_str("docck-python").unwrap(), jsondocck_path: matches.opt_str("jsondocck-path"), valgrind_path: matches.opt_str("valgrind-path"), force_valgrind: matches.opt_present("force-valgrind"), run_clang_based_tests_with: matches.opt_str("run-clang-based-tests-with"), llvm_filecheck: matches.opt_str("llvm-filecheck").map(PathBuf::from), llvm_bin_dir: matches.opt_str("llvm-bin-dir").map(PathBuf::from), src_base, build_base: opt_path(matches, "build-base"), stage_id: matches.opt_str("stage-id").unwrap(), mode, suite: matches.opt_str("suite").unwrap(), debugger: None, run_ignored, filters: matches.free.clone(), filter_exact: matches.opt_present("exact"), force_pass_mode: matches.opt_str("pass").map(|mode| { mode.parse::<PassMode>() .unwrap_or_else(|_| panic!("unknown `--pass` option `{}` given", mode)) }), run: matches.opt_str("run").and_then(|mode| match mode.as_str() { "auto" => None, "always" => Some(true), "never" => Some(false), _ => panic!("unknown `--run` option `{}` given", mode), }), logfile: matches.opt_str("logfile").map(|s| PathBuf::from(&s)), runtool: matches.opt_str("runtool"), host_rustcflags: Some(matches.opt_strs("host-rustcflags").join(" ")), target_rustcflags: Some(matches.opt_strs("target-rustcflags").join(" ")), target_panic: match matches.opt_str("target-panic").as_deref() { Some("unwind") | None => PanicStrategy::Unwind, Some("abort") => PanicStrategy::Abort, _ => panic!("unknown `--target-panic` option `{}` given", mode), }, target, host: opt_str2(matches.opt_str("host")), cdb, cdb_version, gdb, gdb_version, gdb_native_rust, lldb_version, lldb_native_rust, llvm_version, system_llvm: matches.opt_present("system-llvm"), android_cross_path, adb_path: opt_str2(matches.opt_str("adb-path")), adb_test_dir: opt_str2(matches.opt_str("adb-test-dir")), adb_device_status: opt_str2(matches.opt_str("target")).contains("android") && "(none)" != opt_str2(matches.opt_str("adb-test-dir")) && !opt_str2(matches.opt_str("adb-test-dir")).is_empty(), lldb_python_dir: matches.opt_str("lldb-python-dir"), verbose: matches.opt_present("verbose"), quiet: matches.opt_present("quiet"), color, remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from), compare_mode: matches.opt_str("compare-mode").map(CompareMode::parse), 
rustfix_coverage: matches.opt_present("rustfix-coverage"), has_tidy, cc: matches.opt_str("cc").unwrap(), cxx: matches.opt_str("cxx").unwrap(), cflags: matches.opt_str("cflags").unwrap(), ar: matches.opt_str("ar").unwrap_or_else(|| String::from("ar")), linker: matches.opt_str("linker"), llvm_components: matches.opt_str("llvm-components").unwrap(), nodejs: matches.opt_str("nodejs"), npm: matches.opt_str("npm"), } } pub fn log_config(config: &Config) { let c = config; logv(c, "configuration:".to_string()); logv(c, format!("compile_lib_path: {:?}", config.compile_lib_path)); logv(c, format!("run_lib_path: {:?}", config.run_lib_path)); logv(c, format!("rustc_path: {:?}", config.rustc_path.display())); logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path)); logv(c, format!("rust_demangler_path: {:?}", config.rust_demangler_path)); logv(c, format!("src_base: {:?}", config.src_base.display())); logv(c, format!("build_base: {:?}", config.build_base.display())); logv(c, format!("stage_id: {}", config.stage_id)); logv(c, format!("mode: {}", config.mode)); logv(c, format!("run_ignored: {}", config.run_ignored)); logv(c, format!("filters: {:?}", config.filters)); logv(c, format!("filter_exact: {}", config.filter_exact)); logv( c, format!("force_pass_mode: {}", opt_str(&config.force_pass_mode.map(|m| format!("{}", m))),), ); logv(c, format!("runtool: {}", opt_str(&config.runtool))); logv(c, format!("host-rustcflags: {}", opt_str(&config.host_rustcflags))); logv(c, format!("target-rustcflags: {}", opt_str(&config.target_rustcflags))); logv(c, format!("target: {}", config.target)); logv(c, format!("host: {}", config.host)); logv(c, format!("android-cross-path: {:?}", config.android_cross_path.display())); logv(c, format!("adb_path: {:?}", config.adb_path)); logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir)); logv(c, format!("adb_device_status: {}", config.adb_device_status)); logv(c, format!("ar: {}", config.ar)); logv(c, format!("linker: {:?}", config.linker)); logv(c, format!("verbose: {}", config.verbose)); logv(c, format!("quiet: {}", config.quiet)); logv(c, "\n".to_string()); } pub fn opt_str(maybestr: &Option<String>) -> &str { match *maybestr { None => "(none)", Some(ref s) => s, } } pub fn opt_str2(maybestr: Option<String>) -> String { match maybestr { None => "(none)".to_owned(), Some(s) => s, } } pub fn run_tests(config: Config) { // FIXME(#33435) Avoid spurious failures in codegen-units/partitioning tests. if let Mode::CodegenUnits = config.mode { let _ = fs::remove_dir_all("tmp/partitioning-tests"); } // If we want to collect rustfix coverage information, // we first make sure that the coverage file does not exist. // It will be created later on. if config.rustfix_coverage { let mut coverage_file_path = config.build_base.clone(); coverage_file_path.push("rustfix_missing_coverage.txt"); if coverage_file_path.exists() { if let Err(e) = fs::remove_file(&coverage_file_path) { panic!("Could not delete {} due to {}", coverage_file_path.display(), e) } } } // sadly osx needs some file descriptor limits raised for running tests in // parallel (especially when we have lots and lots of child processes). // For context, see #8904 unsafe { raise_fd_limit::raise_fd_limit(); } // Prevent issue #21352 UAC blocking .exe containing 'patch' etc. 
on Windows // If #11207 is resolved (adding manifest to .exe) this becomes unnecessary env::set_var("__COMPAT_LAYER", "RunAsInvoker"); // Let tests know which target they're running as env::set_var("TARGET", &config.target); let opts = test_opts(&config); let mut configs = Vec::new(); if let Mode::DebugInfo = config.mode { // Debugging emscripten code doesn't make sense today if !config.target.contains("emscripten") { configs.extend(configure_cdb(&config)); configs.extend(configure_gdb(&config)); configs.extend(configure_lldb(&config)); } } else { configs.push(config.clone()); }; let mut tests = Vec::new(); for c in &configs { make_tests(c, &mut tests); } let res = test::run_tests_console(&opts, tests); match res { Ok(true) => {} Ok(false) => { // We want to report that the tests failed, but we also want to give // some indication of just what tests we were running. Especially on // CI, where there can be cross-compiled tests for a lot of // architectures, without this critical information it can be quite // easy to miss which tests failed, and as such fail to reproduce // the failure locally. eprintln!( "Some tests failed in compiletest suite={}{} mode={} host={} target={}", config.suite, config.compare_mode.map(|c| format!(" compare_mode={:?}", c)).unwrap_or_default(), config.mode, config.host, config.target ); std::process::exit(1); } Err(e) => { // We don't know if tests passed or not, but if there was an error // during testing we don't want to just succeed (we may not have // tested something), so fail. // // This should realistically "never" happen, so don't try to make // this a pretty error message. panic!("I/O failure during tests: {:?}", e); } } } fn configure_cdb(config: &Config) -> Option<Config> { config.cdb.as_ref()?; Some(Config { debugger: Some(Debugger::Cdb), ..config.clone() }) } fn configure_gdb(config: &Config) -> Option<Config> { config.gdb_version?; if util::matches_env(&config.target, "msvc") { return None; } if config.remote_test_client.is_some() && !config.target.contains("android") { println!( "WARNING: debuginfo tests are not available when \ testing with remote" ); return None; } if config.target.contains("android") { println!( "{} debug-info test uses tcp 5039 port.\ please reserve it", config.target ); // android debug-info test uses remote debugger so, we test 1 thread // at once as they're all sharing the same TCP port to communicate // over. // // we should figure out how to lift this restriction! (run them all // on different ports allocated dynamically). env::set_var("RUST_TEST_THREADS", "1"); } Some(Config { debugger: Some(Debugger::Gdb), ..config.clone() }) } fn configure_lldb(config: &Config) -> Option<Config> { config.lldb_python_dir.as_ref()?; if let Some(350) = config.lldb_version { println!( "WARNING: The used version of LLDB (350) has a \ known issue that breaks debuginfo tests. See \ issue #32520 for more information. Skipping all \ LLDB-based tests!", ); return None; } // Some older versions of LLDB seem to have problems with multiple // instances running in parallel, so only run one test thread at a // time. 
env::set_var("RUST_TEST_THREADS", "1"); Some(Config { debugger: Some(Debugger::Lldb), ..config.clone() }) } pub fn test_opts(config: &Config) -> test::TestOpts { test::TestOpts { exclude_should_panic: false, filters: config.filters.clone(), filter_exact: config.filter_exact, run_ignored: if config.run_ignored { test::RunIgnored::Yes } else { test::RunIgnored::No }, format: if config.quiet { test::OutputFormat::Terse } else { test::OutputFormat::Pretty }, logfile: config.logfile.clone(), run_tests: true, bench_benchmarks: true, nocapture: match env::var("RUST_TEST_NOCAPTURE") { Ok(val) => &val != "0", Err(_) => false, }, color: config.color, test_threads: None, skip: vec![], list: false, options: test::Options::new(), time_options: None, force_run_in_process: false, } } pub fn make_tests(config: &Config, tests: &mut Vec<test::TestDescAndFn>) { debug!("making tests from {:?}", config.src_base.display()); let inputs = common_inputs_stamp(config); collect_tests_from_dir(config, &config.src_base, &PathBuf::new(), &inputs, tests) .unwrap_or_else(|_| panic!("Could not read tests from {}", config.src_base.display())); } /// Returns a stamp constructed from input files common to all test cases. fn common_inputs_stamp(config: &Config) -> Stamp { let rust_src_dir = config.find_rust_src_root().expect("Could not find Rust source root"); let mut stamp = Stamp::from_path(&config.rustc_path); // Relevant pretty printer files let pretty_printer_files = [ "src/etc/rust_types.py", "src/etc/gdb_load_rust_pretty_printers.py", "src/etc/gdb_lookup.py", "src/etc/gdb_providers.py", "src/etc/lldb_batchmode.py", "src/etc/lldb_lookup.py", "src/etc/lldb_providers.py", ]; for file in &pretty_printer_files { let path = rust_src_dir.join(file); stamp.add_path(&path); } stamp.add_dir(&config.run_lib_path); if let Some(ref rustdoc_path) = config.rustdoc_path { stamp.add_path(&rustdoc_path); stamp.add_path(&rust_src_dir.join("src/etc/htmldocck.py")); } // Compiletest itself. stamp.add_dir(&rust_src_dir.join("src/tools/compiletest/")); stamp } fn collect_tests_from_dir( config: &Config, dir: &Path, relative_dir_path: &Path, inputs: &Stamp, tests: &mut Vec<test::TestDescAndFn>, ) -> io::Result<()> { // Ignore directories that contain a file named `compiletest-ignore-dir`. if dir.join("compiletest-ignore-dir").exists() { return Ok(()); } if config.mode == Mode::RunMake && dir.join("Makefile").exists() { let paths = TestPaths { file: dir.to_path_buf(), relative_dir: relative_dir_path.parent().unwrap().to_path_buf(), }; tests.extend(make_test(config, &paths, inputs)); return Ok(()); } // If we find a test foo/bar.rs, we have to build the // output directory `$build/foo` so we can write // `$build/foo/bar` into it. We do this *now* in this // sequential loop because otherwise, if we do it in the // tests themselves, they race for the privilege of // creating the directories and sometimes fail randomly. let build_dir = output_relative_path(config, relative_dir_path); fs::create_dir_all(&build_dir).unwrap(); // Add each `.rs` file as a test, and recurse further on any // subdirectories we find, except for `aux` directories. for file in fs::read_dir(dir)? 
{ let file = file?; let file_path = file.path(); let file_name = file.file_name(); if is_test(&file_name) { debug!("found test file: {:?}", file_path.display()); let paths = TestPaths { file: file_path, relative_dir: relative_dir_path.to_path_buf() }; tests.extend(make_test(config, &paths, inputs)) } else if file_path.is_dir() { let relative_file_path = relative_dir_path.join(file.file_name()); if &file_name != "auxiliary" { debug!("found directory: {:?}", file_path.display()); collect_tests_from_dir(config, &file_path, &relative_file_path, inputs, tests)?; } } else { debug!("found other file/directory: {:?}", file_path.display()); } } Ok(()) } /// Returns true if `file_name` looks like a proper test file name. pub fn is_test(file_name: &OsString) -> bool { let file_name = file_name.to_str().unwrap(); if !file_name.ends_with(".rs") { return false; } // `.`, `#`, and `~` are common temp-file prefixes. let invalid_prefixes = &[".", "#", "~"]; !invalid_prefixes.iter().any(|p| file_name.starts_with(p)) } fn make_test(config: &Config, testpaths: &TestPaths, inputs: &Stamp) -> Vec<test::TestDescAndFn> { let early_props = if config.mode == Mode::RunMake { // Allow `ignore` directives to be in the Makefile. EarlyProps::from_file(config, &testpaths.file.join("Makefile")) } else { EarlyProps::from_file(config, &testpaths.file) }; // The `should-fail` annotation doesn't apply to pretty tests, // since we run the pretty printer across all tests by default. // If desired, we could add a `should-fail-pretty` annotation. let should_panic = match config.mode { Pretty => test::ShouldPanic::No, _ => { if early_props.should_fail { test::ShouldPanic::Yes } else { test::ShouldPanic::No } } }; // Incremental tests are special, they inherently cannot be run in parallel. // `runtest::run` will be responsible for iterating over revisions. let revisions = if early_props.revisions.is_empty() || config.mode == Mode::Incremental { vec![None] } else { early_props.revisions.iter().map(Some).collect() }; revisions .into_iter() .map(|revision| { let ignore = early_props.ignore // Ignore tests that already run and are up to date with respect to inputs. || is_up_to_date( config, testpaths, &early_props, revision.map(|s| s.as_str()), inputs, ); test::TestDescAndFn { desc: test::TestDesc { name: make_test_name(config, testpaths, revision), ignore, should_panic, allow_fail: false, #[cfg(not(bootstrap))] compile_fail: false, #[cfg(not(bootstrap))] no_run: false, test_type: test::TestType::Unknown, }, testfn: make_test_closure(config, testpaths, revision), } }) .collect() } fn stamp(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf { output_base_dir(config, testpaths, revision).join("stamp") } fn is_up_to_date( config: &Config, testpaths: &TestPaths, props: &EarlyProps, revision: Option<&str>, inputs: &Stamp, ) -> bool { let stamp_name = stamp(config, testpaths, revision); // Check hash. let contents = match fs::read_to_string(&stamp_name) { Ok(f) => f, Err(ref e) if e.kind() == ErrorKind::InvalidData => panic!("Can't read stamp contents"), Err(_) => return false, }; let expected_hash = runtest::compute_stamp_hash(config); if contents != expected_hash { return false; } // Check timestamps. let mut inputs = inputs.clone(); // Use `add_dir` to account for run-make tests, which use their individual directory inputs.add_dir(&testpaths.file); for aux in &props.aux { let path = testpaths.file.parent().unwrap().join("auxiliary").join(aux); inputs.add_path(&path); } // UI test files. 
for extension in UI_EXTENSIONS { let path = &expected_output_path(testpaths, revision, &config.compare_mode, extension); inputs.add_path(path); } inputs < Stamp::from_path(&stamp_name) } #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] struct Stamp { time: SystemTime, } impl Stamp { fn from_path(path: &Path) -> Self { let mut stamp = Stamp { time: SystemTime::UNIX_EPOCH }; stamp.add_path(path); stamp } fn add_path(&mut self, path: &Path) { let modified = fs::metadata(path) .and_then(|metadata| metadata.modified()) .unwrap_or(SystemTime::UNIX_EPOCH); self.time = self.time.max(modified); } fn add_dir(&mut self, path: &Path) { for entry in WalkDir::new(path) { let entry = entry.unwrap(); if entry.file_type().is_file() { let modified = entry .metadata() .ok() .and_then(|metadata| metadata.modified().ok()) .unwrap_or(SystemTime::UNIX_EPOCH); self.time = self.time.max(modified); } } } } fn make_test_name( config: &Config, testpaths: &TestPaths, revision: Option<&String>, ) -> test::TestName { // Convert a complete path to something like // // ui/foo/bar/baz.rs let path = PathBuf::from(config.src_base.file_name().unwrap()) .join(&testpaths.relative_dir) .join(&testpaths.file.file_name().unwrap()); let debugger = match config.debugger { Some(d) => format!("-{}", d), None => String::new(), }; let mode_suffix = match config.compare_mode { Some(ref mode) => format!(" ({})", mode.to_str()), None => String::new(), }; test::DynTestName(format!( "[{}{}{}] {}{}", config.mode, debugger, mode_suffix, path.display(), revision.map_or("".to_string(), |rev| format!("#{}", rev)) )) } fn make_test_closure( config: &Config, testpaths: &TestPaths, revision: Option<&String>, ) -> test::TestFn { let config = config.clone(); let testpaths = testpaths.clone(); let revision = revision.cloned(); test::DynTestFn(Box::new(move || runtest::run(config, &testpaths, revision.as_deref()))) } /// Returns `true` if the given target is an Android target for the /// purposes of GDB testing. fn is_android_gdb_target(target: &str) -> bool { matches!( &target[..], "arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android" ) } /// Returns `true` if the given target is a MSVC target for the purpouses of CDB testing. fn is_pc_windows_msvc_target(target: &str) -> bool { target.ends_with("-pc-windows-msvc") } fn find_cdb(target: &str) -> Option<OsString> { if !(cfg!(windows) && is_pc_windows_msvc_target(target)) { return None; } let pf86 = env::var_os("ProgramFiles(x86)").or_else(|| env::var_os("ProgramFiles"))?; let cdb_arch = if cfg!(target_arch = "x86") { "x86" } else if cfg!(target_arch = "x86_64") { "x64" } else if cfg!(target_arch = "aarch64") { "arm64" } else if cfg!(target_arch = "arm") { "arm" } else { return None; // No compatible CDB.exe in the Windows 10 SDK }; let mut path = PathBuf::new(); path.push(pf86); path.push(r"Windows Kits\10\Debuggers"); // We could check 8.1 etc. too? 
path.push(cdb_arch); path.push(r"cdb.exe"); if !path.exists() { return None; } Some(path.into_os_string()) } /// Returns Path to CDB fn analyze_cdb(cdb: Option<String>, target: &str) -> (Option<OsString>, Option<[u16; 4]>) { let cdb = cdb.map(OsString::from).or_else(|| find_cdb(target)); let mut version = None; if let Some(cdb) = cdb.as_ref() { if let Ok(output) = Command::new(cdb).arg("/version").output() { if let Some(first_line) = String::from_utf8_lossy(&output.stdout).lines().next() { version = extract_cdb_version(&first_line); } } } (cdb, version) } fn extract_cdb_version(full_version_line: &str) -> Option<[u16; 4]> { // Example full_version_line: "cdb version 10.0.18362.1" let version = full_version_line.rsplit(' ').next()?; let mut components = version.split('.'); let major: u16 = components.next().unwrap().parse().unwrap(); let minor: u16 = components.next().unwrap().parse().unwrap(); let patch: u16 = components.next().unwrap_or("0").parse().unwrap(); let build: u16 = components.next().unwrap_or("0").parse().unwrap(); Some([major, minor, patch, build]) } /// Returns (Path to GDB, GDB Version, GDB has Rust Support) fn analyze_gdb( gdb: Option<String>, target: &str, android_cross_path: &PathBuf, ) -> (Option<String>, Option<u32>, bool) { #[cfg(not(windows))] const GDB_FALLBACK: &str = "gdb"; #[cfg(windows)] const GDB_FALLBACK: &str = "gdb.exe"; const MIN_GDB_WITH_RUST: u32 = 7011010; let fallback_gdb = || { if is_android_gdb_target(target) { let mut gdb_path = match android_cross_path.to_str() { Some(x) => x.to_owned(), None => panic!("cannot find android cross path"), }; gdb_path.push_str("/bin/gdb"); gdb_path } else { GDB_FALLBACK.to_owned() } }; let gdb = match gdb { None => fallback_gdb(), Some(ref s) if s.is_empty() => fallback_gdb(), // may be empty if configure found no gdb Some(ref s) => s.to_owned(), }; let mut version_line = None; if let Ok(output) = Command::new(&gdb).arg("--version").output() { if let Some(first_line) = String::from_utf8_lossy(&output.stdout).lines().next() { version_line = Some(first_line.to_string()); } } let version = match version_line { Some(line) => extract_gdb_version(&line), None => return (None, None, false), }; let gdb_native_rust = version.map_or(false, |v| v >= MIN_GDB_WITH_RUST); (Some(gdb), version, gdb_native_rust) } fn extract_gdb_version(full_version_line: &str) -> Option<u32> { let full_version_line = full_version_line.trim(); // GDB versions look like this: "major.minor.patch?.yyyymmdd?", with both // of the ? sections being optional // We will parse up to 3 digits for each component, ignoring the date // We skip text in parentheses. 
This avoids accidentally parsing // the openSUSE version, which looks like: // GNU gdb (GDB; openSUSE Leap 15.0) 8.1 // This particular form is documented in the GNU coding standards: // https://www.gnu.org/prep/standards/html_node/_002d_002dversion.html#g_t_002d_002dversion let unbracketed_part = full_version_line.split('[').next().unwrap(); let mut splits = unbracketed_part.trim_end().rsplit(' '); let version_string = splits.next().unwrap(); let mut splits = version_string.split('.'); let major = splits.next().unwrap(); let minor = splits.next().unwrap(); let patch = splits.next(); let major: u32 = major.parse().unwrap(); let (minor, patch): (u32, u32) = match minor.find(not_a_digit) { None => { let minor = minor.parse().unwrap(); let patch: u32 = match patch { Some(patch) => match patch.find(not_a_digit) { None => patch.parse().unwrap(), Some(idx) if idx > 3 => 0, Some(idx) => patch[..idx].parse().unwrap(), }, None => 0, }; (minor, patch) } // There is no patch version after minor-date (e.g. "4-2012"). Some(idx) => { let minor = minor[..idx].parse().unwrap(); (minor, 0) } }; Some(((major * 1000) + minor) * 1000 + patch) } /// Returns (LLDB version, LLDB is rust-enabled) fn extract_lldb_version(full_version_line: &str) -> Option<(u32, bool)> { // Extract the major LLDB version from the given version string. // LLDB version strings are different for Apple and non-Apple platforms. // The Apple variant looks like this: // // LLDB-179.5 (older versions) // lldb-300.2.51 (new versions) // // We are only interested in the major version number, so this function // will return `Some(179)` and `Some(300)` respectively. // // Upstream versions look like: // lldb version 6.0.1 // // There doesn't seem to be a way to correlate the Apple version // with the upstream version, and since the tests were originally // written against Apple versions, we make a fake Apple version by // multiplying the first number by 100. This is a hack, but // normally fine because the only non-Apple version we test is // rust-enabled. let full_version_line = full_version_line.trim(); if let Some(apple_ver) = full_version_line.strip_prefix("LLDB-").or_else(|| full_version_line.strip_prefix("lldb-")) { if let Some(idx) = apple_ver.find(not_a_digit) { let version: u32 = apple_ver[..idx].parse().unwrap(); return Some((version, full_version_line.contains("rust-enabled"))); } } else if let Some(lldb_ver) = full_version_line.strip_prefix("lldb version ") { if let Some(idx) = lldb_ver.find(not_a_digit) { let version: u32 = lldb_ver[..idx].parse().ok()?; return Some((version * 100, full_version_line.contains("rust-enabled"))); } } None } fn not_a_digit(c: char) -> bool { !c.is_digit(10) }
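// Illustrative note (not part of the original compiletest source): based on the
// parsing logic above, `extract_gdb_version` packs a version string as
// ((major * 1000) + minor) * 1000 + patch, so for example:
//
//     extract_gdb_version("GNU gdb (GDB) 8.2.1")   == Some(8002001)
//     extract_gdb_version("GNU gdb (GDB) 7.11.10") == Some(7011010) // == MIN_GDB_WITH_RUST
//
// These example values are a sketch derived from the code shown here, not
// assertions taken from the original file.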
37.392254
109
0.591358
e645d14d10d596f1fb7eac1c3d2c8c7a5e9b97e2
10,305
#![cfg_attr(not(feature = "std"), no_std)]
#![allow(unused_imports)]

/// Edit this file to define custom logic or remove it if it is not needed.
/// Learn more about FRAME and the core library of Substrate FRAME pallets:
/// <https://docs.substrate.io/v3/runtime/frame>
use codec::{Decode, Encode};
use frame_support::weights::Weight;
use frame_support::{
    dispatch::DispatchResult,
    sp_runtime::traits::Hash,
    sp_runtime::RuntimeDebug,
    traits::{BalanceStatus::Free, Currency, Get, ReservableCurrency},
};

use cumulus_primitives_core::ParaId;
use xcm::latest::{prelude::*, Junction, MultiLocation, OriginKind, SendXcm, Xcm};

use frame_support::traits::OnKilledAccount;
pub use pallet::*;
pub use pallet_common::*;
use scale_info::TypeInfo;
use sp_std::prelude::*;

#[cfg(feature = "std")]
use frame_support::serde::{Deserialize, Serialize};
use sp_std::convert::{TryFrom, TryInto};

use cumulus_primitives_core::{
    relay_chain, relay_chain::BlockNumber as RelayBlockNumber, ServiceQuality,
    XcmpMessageFormat, XcmpMessageHandler,
};
use xcm::VersionedXcm;

type XCMPMessageOf<T> = XCMPMessage<
    <T as frame_system::Config>::AccountId,
    BalanceOf<T>,
    <T as Config>::OrderPayload,
    <T as pallet_timestamp::Config>::Moment,
>;

pub type OrderBaseOf<T> = OrderBase<
    <T as Config>::OrderPayload,
    BalanceOf<T>,
    MomentOf<T>,
    <T as frame_system::Config>::AccountId,
>;

pub type OrderOf<T> = Order<
    <T as Config>::OrderPayload,
    BalanceOf<T>,
    MomentOf<T>,
    <T as frame_system::Config>::AccountId,
    ParaId,
>;

pub type BalanceOf<T> =
    <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance;
pub type MomentOf<T> = <T as pallet_timestamp::Config>::Moment;
type Timestamp<T> = pallet_timestamp::Pallet<T>;

#[frame_support::pallet]
pub mod pallet {
    use super::*;
    use frame_support::{dispatch::DispatchResultWithPostInfo, pallet_prelude::*};
    use frame_system::pallet_prelude::*;

    /// Configure the pallet by specifying the parameters and types on which it depends.
    #[pallet::config]
    pub trait Config: frame_system::Config + pallet_timestamp::Config {
        /// Because this pallet emits events, it depends on the runtime's definition of an event.
        type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;
        type Currency: ReservableCurrency<Self::AccountId>;
        type OrderPayload: Encode + Decode + Clone + Default + Parameter + TypeInfo;
        type XcmpMessageSender: SendXcm;
    }

    // Struct for holding device information.
    #[derive(Clone, Encode, Decode, PartialEq, RuntimeDebug, TypeInfo)]
    #[scale_info(skip_type_params(T))]
    pub struct DeviceProfile<T: Config> {
        pub penalty: BalanceOf<T>,
        pub wcd: MomentOf<T>,
        pub para_id: ParaId,
        pub state: DeviceState,
    }

    #[pallet::pallet]
    #[pallet::generate_store(pub(super) trait Store)]
    pub struct Pallet<T>(_);

    /// Device profiles
    #[pallet::storage]
    #[pallet::getter(fn devices)]
    pub type Device<T: Config> =
        StorageMap<_, Twox64Concat, T::AccountId, DeviceProfile<T>, OptionQuery>;

    #[pallet::storage]
    #[pallet::getter(fn orders)]
    pub type Orders<T: Config> =
        StorageMap<_, Twox64Concat, T::AccountId, OrderOf<T>, OptionQuery>;

    #[pallet::event]
    #[pallet::generate_deposit(pub(super) fn deposit_event)]
    pub enum Event<T: Config> {
        NewDevice(T::AccountId),
        NewOrder(T::AccountId, T::AccountId),
        Accept(T::AccountId, T::AccountId),
        Reject(T::AccountId, T::AccountId),
        Done(T::AccountId, T::AccountId),
        BadVersion(<T as frame_system::Config>::Hash),
    }

    // Errors inform users that something went wrong.
    #[pallet::error]
    pub enum Error<T> {
        NoneValue,
        OrderExists,
        IllegalState,
        Overdue,
        DeviceLowBail,
        DeviceExists,
        BadOrderDetails,
        NoDevice,
        NoOrder,
        Prohibited,
        CannotReachDestination,
    }

    #[pallet::hooks]
    impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> {}

    #[pallet::call]
    impl<T: Config> Pallet<T> {
        #[pallet::weight(10_000)]
        pub fn test(origin: OriginFor<T>) -> DispatchResult {
            let dest = (Parent, Parachain(200));
            let call: Vec<u8> = vec![0x00, 0x20].encode();
            let message = Xcm(vec![Instruction::Transact {
                origin_type: OriginKind::Native,
                require_weight_at_most: 0,
                call: call.into(),
            }]);
            T::XcmpMessageSender::send_xcm(dest, message)
                .map_err(|_| Error::<T>::CannotReachDestination.into())
                .map(|_| ())
        }

        #[pallet::weight(10_000)]
        pub fn order(origin: OriginFor<T>, order: OrderBaseOf<T>) -> DispatchResult {
            let who = ensure_signed(origin)?;
            let now = Timestamp::<T>::get();
            if now >= order.until {
                return Err(Error::<T>::Overdue.into());
            }
            if Orders::<T>::contains_key(&order.device) {
                return Err(Error::<T>::IllegalState.into());
            };
            let mut dev = Device::<T>::get(&order.device).ok_or(Error::<T>::NoDevice)?;
            if dev.state != DeviceState::Ready {
                return Err(Error::<T>::IllegalState.into());
            }
            if order.until < (now + dev.wcd) {
                return Err(Error::<T>::BadOrderDetails.into());
            };

            if !T::Currency::can_reserve(&who, order.fee) {
                return Err(Error::<T>::DeviceLowBail.into());
            }

            T::Currency::reserve(&order.device, dev.penalty)?;
            T::Currency::reserve(&who, order.fee)?;

            let device = order.device.clone();
            // store order
            let order: OrderBaseOf<T> = {
                let order: OrderOf<T> = order.convert(who.clone());
                Orders::<T>::insert(&device, &order);
                order.convert(device.clone())
            };

            let msg: XCMPMessageOf<T> = XCMPMessageOf::<T>::NewOrder(who.clone(), order);
            let dest = (Parent, Parachain(dev.para_id.into()));
            let call = msg.encode();
            let message = Xcm(vec![Instruction::Transact {
                origin_type: OriginKind::Native,
                require_weight_at_most: 0,
                call: call.into(),
            }]);
            log::info!("send XCM order message");
            // Propagate a send failure instead of silently discarding it.
            T::XcmpMessageSender::send_xcm(dest, message)
                .map_err(|_| Error::<T>::CannotReachDestination)?;
            log::info!("XCM order message has been sent");

            dev.state = DeviceState::Busy;
            Device::<T>::insert(&device, &dev);

            Self::deposit_event(Event::NewOrder(who, device.clone()));
            Ok(())
        }

        #[pallet::weight(10_000)]
        pub fn cancel(origin: OriginFor<T>, device: T::AccountId) -> DispatchResult {
            let who = ensure_signed(origin)?;
            let order = Orders::<T>::get(&device).ok_or(Error::<T>::NoOrder)?;
            let now = Timestamp::<T>::get();
            if now < order.until || order.client != who {
                return Err(Error::<T>::Prohibited.into());
            }
            let mut dev = Device::<T>::get(&device).ok_or(Error::<T>::NoDevice)?;
            // Note: we don't change the device state here.
            Self::order_reject(who, &order, now, device, &mut dev)
        }

        #[pallet::weight(10_000)]
        pub fn register(
            origin: OriginFor<T>,
            paraid: ParaId,
            penalty: BalanceOf<T>,
            wcd: MomentOf<T>,
            onoff: bool,
        ) -> DispatchResult {
            let id = ensure_signed(origin)?;
            if Orders::<T>::contains_key(&id) {
                return Err(Error::<T>::DeviceExists.into());
            }
            // Even though no order exists, the device can still be in the `Busy`/`Busy2` state.
            Device::<T>::insert(
                &id,
                DeviceProfile {
                    wcd,
                    penalty,
                    state: if onoff { DeviceState::Ready } else { DeviceState::Off },
                    para_id: paraid,
                },
            );
            Self::deposit_event(Event::NewDevice(id));
            Ok(())
        }
    }
}

impl<T: Config> Pallet<T> {
    fn on_accept(who: T::AccountId, device: T::AccountId) -> DispatchResult {
        Self::deposit_event(Event::Accept(who, device));
        Ok(())
    }

    fn on_reject(who: T::AccountId, device: T::AccountId, onoff: bool) -> DispatchResult {
        let order = Orders::<T>::get(&device).ok_or(Error::<T>::NoOrder)?;
        let now = Timestamp::<T>::get();
        let mut dev = Device::<T>::get(&device).ok_or(Error::<T>::NoDevice)?;
        dev.state = if !onoff { DeviceState::Off } else { DeviceState::Ready };
        Self::order_reject(who, &order, now, device, &mut dev)
    }

    fn on_done(who: T::AccountId, device: T::AccountId, onoff: bool) -> DispatchResult {
        let order = Orders::<T>::get(&device).ok_or(Error::<T>::NoOrder)?;
        let now = Timestamp::<T>::get();
        let mut dev = Device::<T>::get(&device).ok_or(Error::<T>::NoDevice)?;

        T::Currency::repatriate_reserved(&who, &device, order.fee, Free)?;
        if now < order.until {
            T::Currency::unreserve(&device, dev.penalty);
        } else {
            T::Currency::repatriate_reserved(&device, &who, dev.penalty, Free)?;
        }
        Orders::<T>::remove(&device);
        dev.state = if !onoff { DeviceState::Off } else { DeviceState::Ready };
        Device::<T>::insert(&device, &dev);
        Self::deposit_event(Event::Done(who, device));
        Ok(())
    }

    fn order_reject(
        who: T::AccountId,
        order: &OrderOf<T>,
        now: T::Moment,
        device: T::AccountId,
        dev: &mut DeviceProfile<T>,
    ) -> DispatchResult {
        T::Currency::unreserve(&who, order.fee);
        if now < order.until {
            T::Currency::unreserve(&device, dev.penalty);
        } else {
            T::Currency::repatriate_reserved(&device, &order.client, dev.penalty, Free)?;
        }
        Orders::<T>::remove(&device);
        Device::<T>::insert(&device, &*dev);
        Self::deposit_event(Event::Reject(who, device));
        Ok(())
    }
}

impl<T: Config> OnKilledAccount<T::AccountId> for Pallet<T> {
    /// The account with the given id was reaped.
    fn on_killed_account(who: &T::AccountId) {
        // Timewait
        if let Some(mut dev) = Device::<T>::get(who) {
            if dev.state == DeviceState::Off {
                Device::<T>::remove(who);
            } else {
                dev.state = DeviceState::Timewait;
                Device::<T>::insert(who, dev);
            }
        }
    }
}

impl<T: Config> XcmpMessageHandler for Pallet<T> {
    fn handle_xcmp_messages<'a, I: Iterator<Item = (ParaId, RelayBlockNumber, &'a [u8])>>(
        iter: I,
        max_weight: Weight,
    ) -> Weight {
        for (sender, sent_at, data) in iter {
            let mut data_ref = data;
            match XCMPMessageOf::<T>::decode(&mut data_ref) {
                Err(e) => {
                    log::error!("{:?}", e);
                    return 0;
                },
                Ok(XCMPMessageOf::<T>::OrderAccept(client, devid)) => {
                    let _ = Self::on_accept(client, devid);
                    log::info!("OrderAccept");
                },
                Ok(XCMPMessageOf::<T>::OrderReject(client, devid, onoff)) => {
                    let _ = Self::on_reject(client, devid, onoff);
                    log::info!("OrderReject");
                },
                Ok(XCMPMessageOf::<T>::OrderDone(client, devid, onoff)) => {
                    let _ = Self::on_done(client, devid, onoff);
                    log::info!("OrderDone");
                },
                Ok(_) => {
                    log::warn!("unknown XCM message received");
                },
            };
        }
        max_weight
    }
}
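// Overview of the order flow implemented above (a summary of the existing logic,
// added here for orientation; the `XCMPMessage` type itself is declared outside
// this file):
//
// 1. `order`: the client's `fee` and the device's `penalty` are reserved, the
//    order is stored, a `NewOrder` XCM is sent to the device's parachain, and
//    the device switches to `Busy`.
// 2. `OrderAccept`, `OrderReject` and `OrderDone` come back through
//    `handle_xcmp_messages` and are dispatched to `on_accept`, `on_reject`
//    and `on_done`.
// 3. `on_done` pays the reserved fee to the device and releases the penalty
//    (or pays it to the client once the deadline has passed); `on_reject` and
//    `cancel` return the fee to the client via `order_reject`.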
28.784916
97
0.657739
bf36de48c3d7f7c7866370585a5769f8d49b58c8
14,601
//! Write DWARF debugging information. //! //! ## API Structure //! //! This module works by building up a representation of the debugging information //! in memory, and then writing it all at once. It supports two major use cases: //! //! * Use the [`DwarfUnit`](./struct.DwarfUnit.html) type when writing DWARF //! for a single compilation unit. //! //! * Use the [`Dwarf`](./struct.Dwarf.html) type when writing DWARF for multiple //! compilation units. //! //! The module also supports reading in DWARF debugging information and writing it out //! again, possibly after modifying it. Create a [`read::Dwarf`](../read/struct.Dwarf.html) //! instance, and then use [`Dwarf::from`](./struct.Dwarf.html#method.from) to convert //! it to a writable instance. //! //! ## Example Usage //! //! Write a compilation unit containing only the top level DIE. //! //! ```rust //! use gimli::write::{ //! Address, AttributeValue, DwarfUnit, EndianVec, Error, Range, RangeList, Sections, //! }; //! //! fn example() -> Result<(), Error> { //! // Choose the encoding parameters. //! let encoding = gimli::Encoding { //! format: gimli::Format::Dwarf32, //! version: 5, //! address_size: 8, //! }; //! // Create a container for a single compilation unit. //! let mut dwarf = DwarfUnit::new(encoding); //! // Set a range attribute on the root DIE. //! let range_list = RangeList(vec![Range::StartLength { //! begin: Address::Constant(0x100), //! length: 42, //! }]); //! let range_list_id = dwarf.unit.ranges.add(range_list); //! let root = dwarf.unit.root(); //! dwarf.unit.get_mut(root).set( //! gimli::DW_AT_ranges, //! AttributeValue::RangeListRef(range_list_id), //! ); //! // Create a `Vec` for each DWARF section. //! let mut sections = Sections::new(EndianVec::new(gimli::LittleEndian)); //! // Finally, write the DWARF data to the sections. //! dwarf.write(&mut sections)?; //! sections.for_each(|id, data| { //! // Here you can add the data to the output object file. //! Ok(()) //! }) //! } //! # fn main() { //! # example().unwrap(); //! # } use std::error; use std::fmt; use std::result; use crate::constants; mod endian_vec; pub use self::endian_vec::*; mod writer; pub use self::writer::*; #[macro_use] mod section; pub use self::section::*; macro_rules! define_id { ($name:ident, $docs:expr) => { #[doc=$docs] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct $name { base_id: BaseId, index: usize, } impl $name { #[inline] fn new(base_id: BaseId, index: usize) -> Self { $name { base_id, index } } } }; } macro_rules! define_offsets { ($offsets:ident: $id:ident => $offset:ident, $off_doc:expr) => { #[doc=$off_doc] #[derive(Debug)] pub struct $offsets { base_id: BaseId, // We know ids start at 0. offsets: Vec<$offset>, } impl $offsets { /// Return an empty list of offsets. #[inline] pub fn none() -> Self { $offsets { base_id: BaseId::default(), offsets: Vec::new(), } } /// Get the offset /// /// # Panics /// /// Panics if `id` is invalid. #[inline] pub fn get(&self, id: $id) -> $offset { debug_assert_eq!(self.base_id, id.base_id); self.offsets[id.index] } /// Return the number of offsets. #[inline] pub fn count(&self) -> usize { self.offsets.len() } } }; } mod abbrev; pub use self::abbrev::*; mod cfi; pub use self::cfi::*; mod dwarf; pub use self::dwarf::*; mod line; pub use self::line::*; mod loc; pub use self::loc::*; mod op; pub use self::op::*; mod range; pub use self::range::*; mod str; pub use self::str::*; mod unit; pub use self::unit::*; /// An error that occurred when writing. 
#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Error { /// The given offset is out of bounds. OffsetOutOfBounds, /// The given length is out of bounds. LengthOutOfBounds, /// The attribute value is an invalid for writing. InvalidAttributeValue, /// The value is too large for the encoding form. ValueTooLarge, /// Unsupported word size. UnsupportedWordSize(u8), /// Unsupported DWARF version. UnsupportedVersion(u16), /// The unit length is too large for the requested DWARF format. InitialLengthOverflow, /// The address is invalid. InvalidAddress, /// The reference is invalid. InvalidReference, /// A requested feature requires a different DWARF version. NeedVersion(u16), /// Strings in line number program have mismatched forms. LineStringFormMismatch, /// The range is empty or otherwise invalid. InvalidRange, /// The line number program encoding is incompatible with the unit encoding. IncompatibleLineProgramEncoding, /// Could not encode code offset for a frame instruction. InvalidFrameCodeOffset(u32), /// Could not encode data offset for a frame instruction. InvalidFrameDataOffset(i32), /// Unsupported eh_frame pointer encoding. UnsupportedPointerEncoding(constants::DwEhPe), /// Unsupported reference in CFI expression. UnsupportedCfiExpressionReference, /// Unsupported forward reference in expression. UnsupportedExpressionForwardReference, } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> { match *self { Error::OffsetOutOfBounds => write!(f, "The given offset is out of bounds."), Error::LengthOutOfBounds => write!(f, "The given length is out of bounds."), Error::InvalidAttributeValue => { write!(f, "The attribute value is an invalid for writing.") } Error::ValueTooLarge => write!(f, "The value is too large for the encoding form."), Error::UnsupportedWordSize(size) => write!(f, "Unsupported word size: {}", size), Error::UnsupportedVersion(version) => { write!(f, "Unsupported DWARF version: {}", version) } Error::InitialLengthOverflow => write!( f, "The unit length is too large for the requested DWARF format." ), Error::InvalidAddress => write!(f, "The address is invalid."), Error::InvalidReference => write!(f, "The reference is invalid."), Error::NeedVersion(version) => write!( f, "A requested feature requires a DWARF version {}.", version ), Error::LineStringFormMismatch => { write!(f, "Strings in line number program have mismatched forms.") } Error::InvalidRange => write!(f, "The range is empty or otherwise invalid."), Error::IncompatibleLineProgramEncoding => write!( f, "The line number program encoding is incompatible with the unit encoding." ), Error::InvalidFrameCodeOffset(offset) => write!( f, "Could not encode code offset ({}) for a frame instruction.", offset, ), Error::InvalidFrameDataOffset(offset) => write!( f, "Could not encode data offset ({}) for a frame instruction.", offset, ), Error::UnsupportedPointerEncoding(eh_pe) => { write!(f, "Unsupported eh_frame pointer encoding ({}).", eh_pe) } Error::UnsupportedCfiExpressionReference => { write!(f, "Unsupported reference in CFI expression.") } Error::UnsupportedExpressionForwardReference => { write!(f, "Unsupported forward reference in expression.") } } } } impl error::Error for Error {} /// The result of a write. pub type Result<T> = result::Result<T, Error>; /// An address. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum Address { /// A fixed address that does not require relocation. 
Constant(u64), /// An address that is relative to a symbol which may be relocated. Symbol { /// The symbol that the address is relative to. /// /// The meaning of this value is decided by the writer, but /// will typically be an index into a symbol table. symbol: usize, /// The offset of the address relative to the symbol. /// /// This will typically be used as the addend in a relocation. addend: i64, }, } /// A reference to a `.debug_info` entry. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum Reference { /// An external symbol. /// /// The meaning of this value is decided by the writer, but /// will typically be an index into a symbol table. Symbol(usize), /// An entry in the same section. /// /// This only supports references in units that are emitted together. Entry(UnitId, UnitEntryId), } // This type is only used in debug assertions. #[cfg(not(debug_assertions))] type BaseId = (); #[cfg(debug_assertions)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] struct BaseId(usize); #[cfg(debug_assertions)] impl Default for BaseId { fn default() -> Self { use std::sync::atomic; static BASE_ID: atomic::AtomicUsize = atomic::AtomicUsize::new(0); BaseId(BASE_ID.fetch_add(1, atomic::Ordering::Relaxed)) } } #[cfg(feature = "read")] mod convert { use super::*; use crate::read; pub(crate) use super::unit::convert::*; /// An error that occurred when converting a read value into a write value. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum ConvertError { /// An error occurred when reading. Read(read::Error), /// Writing of this attribute value is not implemented yet. UnsupportedAttributeValue, /// This attribute value is an invalid name/form combination. InvalidAttributeValue, /// A `.debug_info` reference does not refer to a valid entry. InvalidDebugInfoOffset, /// An address could not be converted. InvalidAddress, /// Writing this line number instruction is not implemented yet. UnsupportedLineInstruction, /// Writing this form of line string is not implemented yet. UnsupportedLineStringForm, /// A `.debug_line` file index is invalid. InvalidFileIndex, /// A `.debug_line` directory index is invalid. InvalidDirectoryIndex, /// A `.debug_line` line base is invalid. InvalidLineBase, /// A `.debug_line` reference is invalid. InvalidLineRef, /// A `.debug_info` unit entry reference is invalid. InvalidUnitRef, /// A `.debug_info` reference is invalid. InvalidDebugInfoRef, /// Invalid relative address in a range list. InvalidRangeRelativeAddress, /// Writing this CFI instruction is not implemented yet. UnsupportedCfiInstruction, /// Writing indirect pointers is not implemented yet. UnsupportedIndirectAddress, /// Writing this expression operation is not implemented yet. UnsupportedOperation, /// Operation branch target is invalid. InvalidBranchTarget, } impl fmt::Display for ConvertError { fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> { use self::ConvertError::*; match *self { Read(ref e) => e.fmt(f), UnsupportedAttributeValue => { write!(f, "Writing of this attribute value is not implemented yet.") } InvalidAttributeValue => write!( f, "This attribute value is an invalid name/form combination." ), InvalidDebugInfoOffset => write!( f, "A `.debug_info` reference does not refer to a valid entry." ), InvalidAddress => write!(f, "An address could not be converted."), UnsupportedLineInstruction => write!( f, "Writing this line number instruction is not implemented yet." 
), UnsupportedLineStringForm => write!( f, "Writing this form of line string is not implemented yet." ), InvalidFileIndex => write!(f, "A `.debug_line` file index is invalid."), InvalidDirectoryIndex => write!(f, "A `.debug_line` directory index is invalid."), InvalidLineBase => write!(f, "A `.debug_line` line base is invalid."), InvalidLineRef => write!(f, "A `.debug_line` reference is invalid."), InvalidUnitRef => write!(f, "A `.debug_info` unit entry reference is invalid."), InvalidDebugInfoRef => write!(f, "A `.debug_info` reference is invalid."), InvalidRangeRelativeAddress => { write!(f, "Invalid relative address in a range list.") } UnsupportedCfiInstruction => { write!(f, "Writing this CFI instruction is not implemented yet.") } UnsupportedIndirectAddress => { write!(f, "Writing indirect pointers is not implemented yet.") } UnsupportedOperation => write!( f, "Writing this expression operation is not implemented yet." ), InvalidBranchTarget => write!(f, "Operation branch target is invalid."), } } } impl error::Error for ConvertError {} impl From<read::Error> for ConvertError { fn from(e: read::Error) -> Self { ConvertError::Read(e) } } /// The result of a conversion. pub type ConvertResult<T> = result::Result<T, ConvertError>; } #[cfg(feature = "read")] pub use self::convert::*;
34.51773
98
0.576467
f84f15e998090d064bc8ab605b22f909bc5bc924
28,025
//! A general parser for command-line options. //! //! exa uses its own hand-rolled parser for command-line options. It supports //! the following syntax: //! //! - Long options: `--inode`, `--grid` //! - Long options with values: `--sort size`, `--level=4` //! - Short options: `-i`, `-G` //! - Short options with values: `-ssize`, `-L=4` //! //! These values can be mixed and matched: `exa -lssize --grid`. If you’ve used //! other command-line programs, then hopefully it’ll work much like them. //! //! Because exa already has its own files for the help text, shell completions, //! man page, and readme, so it can get away with having the options parser do //! very little: all it really needs to do is parse a slice of strings. //! //! //! ## UTF-8 and `OsStr` //! //! The parser uses `OsStr` as its string type. This is necessary for exa to //! list files that have invalid UTF-8 in their names: by treating file paths //! as bytes with no encoding, a file can be specified on the command-line and //! be looked up without having to be encoded into a `str` first. //! //! It also avoids the overhead of checking for invalid UTF-8 when parsing //! command-line options, as all the options and their values (such as //! `--sort size`) are guaranteed to just be 8-bit ASCII. use std::ffi::{OsStr, OsString}; use std::fmt; use options::Misfire; /// A **short argument** is a single ASCII character. pub type ShortArg = u8; /// A **long argument** is a string. This can be a UTF-8 string, even though /// the arguments will all be unchecked OsStrings, because we don’t actually /// store the user’s input after it’s been matched to a flag, we just store /// which flag it was. pub type LongArg = &'static str; /// A **list of values** that an option can have, to be displayed when the /// user enters an invalid one or skips it. /// /// This is literally just help text, and won’t be used to validate a value to /// see if it’s correct. pub type Values = &'static [&'static str]; /// A **flag** is either of the two argument types, because they have to /// be in the same array together. #[derive(PartialEq, Debug, Clone)] pub enum Flag { Short(ShortArg), Long(LongArg), } impl Flag { pub fn matches(&self, arg: &Arg) -> bool { match *self { Flag::Short(short) => arg.short == Some(short), Flag::Long(long) => arg.long == long, } } } impl fmt::Display for Flag { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { Flag::Short(short) => write!(f, "-{}", short as char), Flag::Long(long) => write!(f, "--{}", long), } } } /// Whether redundant arguments should be considered a problem. #[derive(PartialEq, Debug, Copy, Clone)] pub enum Strictness { /// Throw an error when an argument doesn’t do anything, either because /// it requires another argument to be specified, or because two conflict. ComplainAboutRedundantArguments, /// Search the arguments list back-to-front, giving ones specified later /// in the list priority over earlier ones. UseLastArguments, } /// Whether a flag takes a value. This is applicable to both long and short /// arguments. #[derive(Copy, Clone, PartialEq, Debug)] pub enum TakesValue { /// This flag has to be followed by a value. /// If there’s a fixed set of possible values, they can be printed out /// with the error text. Necessary(Option<Values>), /// This flag will throw an error if there’s a value after it. Forbidden, } /// An **argument** can be matched by one of the user’s input strings. 
#[derive(PartialEq, Debug)] pub struct Arg { /// The short argument that matches it, if any. pub short: Option<ShortArg>, /// The long argument that matches it. This is non-optional; all flags /// should at least have a descriptive long name. pub long: LongArg, /// Whether this flag takes a value or not. pub takes_value: TakesValue, } impl fmt::Display for Arg { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(f, "--{}", self.long)?; if let Some(short) = self.short { write!(f, " (-{})", short as char)?; } Ok(()) } } /// Literally just several args. #[derive(PartialEq, Debug)] pub struct Args(pub &'static [&'static Arg]); impl Args { /// Iterates over the given list of command-line arguments and parses /// them into a list of matched flags and free strings. pub fn parse<'args, I>(&self, inputs: I, strictness: Strictness) -> Result<Matches<'args>, ParseError> where I: IntoIterator<Item=&'args OsString> { use std::os::unix::ffi::OsStrExt; use self::TakesValue::*; let mut parsing = true; // The results that get built up. let mut result_flags = Vec::new(); let mut frees: Vec<&OsStr> = Vec::new(); // Iterate over the inputs with “while let” because we need to advance // the iterator manually whenever an argument that takes a value // doesn’t have one in its string so it needs the next one. let mut inputs = inputs.into_iter(); while let Some(arg) = inputs.next() { let bytes = arg.as_bytes(); // Stop parsing if one of the arguments is the literal string “--”. // This allows a file named “--arg” to be specified by passing in // the pair “-- --arg”, without it getting matched as a flag that // doesn’t exist. if !parsing { frees.push(arg) } else if arg == "--" { parsing = false; } // If the string starts with *two* dashes then it’s a long argument. else if bytes.starts_with(b"--") { let long_arg_name = OsStr::from_bytes(&bytes[2..]); // If there’s an equals in it, then the string before the // equals will be the flag’s name, and the string after it // will be its value. if let Some((before, after)) = split_on_equals(long_arg_name) { let arg = self.lookup_long(before)?; let flag = Flag::Long(arg.long); match arg.takes_value { Necessary(_) => result_flags.push((flag, Some(after))), Forbidden => return Err(ParseError::ForbiddenValue { flag }) } } // If there’s no equals, then the entire string (apart from // the dashes) is the argument name. else { let arg = self.lookup_long(long_arg_name)?; let flag = Flag::Long(arg.long); match arg.takes_value { Forbidden => result_flags.push((flag, None)), Necessary(values) => { if let Some(next_arg) = inputs.next() { result_flags.push((flag, Some(next_arg))); } else { return Err(ParseError::NeedsValue { flag, values }) } } } } } // If the string starts with *one* dash then it’s one or more // short arguments. else if bytes.starts_with(b"-") && arg != "-" { let short_arg = OsStr::from_bytes(&bytes[1..]); // If there’s an equals in it, then the argument immediately // before the equals was the one that has the value, with the // others (if any) as value-less short ones. // // -x=abc => ‘x=abc’ // -abcdx=fgh => ‘a’, ‘b’, ‘c’, ‘d’, ‘x=fgh’ // -x= => error // -abcdx= => error // // There’s no way to give two values in a cluster like this: // it’s an error if any of the first set of arguments actually // takes a value. if let Some((before, after)) = split_on_equals(short_arg) { let (arg_with_value, other_args) = before.as_bytes().split_last().unwrap(); // Process the characters immediately following the dash... 
for byte in other_args { let arg = self.lookup_short(*byte)?; let flag = Flag::Short(*byte); match arg.takes_value { Forbidden => result_flags.push((flag, None)), Necessary(values) => return Err(ParseError::NeedsValue { flag, values }) } } // ...then the last one and the value after the equals. let arg = self.lookup_short(*arg_with_value)?; let flag = Flag::Short(arg.short.unwrap()); match arg.takes_value { Necessary(_) => result_flags.push((flag, Some(after))), Forbidden => return Err(ParseError::ForbiddenValue { flag }) } } // If there’s no equals, then every character is parsed as // its own short argument. However, if any of the arguments // takes a value, then the *rest* of the string is used as // its value, and if there’s no rest of the string, then it // uses the next one in the iterator. // // -a => ‘a’ // -abc => ‘a’, ‘b’, ‘c’ // -abxdef => ‘a’, ‘b’, ‘x=def’ // -abx def => ‘a’, ‘b’, ‘x=def’ // -abx => error // else { for (index, byte) in bytes.into_iter().enumerate().skip(1) { let arg = self.lookup_short(*byte)?; let flag = Flag::Short(*byte); match arg.takes_value { Forbidden => result_flags.push((flag, None)), Necessary(values) => { if index < bytes.len() - 1 { let remnants = &bytes[index+1 ..]; result_flags.push((flag, Some(OsStr::from_bytes(remnants)))); break; } else if let Some(next_arg) = inputs.next() { result_flags.push((flag, Some(next_arg))); } else { return Err(ParseError::NeedsValue { flag, values }) } } } } } } // Otherwise, it’s a free string, usually a file name. else { frees.push(arg) } } Ok(Matches { frees, flags: MatchedFlags { flags: result_flags, strictness } }) } fn lookup_short(&self, short: ShortArg) -> Result<&Arg, ParseError> { match self.0.into_iter().find(|arg| arg.short == Some(short)) { Some(arg) => Ok(arg), None => Err(ParseError::UnknownShortArgument { attempt: short }) } } fn lookup_long<'b>(&self, long: &'b OsStr) -> Result<&Arg, ParseError> { match self.0.into_iter().find(|arg| arg.long == long) { Some(arg) => Ok(arg), None => Err(ParseError::UnknownArgument { attempt: long.to_os_string() }) } } } /// The **matches** are the result of parsing the user’s command-line strings. #[derive(PartialEq, Debug)] pub struct Matches<'args> { /// The flags that were parsed from the user’s input. pub flags: MatchedFlags<'args>, /// All the strings that weren’t matched as arguments, as well as anything /// after the special "--" string. pub frees: Vec<&'args OsStr>, } #[derive(PartialEq, Debug)] pub struct MatchedFlags<'args> { /// The individual flags from the user’s input, in the order they were /// originally given. /// /// Long and short arguments need to be kept in the same vector because /// we usually want the one nearest the end to count, and to know this, /// we need to know where they are in relation to one another. flags: Vec<(Flag, Option<&'args OsStr>)>, /// Whether to check for duplicate or redundant arguments. strictness: Strictness, } impl<'a> MatchedFlags<'a> { /// Whether the given argument was specified. /// Returns `true` if it was, `false` if it wasn’t, and an error in /// strict mode if it was specified more than once. pub fn has(&self, arg: &'static Arg) -> Result<bool, Misfire> { self.has_where(|flag| flag.matches(arg)).map(|flag| flag.is_some()) } /// Returns the first found argument that satisfies the predicate, or /// nothing if none is found, or an error in strict mode if multiple /// argument satisfy the predicate. /// /// You’ll have to test the resulting flag to see which argument it was. 
pub fn has_where<P>(&self, predicate: P) -> Result<Option<&Flag>, Misfire> where P: Fn(&Flag) -> bool { if self.is_strict() { let all = self.flags.iter() .filter(|tuple| tuple.1.is_none() && predicate(&tuple.0)) .collect::<Vec<_>>(); if all.len() < 2 { Ok(all.first().map(|t| &t.0)) } else { Err(Misfire::Duplicate(all[0].0.clone(), all[1].0.clone())) } } else { let any = self.flags.iter().rev() .find(|tuple| tuple.1.is_none() && predicate(&tuple.0)) .map(|tuple| &tuple.0); Ok(any) } } // This code could probably be better. // Both ‘has’ and ‘get’ immediately begin with a conditional, which makes // me think the functionality could be moved to inside Strictness. /// Returns the value of the given argument if it was specified, nothing /// if it wasn’t, and an error in strict mode if it was specified more /// than once. pub fn get(&self, arg: &'static Arg) -> Result<Option<&OsStr>, Misfire> { self.get_where(|flag| flag.matches(arg)) } /// Returns the value of the argument that matches the predicate if it /// was specified, nothing if it wasn’t, and an error in strict mode if /// multiple arguments matched the predicate. /// /// It’s not possible to tell which flag the value belonged to from this. pub fn get_where<P>(&self, predicate: P) -> Result<Option<&OsStr>, Misfire> where P: Fn(&Flag) -> bool { if self.is_strict() { let those = self.flags.iter() .filter(|tuple| tuple.1.is_some() && predicate(&tuple.0)) .collect::<Vec<_>>(); if those.len() < 2 { Ok(those.first().cloned().map(|t| t.1.unwrap())) } else { Err(Misfire::Duplicate(those[0].0.clone(), those[1].0.clone())) } } else { let found = self.flags.iter().rev() .find(|tuple| tuple.1.is_some() && predicate(&tuple.0)) .map(|tuple| tuple.1.unwrap()); Ok(found) } } // It’s annoying that ‘has’ and ‘get’ won’t work when accidentally given // flags that do/don’t take values, but this should be caught by tests. /// Counts the number of occurrences of the given argument, even in /// strict mode. pub fn count(&self, arg: &Arg) -> usize { self.flags.iter() .filter(|tuple| tuple.0.matches(arg)) .count() } /// Checks whether strict mode is on. This is usually done from within /// ‘has’ and ‘get’, but it’s available in an emergency. pub fn is_strict(&self) -> bool { self.strictness == Strictness::ComplainAboutRedundantArguments } } /// A problem with the user’s input that meant it couldn’t be parsed into a /// coherent list of arguments. #[derive(PartialEq, Debug)] pub enum ParseError { /// A flag that has to take a value was not given one. NeedsValue { flag: Flag, values: Option<Values> }, /// A flag that can’t take a value *was* given one. ForbiddenValue { flag: Flag }, /// A short argument, either alone or in a cluster, was not /// recognised by the program. UnknownShortArgument { attempt: ShortArg }, /// A long argument was not recognised by the program. /// We don’t have a known &str version of the flag, so /// this may not be valid UTF-8. UnknownArgument { attempt: OsString }, } // It’s technically possible for ParseError::UnknownArgument to borrow its // OsStr rather than owning it, but that would give ParseError a lifetime, // which would give Misfire a lifetime, which gets used everywhere. And this // only happens when an error occurs, so it’s not really worth it. /// Splits a string on its `=` character, returning the two substrings on /// either side. Returns `None` if there’s no equals or a string is missing. 
fn split_on_equals(input: &OsStr) -> Option<(&OsStr, &OsStr)> { use std::os::unix::ffi::OsStrExt; if let Some(index) = input.as_bytes().iter().position(|elem| *elem == b'=') { let (before, after) = input.as_bytes().split_at(index); // The after string contains the = that we need to remove. if before.len() >= 1 && after.len() >= 2 { return Some((OsStr::from_bytes(before), OsStr::from_bytes(&after[1..]))) } } None } /// Creates an `OSString` (used in tests) #[cfg(test)] fn os(input: &'static str) -> OsString { let mut os = OsString::new(); os.push(input); os } #[cfg(test)] mod split_test { use super::{split_on_equals, os}; macro_rules! test_split { ($name:ident: $input:expr => None) => { #[test] fn $name() { assert_eq!(split_on_equals(&os($input)), None); } }; ($name:ident: $input:expr => $before:expr, $after:expr) => { #[test] fn $name() { assert_eq!(split_on_equals(&os($input)), Some((&*os($before), &*os($after)))); } }; } test_split!(empty: "" => None); test_split!(letter: "a" => None); test_split!(just: "=" => None); test_split!(intro: "=bbb" => None); test_split!(denou: "aaa=" => None); test_split!(equals: "aaa=bbb" => "aaa", "bbb"); test_split!(sort: "--sort=size" => "--sort", "size"); test_split!(more: "this=that=other" => "this", "that=other"); } #[cfg(test)] mod parse_test { use super::*; pub fn os(input: &'static str) -> OsString { let mut os = OsString::new(); os.push(input); os } macro_rules! test { ($name:ident: $inputs:expr => frees: $frees:expr, flags: $flags:expr) => { #[test] fn $name() { // Annoyingly the input &strs need to be converted to OsStrings let inputs: Vec<OsString> = $inputs.as_ref().into_iter().map(|&o| os(o)).collect(); // Same with the frees let frees: Vec<OsString> = $frees.as_ref().into_iter().map(|&o| os(o)).collect(); let frees: Vec<&OsStr> = frees.iter().map(|os| os.as_os_str()).collect(); let flags = <[_]>::into_vec(Box::new($flags)); let strictness = Strictness::UseLastArguments; // this isn’t even used let got = Args(TEST_ARGS).parse(inputs.iter(), strictness); let expected = Ok(Matches { frees, flags: MatchedFlags { flags, strictness } }); assert_eq!(got, expected); } }; ($name:ident: $inputs:expr => error $error:expr) => { #[test] fn $name() { use self::ParseError::*; let strictness = Strictness::UseLastArguments; // this isn’t even used let bits = $inputs.as_ref().into_iter().map(|&o| os(o)).collect::<Vec<OsString>>(); let got = Args(TEST_ARGS).parse(bits.iter(), strictness); assert_eq!(got, Err($error)); } }; } const SUGGESTIONS: Values = &[ "example" ]; static TEST_ARGS: &[&Arg] = &[ &Arg { short: Some(b'l'), long: "long", takes_value: TakesValue::Forbidden }, &Arg { short: Some(b'v'), long: "verbose", takes_value: TakesValue::Forbidden }, &Arg { short: Some(b'c'), long: "count", takes_value: TakesValue::Necessary(None) }, &Arg { short: Some(b't'), long: "type", takes_value: TakesValue::Necessary(Some(SUGGESTIONS)) } ]; // Just filenames test!(empty: [] => frees: [], flags: []); test!(one_arg: ["exa"] => frees: [ "exa" ], flags: []); // Dashes and double dashes test!(one_dash: ["-"] => frees: [ "-" ], flags: []); test!(two_dashes: ["--"] => frees: [], flags: []); test!(two_file: ["--", "file"] => frees: [ "file" ], flags: []); test!(two_arg_l: ["--", "--long"] => frees: [ "--long" ], flags: []); test!(two_arg_s: ["--", "-l"] => frees: [ "-l" ], flags: []); // Long args test!(long: ["--long"] => frees: [], flags: [ (Flag::Long("long"), None) ]); test!(long_then: ["--long", "4"] => frees: [ "4" ], flags: [ (Flag::Long("long"), None) ]); 
test!(long_two: ["--long", "--verbose"] => frees: [], flags: [ (Flag::Long("long"), None), (Flag::Long("verbose"), None) ]); // Long args with values test!(bad_equals: ["--long=equals"] => error ForbiddenValue { flag: Flag::Long("long") }); test!(no_arg: ["--count"] => error NeedsValue { flag: Flag::Long("count"), values: None }); test!(arg_equals: ["--count=4"] => frees: [], flags: [ (Flag::Long("count"), Some(OsStr::new("4"))) ]); test!(arg_then: ["--count", "4"] => frees: [], flags: [ (Flag::Long("count"), Some(OsStr::new("4"))) ]); // Long args with values and suggestions test!(no_arg_s: ["--type"] => error NeedsValue { flag: Flag::Long("type"), values: Some(SUGGESTIONS) }); test!(arg_equals_s: ["--type=exa"] => frees: [], flags: [ (Flag::Long("type"), Some(OsStr::new("exa"))) ]); test!(arg_then_s: ["--type", "exa"] => frees: [], flags: [ (Flag::Long("type"), Some(OsStr::new("exa"))) ]); // Short args test!(short: ["-l"] => frees: [], flags: [ (Flag::Short(b'l'), None) ]); test!(short_then: ["-l", "4"] => frees: [ "4" ], flags: [ (Flag::Short(b'l'), None) ]); test!(short_two: ["-lv"] => frees: [], flags: [ (Flag::Short(b'l'), None), (Flag::Short(b'v'), None) ]); test!(mixed: ["-v", "--long"] => frees: [], flags: [ (Flag::Short(b'v'), None), (Flag::Long("long"), None) ]); // Short args with values test!(bad_short: ["-l=equals"] => error ForbiddenValue { flag: Flag::Short(b'l') }); test!(short_none: ["-c"] => error NeedsValue { flag: Flag::Short(b'c'), values: None }); test!(short_arg_eq: ["-c=4"] => frees: [], flags: [(Flag::Short(b'c'), Some(OsStr::new("4"))) ]); test!(short_arg_then: ["-c", "4"] => frees: [], flags: [(Flag::Short(b'c'), Some(OsStr::new("4"))) ]); test!(short_two_together: ["-lctwo"] => frees: [], flags: [(Flag::Short(b'l'), None), (Flag::Short(b'c'), Some(OsStr::new("two"))) ]); test!(short_two_equals: ["-lc=two"] => frees: [], flags: [(Flag::Short(b'l'), None), (Flag::Short(b'c'), Some(OsStr::new("two"))) ]); test!(short_two_next: ["-lc", "two"] => frees: [], flags: [(Flag::Short(b'l'), None), (Flag::Short(b'c'), Some(OsStr::new("two"))) ]); // Short args with values and suggestions test!(short_none_s: ["-t"] => error NeedsValue { flag: Flag::Short(b't'), values: Some(SUGGESTIONS) }); test!(short_two_together_s: ["-texa"] => frees: [], flags: [(Flag::Short(b't'), Some(OsStr::new("exa"))) ]); test!(short_two_equals_s: ["-t=exa"] => frees: [], flags: [(Flag::Short(b't'), Some(OsStr::new("exa"))) ]); test!(short_two_next_s: ["-t", "exa"] => frees: [], flags: [(Flag::Short(b't'), Some(OsStr::new("exa"))) ]); // Unknown args test!(unknown_long: ["--quiet"] => error UnknownArgument { attempt: os("quiet") }); test!(unknown_long_eq: ["--quiet=shhh"] => error UnknownArgument { attempt: os("quiet") }); test!(unknown_short: ["-q"] => error UnknownShortArgument { attempt: b'q' }); test!(unknown_short_2nd: ["-lq"] => error UnknownShortArgument { attempt: b'q' }); test!(unknown_short_eq: ["-q=shhh"] => error UnknownShortArgument { attempt: b'q' }); test!(unknown_short_2nd_eq: ["-lq=shhh"] => error UnknownShortArgument { attempt: b'q' }); } #[cfg(test)] mod matches_test { use super::*; macro_rules! 
test { ($name:ident: $input:expr, has $param:expr => $result:expr) => { #[test] fn $name() { let flags = MatchedFlags { flags: $input.to_vec(), strictness: Strictness::UseLastArguments, }; assert_eq!(flags.has(&$param), Ok($result)); } }; } static VERBOSE: Arg = Arg { short: Some(b'v'), long: "verbose", takes_value: TakesValue::Forbidden }; static COUNT: Arg = Arg { short: Some(b'c'), long: "count", takes_value: TakesValue::Necessary(None) }; test!(short_never: [], has VERBOSE => false); test!(short_once: [(Flag::Short(b'v'), None)], has VERBOSE => true); test!(short_twice: [(Flag::Short(b'v'), None), (Flag::Short(b'v'), None)], has VERBOSE => true); test!(long_once: [(Flag::Long("verbose"), None)], has VERBOSE => true); test!(long_twice: [(Flag::Long("verbose"), None), (Flag::Long("verbose"), None)], has VERBOSE => true); test!(long_mixed: [(Flag::Long("verbose"), None), (Flag::Short(b'v'), None)], has VERBOSE => true); #[test] fn only_count() { let everything = os("everything"); let flags = MatchedFlags { flags: vec![ (Flag::Short(b'c'), Some(&*everything)) ], strictness: Strictness::UseLastArguments, }; assert_eq!(flags.get(&COUNT), Ok(Some(&*everything))); } #[test] fn rightmost_count() { let everything = os("everything"); let nothing = os("nothing"); let flags = MatchedFlags { flags: vec![ (Flag::Short(b'c'), Some(&*everything)), (Flag::Short(b'c'), Some(&*nothing)) ], strictness: Strictness::UseLastArguments, }; assert_eq!(flags.get(&COUNT), Ok(Some(&*nothing))); } #[test] fn no_count() { let flags = MatchedFlags { flags: Vec::new(), strictness: Strictness::UseLastArguments }; assert!(!flags.has(&COUNT).unwrap()); } }
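// Illustrative end-to-end sketch (an addition for orientation, not part of the
// original module): it wires `Args::parse` and `MatchedFlags` together the way
// exa's real option tables would. The `LONG` and `SORT` flags below are
// hypothetical stand-ins, not exa's actual option definitions.
#[cfg(test)]
mod usage_sketch {
    use super::*;
    use std::ffi::{OsStr, OsString};

    static LONG: Arg = Arg { short: Some(b'l'), long: "long", takes_value: TakesValue::Forbidden };
    static SORT: Arg = Arg { short: Some(b's'), long: "sort", takes_value: TakesValue::Necessary(None) };
    static ALL: Args = Args(&[ &LONG, &SORT ]);

    fn os(input: &str) -> OsString {
        OsString::from(input)
    }

    #[test]
    fn long_flag_sort_value_and_free_file() {
        let inputs: Vec<OsString> = vec![ os("-l"), os("--sort"), os("size"), os("Cargo.toml") ];
        let matches = ALL.parse(inputs.iter(), Strictness::UseLastArguments).unwrap();

        // `-l` was given, `--sort` carries the value that followed it, and the
        // remaining string is left over as a free (file name) argument.
        assert!(matches.flags.has(&LONG).unwrap());
        assert_eq!(matches.flags.get(&SORT).unwrap(), Some(OsStr::new("size")));
        assert_eq!(matches.frees, vec![ OsStr::new("Cargo.toml") ]);
    }
}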
40.674891
144
0.527921
561d96813b0d153d14f3a8ffd1b5ad5321c89021
24,992
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Format string literals. use regex::Regex; use unicode_segmentation::UnicodeSegmentation; use config::Config; use shape::Shape; use utils::wrap_str; const MIN_STRING: usize = 10; /// Describes the layout of a piece of text. pub struct StringFormat<'a> { /// The opening sequence of characters for the piece of text pub opener: &'a str, /// The closing sequence of characters for the piece of text pub closer: &'a str, /// The opening sequence of characters for a line pub line_start: &'a str, /// The closing sequence of characters for a line pub line_end: &'a str, /// The allocated box to fit the text into pub shape: Shape, /// Trim trailing whitespaces pub trim_end: bool, pub config: &'a Config, } impl<'a> StringFormat<'a> { pub fn new(shape: Shape, config: &'a Config) -> StringFormat<'a> { StringFormat { opener: "\"", closer: "\"", line_start: " ", line_end: "\\", shape, trim_end: false, config, } } /// Returns the maximum number of graphemes that is possible on a line while taking the /// indentation into account. /// /// If we cannot put at least a single character per line, the rewrite won't succeed. fn max_chars_with_indent(&self) -> Option<usize> { Some( self.shape .width .checked_sub(self.opener.len() + self.line_end.len() + 1)? + 1, ) } /// Like max_chars_with_indent but the indentation is not subtracted. /// This allows to fit more graphemes from the string on a line when /// SnippetState::EndWithLineFeed. fn max_chars_without_indent(&self) -> Option<usize> { Some(self.config.max_width().checked_sub(self.line_end.len())?) } } pub fn rewrite_string<'a>( orig: &str, fmt: &StringFormat<'a>, newline_max_chars: usize, ) -> Option<String> { let max_chars_with_indent = fmt.max_chars_with_indent()?; let max_chars_without_indent = fmt.max_chars_without_indent()?; let indent_with_newline = fmt.shape.indent.to_string_with_newline(fmt.config); let indent_without_newline = fmt.shape.indent.to_string(fmt.config); // Strip line breaks. // With this regex applied, all remaining whitespaces are significant let strip_line_breaks_re = Regex::new(r"([^\\](\\\\)*)\\[\n\r][[:space:]]*").unwrap(); let stripped_str = strip_line_breaks_re.replace_all(orig, "$1"); let graphemes = UnicodeSegmentation::graphemes(&*stripped_str, false).collect::<Vec<&str>>(); // `cur_start` is the position in `orig` of the start of the current line. let mut cur_start = 0; let mut result = String::with_capacity( stripped_str .len() .checked_next_power_of_two() .unwrap_or(usize::max_value()), ); result.push_str(fmt.opener); // Snip a line at a time from `stripped_str` until it is used up. Push the snippet // onto result. 
let mut cur_max_chars = max_chars_with_indent; let is_bareline_ok = fmt.line_start.is_empty() || is_whitespace(fmt.line_start); loop { // All the input starting at cur_start fits on the current line if graphemes.len() - cur_start <= cur_max_chars { for (i, grapheme) in graphemes[cur_start..].iter().enumerate() { if is_line_feed(grapheme) { // take care of blank lines result = trim_right_but_line_feed(fmt.trim_end, result); result.push_str("\n"); if !is_bareline_ok && cur_start + i + 1 < graphemes.len() { result.push_str(&indent_without_newline); result.push_str(fmt.line_start); } } else { result.push_str(grapheme); } } result = trim_right_but_line_feed(fmt.trim_end, result); break; } // The input starting at cur_start needs to be broken match break_string( cur_max_chars, fmt.trim_end, fmt.line_end, &graphemes[cur_start..], ) { SnippetState::LineEnd(line, len) => { result.push_str(&line); result.push_str(fmt.line_end); result.push_str(&indent_with_newline); result.push_str(fmt.line_start); cur_max_chars = newline_max_chars; cur_start += len; } SnippetState::EndWithLineFeed(line, len) => { if line == "\n" && fmt.trim_end { result = result.trim_right().to_string(); } result.push_str(&line); if is_bareline_ok { // the next line can benefit from the full width cur_max_chars = max_chars_without_indent; } else { result.push_str(&indent_without_newline); result.push_str(fmt.line_start); cur_max_chars = max_chars_with_indent; } cur_start += len; } SnippetState::EndOfInput(line) => { result.push_str(&line); break; } } } result.push_str(fmt.closer); wrap_str(result, fmt.config.max_width(), fmt.shape) } /// Returns the index to the end of the url if the given string includes an /// URL or alike. Otherwise, returns None; fn detect_url(s: &[&str], index: usize) -> Option<usize> { let start = match s[..=index].iter().rposition(|g| is_whitespace(g)) { Some(pos) => pos + 1, None => 0, }; if s.len() < start + 8 { return None; } let prefix = s[start..start + 8].concat(); if prefix.starts_with("https://") || prefix.starts_with("http://") || prefix.starts_with("ftp://") || prefix.starts_with("file://") { match s[index..].iter().position(|g| is_whitespace(g)) { Some(pos) => Some(index + pos - 1), None => Some(s.len() - 1), } } else { None } } /// Trims whitespaces to the right except for the line feed character. fn trim_right_but_line_feed(trim_end: bool, result: String) -> String { let whitespace_except_line_feed = |c: char| c.is_whitespace() && c != '\n'; if trim_end && result.ends_with(whitespace_except_line_feed) { result .trim_right_matches(whitespace_except_line_feed) .to_string() } else { result } } /// Result of breaking a string so it fits in a line and the state it ended in. /// The state informs about what to do with the snippet and how to continue the breaking process. #[derive(Debug, PartialEq)] enum SnippetState { /// The input could not be broken and so rewriting the string is finished. EndOfInput(String), /// The input could be broken and the returned snippet should be ended with a /// `[StringFormat::line_end]`. The next snippet needs to be indented. /// /// The returned string is the line to print out and the number is the length that got read in /// the text being rewritten. That length may be greater than the returned string if trailing /// whitespaces got trimmed. LineEnd(String, usize), /// The input could be broken but a newline is present that cannot be trimmed. The next snippet /// to be rewritten *could* use more width than what is specified by the given shape. 
For /// example with a multiline string, the next snippet does not need to be indented, allowing /// more characters to be fit within a line. /// /// The returned string is the line to print out and the number is the length that got read in /// the text being rewritten. EndWithLineFeed(String, usize), } fn not_whitespace_except_line_feed(g: &str) -> bool { is_line_feed(g) || !is_whitespace(g) } /// Break the input string at a boundary character around the offset `max_chars`. A boundary /// character is either a punctuation or a whitespace. fn break_string(max_chars: usize, trim_end: bool, line_end: &str, input: &[&str]) -> SnippetState { let break_at = |index /* grapheme at index is included */| { // Take in any whitespaces to the left/right of `input[index]` while // preserving line feeds let index_minus_ws = input[0..=index] .iter() .rposition(|grapheme| not_whitespace_except_line_feed(grapheme)) .unwrap_or(index); // Take into account newlines occurring in input[0..=index], i.e., the possible next new // line. If there is one, then text after it could be rewritten in a way that the available // space is fully used. for (i, grapheme) in input[0..=index].iter().enumerate() { if is_line_feed(grapheme) { if i <= index_minus_ws { let mut line = &input[0..i].concat()[..]; if trim_end { line = line.trim_right(); } return SnippetState::EndWithLineFeed(format!("{}\n", line), i + 1); } break; } } let mut index_plus_ws = index; for (i, grapheme) in input[index + 1..].iter().enumerate() { if !trim_end && is_line_feed(grapheme) { return SnippetState::EndWithLineFeed( input[0..=index + 1 + i].concat(), index + 2 + i, ); } else if not_whitespace_except_line_feed(grapheme) { index_plus_ws = index + i; break; } } if trim_end { SnippetState::LineEnd(input[0..=index_minus_ws].concat(), index_plus_ws + 1) } else { SnippetState::LineEnd(input[0..=index_plus_ws].concat(), index_plus_ws + 1) } }; // Find the position in input for breaking the string if line_end.is_empty() && trim_end && !is_whitespace(input[max_chars - 1]) && is_whitespace(input[max_chars]) { // At a breaking point already // The line won't invalidate the rewriting because: // - no extra space needed for the line_end character // - extra whitespaces to the right can be trimmed return break_at(max_chars - 1); } if let Some(url_index_end) = detect_url(input, max_chars) { let index_plus_ws = url_index_end + input[url_index_end..] .iter() .skip(1) .position(|grapheme| not_whitespace_except_line_feed(grapheme)) .unwrap_or(0); return if trim_end { SnippetState::LineEnd(input[..=url_index_end].concat(), index_plus_ws + 1) } else { return SnippetState::LineEnd(input[..=index_plus_ws].concat(), index_plus_ws + 1); }; } match input[0..max_chars] .iter() .rposition(|grapheme| is_whitespace(grapheme)) { // Found a whitespace and what is on its left side is big enough. Some(index) if index >= MIN_STRING => break_at(index), // No whitespace found, try looking for a punctuation instead _ => match input[0..max_chars] .iter() .rposition(|grapheme| is_punctuation(grapheme)) { // Found a punctuation and what is on its left side is big enough. Some(index) if index >= MIN_STRING => break_at(index), // Either no boundary character was found to the left of `input[max_chars]`, or the line // got too small. We try searching for a boundary character to the right. _ => match input[max_chars..] 
.iter() .position(|grapheme| is_whitespace(grapheme) || is_punctuation(grapheme)) { // A boundary was found after the line limit Some(index) => break_at(max_chars + index), // No boundary to the right, the input cannot be broken None => SnippetState::EndOfInput(input.concat()), }, }, } } fn is_line_feed(grapheme: &str) -> bool { grapheme.as_bytes()[0] == b'\n' } fn is_whitespace(grapheme: &str) -> bool { grapheme.chars().all(|c| c.is_whitespace()) } fn is_punctuation(grapheme: &str) -> bool { match grapheme.as_bytes()[0] { b':' | b',' | b';' | b'.' => true, _ => false, } } #[cfg(test)] mod test { use super::{break_string, detect_url, rewrite_string, SnippetState, StringFormat}; use config::Config; use shape::{Indent, Shape}; use unicode_segmentation::UnicodeSegmentation; #[test] fn issue343() { let config = Default::default(); let fmt = StringFormat::new(Shape::legacy(2, Indent::empty()), &config); rewrite_string("eq_", &fmt, 2); } #[test] fn should_break_on_whitespace() { let string = "Placerat felis. Mauris porta ante sagittis purus."; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!( break_string(20, false, "", &graphemes[..]), SnippetState::LineEnd("Placerat felis. ".to_string(), 16) ); assert_eq!( break_string(20, true, "", &graphemes[..]), SnippetState::LineEnd("Placerat felis.".to_string(), 16) ); } #[test] fn should_break_on_punctuation() { let string = "Placerat_felis._Mauris_porta_ante_sagittis_purus."; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!( break_string(20, false, "", &graphemes[..]), SnippetState::LineEnd("Placerat_felis.".to_string(), 15) ); } #[test] fn should_break_forward() { let string = "Venenatis_tellus_vel_tellus. Aliquam aliquam dolor at justo."; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!( break_string(20, false, "", &graphemes[..]), SnippetState::LineEnd("Venenatis_tellus_vel_tellus. ".to_string(), 29) ); assert_eq!( break_string(20, true, "", &graphemes[..]), SnippetState::LineEnd("Venenatis_tellus_vel_tellus.".to_string(), 29) ); } #[test] fn nothing_to_break() { let string = "Venenatis_tellus_vel_tellus"; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!( break_string(20, false, "", &graphemes[..]), SnippetState::EndOfInput("Venenatis_tellus_vel_tellus".to_string()) ); } #[test] fn significant_whitespaces() { let string = "Neque in sem. \n Pellentesque tellus augue."; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!( break_string(15, false, "", &graphemes[..]), SnippetState::EndWithLineFeed("Neque in sem. \n".to_string(), 20) ); assert_eq!( break_string(25, false, "", &graphemes[..]), SnippetState::EndWithLineFeed("Neque in sem. \n".to_string(), 20) ); assert_eq!( break_string(15, true, "", &graphemes[..]), SnippetState::LineEnd("Neque in sem.".to_string(), 19) ); assert_eq!( break_string(25, true, "", &graphemes[..]), SnippetState::EndWithLineFeed("Neque in sem.\n".to_string(), 20) ); } #[test] fn big_whitespace() { let string = "Neque in sem. Pellentesque tellus augue."; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!( break_string(20, false, "", &graphemes[..]), SnippetState::LineEnd("Neque in sem. 
".to_string(), 25) ); assert_eq!( break_string(20, true, "", &graphemes[..]), SnippetState::LineEnd("Neque in sem.".to_string(), 25) ); } #[test] fn newline_in_candidate_line() { let string = "Nulla\nconsequat erat at massa. Vivamus id mi."; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!( break_string(25, false, "", &graphemes[..]), SnippetState::EndWithLineFeed("Nulla\n".to_string(), 6) ); assert_eq!( break_string(25, true, "", &graphemes[..]), SnippetState::EndWithLineFeed("Nulla\n".to_string(), 6) ); let mut config: Config = Default::default(); config.set().max_width(27); let fmt = StringFormat::new(Shape::legacy(25, Indent::empty()), &config); let rewritten_string = rewrite_string(string, &fmt, 27); assert_eq!( rewritten_string, Some("\"Nulla\nconsequat erat at massa. \\\n Vivamus id mi.\"".to_string()) ); } #[test] fn last_line_fit_with_trailing_whitespaces() { let string = "Vivamus id mi. "; let config: Config = Default::default(); let mut fmt = StringFormat::new(Shape::legacy(25, Indent::empty()), &config); fmt.trim_end = true; let rewritten_string = rewrite_string(string, &fmt, 25); assert_eq!(rewritten_string, Some("\"Vivamus id mi.\"".to_string())); fmt.trim_end = false; // default value of trim_end let rewritten_string = rewrite_string(string, &fmt, 25); assert_eq!(rewritten_string, Some("\"Vivamus id mi. \"".to_string())); } #[test] fn last_line_fit_with_newline() { let string = "Vivamus id mi.\nVivamus id mi."; let config: Config = Default::default(); let fmt = StringFormat { opener: "", closer: "", line_start: "// ", line_end: "", shape: Shape::legacy(100, Indent::from_width(&config, 4)), trim_end: true, config: &config, }; let rewritten_string = rewrite_string(string, &fmt, 100); assert_eq!( rewritten_string, Some("Vivamus id mi.\n // Vivamus id mi.".to_string()) ); } #[test] fn overflow_in_non_string_content() { let comment = "Aenean metus.\nVestibulum ac lacus. Vivamus porttitor"; let config: Config = Default::default(); let fmt = StringFormat { opener: "", closer: "", line_start: "// ", line_end: "", shape: Shape::legacy(30, Indent::from_width(&config, 8)), trim_end: true, config: &config, }; assert_eq!( rewrite_string(comment, &fmt, 30), Some( "Aenean metus.\n // Vestibulum ac lacus. Vivamus\n // porttitor" .to_string() ) ); } #[test] fn overflow_in_non_string_content_with_line_end() { let comment = "Aenean metus.\nVestibulum ac lacus. Vivamus porttitor"; let config: Config = Default::default(); let fmt = StringFormat { opener: "", closer: "", line_start: "// ", line_end: "@", shape: Shape::legacy(30, Indent::from_width(&config, 8)), trim_end: true, config: &config, }; assert_eq!( rewrite_string(comment, &fmt, 30), Some( "Aenean metus.\n // Vestibulum ac lacus. Vivamus@\n // porttitor" .to_string() ) ); } #[test] fn blank_line_with_non_empty_line_start() { let config: Config = Default::default(); let mut fmt = StringFormat { opener: "", closer: "", line_start: "// ", line_end: "", shape: Shape::legacy(30, Indent::from_width(&config, 4)), trim_end: true, config: &config, }; let comment = "Aenean metus. Vestibulum\n\nac lacus. Vivamus porttitor"; assert_eq!( rewrite_string(comment, &fmt, 30), Some( "Aenean metus. Vestibulum\n //\n // ac lacus. Vivamus porttitor".to_string() ) ); fmt.shape = Shape::legacy(15, Indent::from_width(&config, 4)); let comment = "Aenean\n\nmetus. Vestibulum ac lacus. Vivamus porttitor"; assert_eq!( rewrite_string(comment, &fmt, 15), Some( r#"Aenean // // metus. Vestibulum // ac lacus. 
Vivamus // porttitor"# .to_string() ) ); } #[test] fn retain_blank_lines() { let config: Config = Default::default(); let fmt = StringFormat { opener: "", closer: "", line_start: "// ", line_end: "", shape: Shape::legacy(20, Indent::from_width(&config, 4)), trim_end: true, config: &config, }; let comment = "Aenean\n\nmetus. Vestibulum ac lacus.\n\n"; assert_eq!( rewrite_string(comment, &fmt, 20), Some( "Aenean\n //\n // metus. Vestibulum ac\n // lacus.\n //\n".to_string() ) ); let comment = "Aenean\n\nmetus. Vestibulum ac lacus.\n"; assert_eq!( rewrite_string(comment, &fmt, 20), Some("Aenean\n //\n // metus. Vestibulum ac\n // lacus.\n".to_string()) ); let comment = "Aenean\n \nmetus. Vestibulum ac lacus."; assert_eq!( rewrite_string(comment, &fmt, 20), Some("Aenean\n //\n // metus. Vestibulum ac\n // lacus.".to_string()) ); } #[test] fn boundary_on_edge() { let config: Config = Default::default(); let mut fmt = StringFormat { opener: "", closer: "", line_start: "// ", line_end: "", shape: Shape::legacy(13, Indent::from_width(&config, 4)), trim_end: true, config: &config, }; let comment = "Aenean metus. Vestibulum ac lacus."; assert_eq!( rewrite_string(comment, &fmt, 13), Some("Aenean metus.\n // Vestibulum ac\n // lacus.".to_string()) ); fmt.trim_end = false; let comment = "Vestibulum ac lacus."; assert_eq!( rewrite_string(comment, &fmt, 13), Some("Vestibulum \n // ac lacus.".to_string()) ); fmt.trim_end = true; fmt.line_end = "\\"; let comment = "Vestibulum ac lacus."; assert_eq!( rewrite_string(comment, &fmt, 13), Some("Vestibulum\\\n // ac lacus.".to_string()) ); } #[test] fn detect_urls() { let string = "aaa http://example.org something"; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!(detect_url(&graphemes, 8), Some(21)); let string = "https://example.org something"; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!(detect_url(&graphemes, 0), Some(18)); let string = "aaa ftp://example.org something"; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!(detect_url(&graphemes, 8), Some(20)); let string = "aaa file://example.org something"; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!(detect_url(&graphemes, 8), Some(21)); let string = "aaa http not an url"; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!(detect_url(&graphemes, 6), None); let string = "aaa file://example.org"; let graphemes = UnicodeSegmentation::graphemes(&*string, false).collect::<Vec<&str>>(); assert_eq!(detect_url(&graphemes, 8), Some(21)); } }
36.752941
100
0.56298
23a240954b9d244ed01d15e08afada146c64d7b3
2,106
use gristmill::event;
use gristmill::geometry2d::Point;

use super::GuiNode;

// -------------------------------------------------------------------------------------------------

#[derive(Clone)]
pub enum GuiInputEvent {
    CursorMoved(Point),
    PrimaryButton(bool),
}

impl event::Event for GuiInputEvent {}

pub type GuiInputSystem = event::EventSystem<GuiInputEvent>;

#[derive(Clone)]
pub enum GuiActionEvent {
    Generic,
    Named(String),
    Index(usize),
    NamedIndex(String, usize),
}

impl event::Event for GuiActionEvent {}

// This type is a convenience for pattern-matching
#[derive(Copy, Clone)]
pub enum GuiActionEventRef<'a> {
    Generic,
    Named(&'a str),
    Index(usize),
    NamedIndex(&'a str, usize),
}

impl GuiActionEvent {
    pub fn as_ref(&self) -> GuiActionEventRef {
        match self {
            GuiActionEvent::Generic => GuiActionEventRef::Generic,
            GuiActionEvent::Named(s) => GuiActionEventRef::Named(s),
            GuiActionEvent::Index(i) => GuiActionEventRef::Index(*i),
            GuiActionEvent::NamedIndex(s, i) => GuiActionEventRef::NamedIndex(s, *i),
        }
    }
}

pub enum GuiNavigationEvent {
    Hover(GuiNode),
    Focus(GuiNode),
}

impl event::Event for GuiNavigationEvent {}

pub type GuiActionEventSystem = event::EventSystem<GuiActionEvent>;
pub type GuiNavigationEventSystem = event::EventSystem<GuiNavigationEvent>;

pub struct GuiEventSystem<'a> {
    action_system: Option<&'a mut GuiActionEventSystem>,
    navigation_system: &'a mut GuiNavigationEventSystem,
}

impl<'a> GuiEventSystem<'a> {
    pub(crate) fn new(action_system: Option<&'a mut GuiActionEventSystem>, navigation_system: &'a mut GuiNavigationEventSystem) -> GuiEventSystem<'a> {
        GuiEventSystem { action_system, navigation_system }
    }

    pub fn fire_action(&mut self, event: GuiActionEvent) {
        if let Some(action_system) = &mut self.action_system {
            action_system.fire_event(event);
        }
    }
    pub fn fire_navigation(&mut self, event: GuiNavigationEvent) {
        self.navigation_system.fire_event(event);
    }
}
28.08
151
0.655745
fe914d8bdaeef970b508cda66028c0378303a204
5,412
use std::io::Read; use std::{ fs, io::BufReader, path::{Path, PathBuf}, }; #[cfg(windows)] use std::ptr::write_volatile; use libafl::{ bolts::{current_nanos, rands::StdRand, tuples::tuple_list}, corpus::{InMemoryCorpus, OnDiskCorpus, QueueCorpusScheduler}, events::SimpleEventManager, executors::{inprocess::InProcessExecutor, ExitKind}, feedbacks::{CrashFeedback, MapFeedbackState, MaxMapFeedback}, fuzzer::{Fuzzer, StdFuzzer}, generators::{Automaton, GramatronGenerator}, inputs::GramatronInput, monitors::SimpleMonitor, mutators::{ GramatronRandomMutator, GramatronRecursionMutator, GramatronSpliceMutator, StdScheduledMutator, }, observers::StdMapObserver, stages::mutational::StdMutationalStage, state::StdState, }; /// Coverage map with explicit assignments due to the lack of instrumentation static mut SIGNALS: [u8; 16] = [0; 16]; /* /// Assign a signal to the signals map fn signals_set(idx: usize) { unsafe { SIGNALS[idx] = 1 }; } */ fn read_automaton_from_file<P: AsRef<Path>>(path: P) -> Automaton { let file = fs::File::open(path).unwrap(); let mut reader = BufReader::new(file); let mut buffer = Vec::new(); reader.read_to_end(&mut buffer).unwrap(); postcard::from_bytes(&buffer).unwrap() } #[allow(clippy::similar_names)] pub fn main() { let mut bytes = vec![]; // The closure that we want to fuzz let mut harness = |input: &GramatronInput| { input.unparse(&mut bytes); unsafe { println!(">>> {}", std::str::from_utf8_unchecked(&bytes)); } ExitKind::Ok }; // Create an observation channel using the signals map let observer = StdMapObserver::new("signals", unsafe { &mut SIGNALS }); // The state of the edges feedback. let feedback_state = MapFeedbackState::with_observer(&observer); // Feedback to rate the interestingness of an input let feedback = MaxMapFeedback::new(&feedback_state, &observer); // A feedback to choose if an input is a solution or not let objective = CrashFeedback::new(); // create a State from scratch let mut state = StdState::new( // RNG StdRand::with_seed(current_nanos()), // Corpus that will be evolved, we keep it in memory for performance InMemoryCorpus::new(), // Corpus in which we store solutions (crashes in this example), // on disk so the user can get them after stopping the fuzzer OnDiskCorpus::new(PathBuf::from("./crashes")).unwrap(), // States of the feedbacks. // They are the data related to the feedbacks that you want to persist in the State. 
tuple_list!(feedback_state), ); // The Monitor trait define how the fuzzer stats are reported to the user let monitor = SimpleMonitor::new(|s| println!("{}", s)); // The event manager handle the various events generated during the fuzzing loop // such as the notification of the addition of a new item to the corpus let mut mgr = SimpleEventManager::new(monitor); // A queue policy to get testcasess from the corpus let scheduler = QueueCorpusScheduler::new(); // A fuzzer with feedbacks and a corpus scheduler let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective); // Create the executor for an in-process function with just one observer let mut executor = InProcessExecutor::new( &mut harness, tuple_list!(observer), &mut fuzzer, &mut state, &mut mgr, ) .expect("Failed to create the Executor"); let automaton = read_automaton_from_file(PathBuf::from("auto.postcard")); let mut generator = GramatronGenerator::new(&automaton); // Use this code to profile the generator performance /* use libafl::generators::Generator; use std::collections::HashSet; use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; fn calculate_hash<T: Hash>(t: &T) -> u64 { let mut s = DefaultHasher::new(); t.hash(&mut s); s.finish() } let mut set = HashSet::new(); let st = libafl::bolts::current_milliseconds(); let mut b = vec![]; let mut c = 0; for _ in 0..100000 { let i = generator.generate(&mut state).unwrap(); i.unparse(&mut b); set.insert(calculate_hash(&b)); c += b.len(); } println!("{} / {}", c, libafl::bolts::current_milliseconds() - st); println!("{} / 100000", set.len()); return; */ // Generate 8 initial inputs state .generate_initial_inputs_forced(&mut fuzzer, &mut executor, &mut generator, &mut mgr, 8) .expect("Failed to generate the initial corpus"); // Setup a mutational stage with a basic bytes mutator let mutator = StdScheduledMutator::with_max_iterations( tuple_list!( GramatronRandomMutator::new(&generator), GramatronRandomMutator::new(&generator), GramatronRandomMutator::new(&generator), GramatronSpliceMutator::new(), GramatronSpliceMutator::new(), GramatronRecursionMutator::new() ), 2, ); let mut stages = tuple_list!(StdMutationalStage::new(mutator)); fuzzer .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr) .expect("Error in the fuzzing loop"); }
33
96
0.645418
5607f6be310e73536c7c3f06d5086c182bc59254
749
/*
 *
 *
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: 1.0.0
 *
 * Generated by: https://openapi-generator.tech
 */

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LolLoginUsernameAndPassword {
    #[serde(rename = "password", skip_serializing_if = "Option::is_none")]
    pub password: Option<String>,
    #[serde(rename = "username", skip_serializing_if = "Option::is_none")]
    pub username: Option<String>,
}

impl LolLoginUsernameAndPassword {
    pub fn new() -> LolLoginUsernameAndPassword {
        LolLoginUsernameAndPassword {
            password: None,
            username: None,
        }
    }
}
23.40625
109
0.674232
c1934dab605abfe43f1acb7ec5654cd67dcbcc89
19,535
#[doc = "Register `HSTPIPCFG[%s]` reader"] pub struct R(crate::R<HSTPIPCFG_SPEC>); impl core::ops::Deref for R { type Target = crate::R<HSTPIPCFG_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::convert::From<crate::R<HSTPIPCFG_SPEC>> for R { fn from(reader: crate::R<HSTPIPCFG_SPEC>) -> Self { R(reader) } } #[doc = "Register `HSTPIPCFG[%s]` writer"] pub struct W(crate::W<HSTPIPCFG_SPEC>); impl core::ops::Deref for W { type Target = crate::W<HSTPIPCFG_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl core::convert::From<crate::W<HSTPIPCFG_SPEC>> for W { fn from(writer: crate::W<HSTPIPCFG_SPEC>) -> Self { W(writer) } } #[doc = "Field `ALLOC` reader - Pipe Memory Allocate"] pub struct ALLOC_R(crate::FieldReader<bool, bool>); impl ALLOC_R { pub(crate) fn new(bits: bool) -> Self { ALLOC_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for ALLOC_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `ALLOC` writer - Pipe Memory Allocate"] pub struct ALLOC_W<'a> { w: &'a mut W, } impl<'a> ALLOC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1); self.w } } #[doc = "Pipe Banks"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum PBK_A { #[doc = "0: Single-bank pipe"] _1_BANK = 0, #[doc = "1: Double-bank pipe"] _2_BANK = 1, #[doc = "2: Triple-bank pipe"] _3_BANK = 2, } impl From<PBK_A> for u8 { #[inline(always)] fn from(variant: PBK_A) -> Self { variant as _ } } #[doc = "Field `PBK` reader - Pipe Banks"] pub struct PBK_R(crate::FieldReader<u8, PBK_A>); impl PBK_R { pub(crate) fn new(bits: u8) -> Self { PBK_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> Option<PBK_A> { match self.bits { 0 => Some(PBK_A::_1_BANK), 1 => Some(PBK_A::_2_BANK), 2 => Some(PBK_A::_3_BANK), _ => None, } } #[doc = "Checks if the value of the field is `_1_BANK`"] #[inline(always)] pub fn is_1_bank(&self) -> bool { **self == PBK_A::_1_BANK } #[doc = "Checks if the value of the field is `_2_BANK`"] #[inline(always)] pub fn is_2_bank(&self) -> bool { **self == PBK_A::_2_BANK } #[doc = "Checks if the value of the field is `_3_BANK`"] #[inline(always)] pub fn is_3_bank(&self) -> bool { **self == PBK_A::_3_BANK } } impl core::ops::Deref for PBK_R { type Target = crate::FieldReader<u8, PBK_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PBK` writer - Pipe Banks"] pub struct PBK_W<'a> { w: &'a mut W, } impl<'a> PBK_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PBK_A) -> &'a mut W { unsafe { self.bits(variant.into()) } } #[doc = "Single-bank pipe"] #[inline(always)] pub fn _1_bank(self) -> &'a mut W { self.variant(PBK_A::_1_BANK) } #[doc = "Double-bank pipe"] #[inline(always)] pub fn _2_bank(self) -> &'a mut W { self.variant(PBK_A::_2_BANK) } #[doc = "Triple-bank pipe"] #[inline(always)] pub fn _3_bank(self) -> &'a mut W { self.variant(PBK_A::_3_BANK) } #[doc = r"Writes 
raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 2)) | ((value as u32 & 0x03) << 2); self.w } } #[doc = "Pipe Size"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum PSIZE_A { #[doc = "0: 8 bytes"] _8_BYTE = 0, #[doc = "1: 16 bytes"] _16_BYTE = 1, #[doc = "2: 32 bytes"] _32_BYTE = 2, #[doc = "3: 64 bytes"] _64_BYTE = 3, #[doc = "4: 128 bytes"] _128_BYTE = 4, #[doc = "5: 256 bytes"] _256_BYTE = 5, #[doc = "6: 512 bytes"] _512_BYTE = 6, #[doc = "7: 1024 bytes"] _1024_BYTE = 7, } impl From<PSIZE_A> for u8 { #[inline(always)] fn from(variant: PSIZE_A) -> Self { variant as _ } } #[doc = "Field `PSIZE` reader - Pipe Size"] pub struct PSIZE_R(crate::FieldReader<u8, PSIZE_A>); impl PSIZE_R { pub(crate) fn new(bits: u8) -> Self { PSIZE_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> PSIZE_A { match self.bits { 0 => PSIZE_A::_8_BYTE, 1 => PSIZE_A::_16_BYTE, 2 => PSIZE_A::_32_BYTE, 3 => PSIZE_A::_64_BYTE, 4 => PSIZE_A::_128_BYTE, 5 => PSIZE_A::_256_BYTE, 6 => PSIZE_A::_512_BYTE, 7 => PSIZE_A::_1024_BYTE, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `_8_BYTE`"] #[inline(always)] pub fn is_8_byte(&self) -> bool { **self == PSIZE_A::_8_BYTE } #[doc = "Checks if the value of the field is `_16_BYTE`"] #[inline(always)] pub fn is_16_byte(&self) -> bool { **self == PSIZE_A::_16_BYTE } #[doc = "Checks if the value of the field is `_32_BYTE`"] #[inline(always)] pub fn is_32_byte(&self) -> bool { **self == PSIZE_A::_32_BYTE } #[doc = "Checks if the value of the field is `_64_BYTE`"] #[inline(always)] pub fn is_64_byte(&self) -> bool { **self == PSIZE_A::_64_BYTE } #[doc = "Checks if the value of the field is `_128_BYTE`"] #[inline(always)] pub fn is_128_byte(&self) -> bool { **self == PSIZE_A::_128_BYTE } #[doc = "Checks if the value of the field is `_256_BYTE`"] #[inline(always)] pub fn is_256_byte(&self) -> bool { **self == PSIZE_A::_256_BYTE } #[doc = "Checks if the value of the field is `_512_BYTE`"] #[inline(always)] pub fn is_512_byte(&self) -> bool { **self == PSIZE_A::_512_BYTE } #[doc = "Checks if the value of the field is `_1024_BYTE`"] #[inline(always)] pub fn is_1024_byte(&self) -> bool { **self == PSIZE_A::_1024_BYTE } } impl core::ops::Deref for PSIZE_R { type Target = crate::FieldReader<u8, PSIZE_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PSIZE` writer - Pipe Size"] pub struct PSIZE_W<'a> { w: &'a mut W, } impl<'a> PSIZE_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PSIZE_A) -> &'a mut W { self.bits(variant.into()) } #[doc = "8 bytes"] #[inline(always)] pub fn _8_byte(self) -> &'a mut W { self.variant(PSIZE_A::_8_BYTE) } #[doc = "16 bytes"] #[inline(always)] pub fn _16_byte(self) -> &'a mut W { self.variant(PSIZE_A::_16_BYTE) } #[doc = "32 bytes"] #[inline(always)] pub fn _32_byte(self) -> &'a mut W { self.variant(PSIZE_A::_32_BYTE) } #[doc = "64 bytes"] #[inline(always)] pub fn _64_byte(self) -> &'a mut W { self.variant(PSIZE_A::_64_BYTE) } #[doc = "128 bytes"] #[inline(always)] pub fn _128_byte(self) -> &'a mut W { self.variant(PSIZE_A::_128_BYTE) } #[doc = "256 bytes"] #[inline(always)] pub fn _256_byte(self) -> &'a mut W { self.variant(PSIZE_A::_256_BYTE) } #[doc = "512 bytes"] #[inline(always)] pub fn _512_byte(self) -> &'a mut W { self.variant(PSIZE_A::_512_BYTE) } #[doc = "1024 bytes"] #[inline(always)] pub 
fn _1024_byte(self) -> &'a mut W { self.variant(PSIZE_A::_1024_BYTE) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 4)) | ((value as u32 & 0x07) << 4); self.w } } #[doc = "Pipe Token"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum PTOKEN_A { #[doc = "0: SETUP"] SETUP = 0, #[doc = "1: IN"] IN = 1, #[doc = "2: OUT"] OUT = 2, } impl From<PTOKEN_A> for u8 { #[inline(always)] fn from(variant: PTOKEN_A) -> Self { variant as _ } } #[doc = "Field `PTOKEN` reader - Pipe Token"] pub struct PTOKEN_R(crate::FieldReader<u8, PTOKEN_A>); impl PTOKEN_R { pub(crate) fn new(bits: u8) -> Self { PTOKEN_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> Option<PTOKEN_A> { match self.bits { 0 => Some(PTOKEN_A::SETUP), 1 => Some(PTOKEN_A::IN), 2 => Some(PTOKEN_A::OUT), _ => None, } } #[doc = "Checks if the value of the field is `SETUP`"] #[inline(always)] pub fn is_setup(&self) -> bool { **self == PTOKEN_A::SETUP } #[doc = "Checks if the value of the field is `IN`"] #[inline(always)] pub fn is_in(&self) -> bool { **self == PTOKEN_A::IN } #[doc = "Checks if the value of the field is `OUT`"] #[inline(always)] pub fn is_out(&self) -> bool { **self == PTOKEN_A::OUT } } impl core::ops::Deref for PTOKEN_R { type Target = crate::FieldReader<u8, PTOKEN_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PTOKEN` writer - Pipe Token"] pub struct PTOKEN_W<'a> { w: &'a mut W, } impl<'a> PTOKEN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PTOKEN_A) -> &'a mut W { unsafe { self.bits(variant.into()) } } #[doc = "SETUP"] #[inline(always)] pub fn setup(self) -> &'a mut W { self.variant(PTOKEN_A::SETUP) } #[doc = "IN"] #[inline(always)] pub fn in_(self) -> &'a mut W { self.variant(PTOKEN_A::IN) } #[doc = "OUT"] #[inline(always)] pub fn out(self) -> &'a mut W { self.variant(PTOKEN_A::OUT) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 8)) | ((value as u32 & 0x03) << 8); self.w } } #[doc = "Field `AUTOSW` reader - Automatic Switch"] pub struct AUTOSW_R(crate::FieldReader<bool, bool>); impl AUTOSW_R { pub(crate) fn new(bits: bool) -> Self { AUTOSW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for AUTOSW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `AUTOSW` writer - Automatic Switch"] pub struct AUTOSW_W<'a> { w: &'a mut W, } impl<'a> AUTOSW_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 10)) | ((value as u32 & 0x01) << 10); self.w } } #[doc = "Pipe Type"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum PTYPE_A { #[doc = "0: Control"] CTRL = 0, #[doc = "1: Isochronous"] ISO = 1, #[doc = "2: Bulk"] BLK = 2, #[doc = "3: Interrupt"] INTRPT = 3, } impl From<PTYPE_A> for u8 { #[inline(always)] fn from(variant: PTYPE_A) -> Self { variant as _ } } #[doc = "Field `PTYPE` reader - Pipe Type"] pub struct PTYPE_R(crate::FieldReader<u8, PTYPE_A>); impl PTYPE_R { 
pub(crate) fn new(bits: u8) -> Self { PTYPE_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> PTYPE_A { match self.bits { 0 => PTYPE_A::CTRL, 1 => PTYPE_A::ISO, 2 => PTYPE_A::BLK, 3 => PTYPE_A::INTRPT, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `CTRL`"] #[inline(always)] pub fn is_ctrl(&self) -> bool { **self == PTYPE_A::CTRL } #[doc = "Checks if the value of the field is `ISO`"] #[inline(always)] pub fn is_iso(&self) -> bool { **self == PTYPE_A::ISO } #[doc = "Checks if the value of the field is `BLK`"] #[inline(always)] pub fn is_blk(&self) -> bool { **self == PTYPE_A::BLK } #[doc = "Checks if the value of the field is `INTRPT`"] #[inline(always)] pub fn is_intrpt(&self) -> bool { **self == PTYPE_A::INTRPT } } impl core::ops::Deref for PTYPE_R { type Target = crate::FieldReader<u8, PTYPE_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PTYPE` writer - Pipe Type"] pub struct PTYPE_W<'a> { w: &'a mut W, } impl<'a> PTYPE_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PTYPE_A) -> &'a mut W { self.bits(variant.into()) } #[doc = "Control"] #[inline(always)] pub fn ctrl(self) -> &'a mut W { self.variant(PTYPE_A::CTRL) } #[doc = "Isochronous"] #[inline(always)] pub fn iso(self) -> &'a mut W { self.variant(PTYPE_A::ISO) } #[doc = "Bulk"] #[inline(always)] pub fn blk(self) -> &'a mut W { self.variant(PTYPE_A::BLK) } #[doc = "Interrupt"] #[inline(always)] pub fn intrpt(self) -> &'a mut W { self.variant(PTYPE_A::INTRPT) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 12)) | ((value as u32 & 0x03) << 12); self.w } } #[doc = "Field `PEPNUM` reader - Pipe Endpoint Number"] pub struct PEPNUM_R(crate::FieldReader<u8, u8>); impl PEPNUM_R { pub(crate) fn new(bits: u8) -> Self { PEPNUM_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for PEPNUM_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PEPNUM` writer - Pipe Endpoint Number"] pub struct PEPNUM_W<'a> { w: &'a mut W, } impl<'a> PEPNUM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 16)) | ((value as u32 & 0x0f) << 16); self.w } } #[doc = "Field `INTFRQ` reader - Pipe Interrupt Request Frequency"] pub struct INTFRQ_R(crate::FieldReader<u8, u8>); impl INTFRQ_R { pub(crate) fn new(bits: u8) -> Self { INTFRQ_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for INTFRQ_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `INTFRQ` writer - Pipe Interrupt Request Frequency"] pub struct INTFRQ_W<'a> { w: &'a mut W, } impl<'a> INTFRQ_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 24)) | ((value as u32 & 0xff) << 24); self.w } } impl R { #[doc = "Bit 1 - Pipe Memory Allocate"] #[inline(always)] pub fn alloc(&self) -> ALLOC_R { ALLOC_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bits 2:3 - Pipe Banks"] #[inline(always)] pub fn pbk(&self) -> PBK_R { PBK_R::new(((self.bits >> 2) & 0x03) as u8) } #[doc = "Bits 4:6 - Pipe Size"] #[inline(always)] pub fn psize(&self) -> PSIZE_R { PSIZE_R::new(((self.bits >> 
4) & 0x07) as u8) } #[doc = "Bits 8:9 - Pipe Token"] #[inline(always)] pub fn ptoken(&self) -> PTOKEN_R { PTOKEN_R::new(((self.bits >> 8) & 0x03) as u8) } #[doc = "Bit 10 - Automatic Switch"] #[inline(always)] pub fn autosw(&self) -> AUTOSW_R { AUTOSW_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bits 12:13 - Pipe Type"] #[inline(always)] pub fn ptype(&self) -> PTYPE_R { PTYPE_R::new(((self.bits >> 12) & 0x03) as u8) } #[doc = "Bits 16:19 - Pipe Endpoint Number"] #[inline(always)] pub fn pepnum(&self) -> PEPNUM_R { PEPNUM_R::new(((self.bits >> 16) & 0x0f) as u8) } #[doc = "Bits 24:31 - Pipe Interrupt Request Frequency"] #[inline(always)] pub fn intfrq(&self) -> INTFRQ_R { INTFRQ_R::new(((self.bits >> 24) & 0xff) as u8) } } impl W { #[doc = "Bit 1 - Pipe Memory Allocate"] #[inline(always)] pub fn alloc(&mut self) -> ALLOC_W { ALLOC_W { w: self } } #[doc = "Bits 2:3 - Pipe Banks"] #[inline(always)] pub fn pbk(&mut self) -> PBK_W { PBK_W { w: self } } #[doc = "Bits 4:6 - Pipe Size"] #[inline(always)] pub fn psize(&mut self) -> PSIZE_W { PSIZE_W { w: self } } #[doc = "Bits 8:9 - Pipe Token"] #[inline(always)] pub fn ptoken(&mut self) -> PTOKEN_W { PTOKEN_W { w: self } } #[doc = "Bit 10 - Automatic Switch"] #[inline(always)] pub fn autosw(&mut self) -> AUTOSW_W { AUTOSW_W { w: self } } #[doc = "Bits 12:13 - Pipe Type"] #[inline(always)] pub fn ptype(&mut self) -> PTYPE_W { PTYPE_W { w: self } } #[doc = "Bits 16:19 - Pipe Endpoint Number"] #[inline(always)] pub fn pepnum(&mut self) -> PEPNUM_W { PEPNUM_W { w: self } } #[doc = "Bits 24:31 - Pipe Interrupt Request Frequency"] #[inline(always)] pub fn intfrq(&mut self) -> INTFRQ_W { INTFRQ_W { w: self } } #[doc = "Writes raw bits to the register."] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Host Pipe Configuration Register (n = 0)\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [hstpipcfg](index.html) module"] pub struct HSTPIPCFG_SPEC; impl crate::RegisterSpec for HSTPIPCFG_SPEC { type Ux = u32; } #[doc = "`read()` method returns [hstpipcfg::R](R) reader structure"] impl crate::Readable for HSTPIPCFG_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [hstpipcfg::W](W) writer structure"] impl crate::Writable for HSTPIPCFG_SPEC { type Writer = W; }
28.270622
352
0.545892
fb3d3efd650368b4cf564ae6f6ea9abc58d94549
885
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// -*- rust -*-

// Tests for standalone blocks as expressions

fn test_basic() { let rs: bool = { true }; assert (rs); }

struct RS { v1: int, v2: int }

fn test_rec() { let rs = { RS {v1: 10, v2: 20} }; assert (rs.v2 == 20); }

fn test_filled_with_stuff() {
    let rs = {
        let mut a = 0;
        while a < 10 { a += 1; }
        a
    };
    assert (rs == 10);
}

pub fn main() { test_basic(); test_rec(); test_filled_with_stuff(); }
30.517241
73
0.659887
9bacfab8f239d59c78d0830e2a37eb4e21d3fd7c
4,234
use crate::{ error::Result, hash, sync::{self, FileBlame}, AsyncGitNotification, CWD, }; use crossbeam_channel::Sender; use std::{ hash::Hash, sync::{ atomic::{AtomicUsize, Ordering}, Arc, Mutex, }, }; /// #[derive(Hash, Clone, PartialEq)] pub struct BlameParams { /// path to the file to blame pub file_path: String, } struct Request<R, A>(R, Option<A>); #[derive(Default, Clone)] struct LastResult<P, R> { params: P, hash: u64, result: R, } /// pub struct AsyncBlame { current: Arc<Mutex<Request<u64, FileBlame>>>, last: Arc<Mutex<Option<LastResult<BlameParams, FileBlame>>>>, sender: Sender<AsyncGitNotification>, pending: Arc<AtomicUsize>, } impl AsyncBlame { /// pub fn new(sender: &Sender<AsyncGitNotification>) -> Self { Self { current: Arc::new(Mutex::new(Request(0, None))), last: Arc::new(Mutex::new(None)), sender: sender.clone(), pending: Arc::new(AtomicUsize::new(0)), } } /// pub fn last( &mut self, ) -> Result<Option<(BlameParams, FileBlame)>> { let last = self.last.lock()?; Ok(last.clone().map(|last_result| { (last_result.params, last_result.result) })) } /// pub fn refresh(&mut self) -> Result<()> { if let Ok(Some(param)) = self.get_last_param() { self.clear_current()?; self.request(param)?; } Ok(()) } /// pub fn is_pending(&self) -> bool { self.pending.load(Ordering::Relaxed) > 0 } /// pub fn request( &mut self, params: BlameParams, ) -> Result<Option<FileBlame>> { log::trace!("request"); let hash = hash(&params); { let mut current = self.current.lock()?; if current.0 == hash { return Ok(current.1.clone()); } current.0 = hash; current.1 = None; } let arc_current = Arc::clone(&self.current); let arc_last = Arc::clone(&self.last); let sender = self.sender.clone(); let arc_pending = Arc::clone(&self.pending); self.pending.fetch_add(1, Ordering::Relaxed); rayon_core::spawn(move || { let notify = Self::get_blame_helper( params, &arc_last, &arc_current, hash, ); let notify = match notify { Err(err) => { log::error!("get_blame_helper error: {}", err); true } Ok(notify) => notify, }; arc_pending.fetch_sub(1, Ordering::Relaxed); sender .send(if notify { AsyncGitNotification::Blame } else { AsyncGitNotification::FinishUnchanged }) .expect("error sending blame"); }); Ok(None) } fn get_blame_helper( params: BlameParams, arc_last: &Arc< Mutex<Option<LastResult<BlameParams, FileBlame>>>, >, arc_current: &Arc<Mutex<Request<u64, FileBlame>>>, hash: u64, ) -> Result<bool> { let file_blame = sync::blame::blame_file(CWD, &params.file_path)?; let mut notify = false; { let mut current = arc_current.lock()?; if current.0 == hash { current.1 = Some(file_blame.clone()); notify = true; } } { let mut last = arc_last.lock()?; *last = Some(LastResult { result: file_blame, hash, params, }); } Ok(notify) } fn get_last_param(&self) -> Result<Option<BlameParams>> { Ok(self .last .lock()? .clone() .map(|last_result| last_result.params)) } fn clear_current(&mut self) -> Result<()> { let mut current = self.current.lock()?; current.0 = 0; current.1 = None; Ok(()) } }
23.522222
67
0.485357
2fc20e88814021c0c5d1eb2f63a7fa5507ea902c
56
pub mod config;
pub mod messagemap;
pub mod reactionmap;
18.666667
20
0.803571
5d129ef8ee6a86f5313db24ded7b92c97f21616a
1,138
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// Enable just this file for logging to just see packets.
// e.g. "RUST_LOG=neqo_transport::dump neqo-client ..."

use crate::connection::Connection;
use crate::frame::decode_frame;
use crate::packet::PacketHdr;

use neqo_common::{qdebug, Decoder};

#[allow(clippy::module_name_repetitions)]
pub fn dump_packet(conn: &Connection, dir: &str, hdr: &PacketHdr, payload: &[u8]) {
    let mut s = String::from("");
    let mut d = Decoder::from(payload);
    while d.remaining() > 0 {
        let f = match decode_frame(&mut d) {
            Ok(f) => f,
            Err(_) => {
                s.push_str(" [broken]...");
                break;
            }
        };
        if let Some(x) = f.dump() {
            s.push_str(&format!("\n {} {}", dir, &x));
        }
    }
    qdebug!([conn], "pn={} type={:?}{}", hdr.pn, hdr.tipe, s);
}
34.484848
83
0.592267
ab78b75fc1b1a901a93d72724040243f915af2cb
2,189
use anyhow::Result; use mio; use mio::net::UdpSocket; use fdns_format::parse; pub fn serve_forever() -> Result<()> { let poll = mio::Poll::new()?; let mut events = mio::Events::with_capacity(1024); let bind_addresses = &["[::1]:6953", "127.0.0.1:6953"]; let sockets = bind_addresses .iter() .enumerate() .map(|(id, addr)| -> Result<UdpSocket> { let socket = UdpSocket::bind(&addr.parse()?)?; poll.register( &socket, mio::Token(id), mio::Ready::readable(), mio::PollOpt::edge(), )?; Ok(socket) }) .collect::<Result<Vec<UdpSocket>>>()?; loop { poll.poll(&mut events, None)?; for event in &events { let id: usize = event.token().into(); if id >= sockets.len() { unreachable!() } let socket: &UdpSocket = &sockets[id]; let mut buf = [0u8; 512]; let (amt, whom) = socket.recv_from(&mut buf)?; if amt < 12 { println!("[{:?}]: short read", whom); continue; } socket.send_to( match handle(&buf[..amt]) { Ok(Handle::ShortReply(r)) => short_reply(&mut buf, true, r), Err(e) => { println!("[{:?}]: error: {:?}", whom, e); short_reply(&mut buf, true, 2) } }, &whom, )?; } } } enum Handle { ShortReply(u8), } fn handle(buf: &[u8]) -> Result<Handle> { let parsed = parse::parse(buf)?; println!("{:?}", parsed); Ok(Handle::ShortReply(5)) } fn short_reply(buf: &mut [u8], recursion_available: bool, rcode: u8) -> &[u8] { assert!(buf.len() >= 12); assert!(rcode < 6); // response = yes, (opcode, recursion-desired) copied buf[2] = 0b1000_0000 | (buf[2] & 0b0111_1000) | (buf[2] & 0b1); buf[3] = rcode; if recursion_available { buf[3] |= 0b1000_0000; } for i in 4..12 { buf[i] = 0; } &buf[..12] }
25.453488
80
0.451348
bf3d7b92ae951e8c7f2befec531accef29d7715b
3,339
use crate::common::*; use core_foundation::{base::*, dictionary::*, number::*, string::*}; use io_kit_sys::{types::*, usb::lib::*, *}; use mach::kern_return::*; use std::{error::Error, mem::MaybeUninit}; pub fn enumerate_platform(vid: Option<u16>, pid: Option<u16>) -> Vec<UsbDevice> { let mut output = Vec::new(); unsafe { let matching_dict = IOServiceMatching(kIOUSBDeviceClassName); if matching_dict.as_ref().is_none() { panic!("Failed to get IOServiceMatching"); } let mut iter: io_iterator_t = 0; let kr = IOServiceGetMatchingServices(kIOMasterPortDefault, matching_dict, &mut iter); if kr != KERN_SUCCESS { panic!("Failed IOServiceGetMatchingServices"); } #[allow(unused_assignments)] let mut device: io_service_t = 0; #[allow(clippy::unit_cmp)] while (device = IOIteratorNext(iter)) == () && device > 0 { #[allow(clippy::uninit_assumed_init)] let mut props: CFMutableDictionaryRef = MaybeUninit::uninit().assume_init(); let _result = IORegistryEntryCreateCFProperties(device, &mut props, kCFAllocatorDefault, 0); let properties: CFDictionary<CFString, CFType> = CFMutableDictionary::wrap_under_get_rule(props).to_immutable(); let _ = || -> Result<(), Box<dyn Error>> { let key = CFString::from_static_string("idVendor"); let vendor_id = properties .find(&key) .and_then(|value_ref| value_ref.downcast::<CFNumber>()) .ok_or(ParseError)? .to_i32() .ok_or(ParseError)? as u16; if let Some(vid) = vid { if vid != vendor_id { return Ok(()); } } let key = CFString::from_static_string("idProduct"); let product_id = properties .find(&key) .and_then(|value_ref| value_ref.downcast::<CFNumber>()) .ok_or(ParseError)? .to_i32() .ok_or(ParseError)? as u16; if let Some(pid) = pid { if pid != product_id { return Ok(()); } } let key = CFString::from_static_string("sessionID"); let id = properties .find(&key) .and_then(|value_ref| value_ref.downcast::<CFNumber>()) .ok_or(ParseError)? .to_i64() .ok_or(ParseError)?; let key = CFString::from_static_string("USB Product Name"); let description = properties .find(&key) .and_then(|value_ref| value_ref.downcast::<CFString>()) .map(|s| s.to_string()); output.push(UsbDevice { id: id.to_string(), vendor_id, product_id, description, }); Ok(()) }(); IOObjectRelease(device); } IOObjectRelease(iter); } output }
34.071429
94
0.48847
3381698d0b94f3303758661c8424bc7d6e2d66e9
603
use proc_macro2 as m4;
use quote::quote;

use super::utilities::ident_new;
use crate::ast;

pub(super) trait GenImport {
    fn import_crate(&self) -> m4::TokenStream;
}

impl GenImport for ast::ImportStmt {
    fn import_crate(&self) -> m4::TokenStream {
        let mut stmt = quote!(use super::);
        for _ in 0..self.path_supers() {
            stmt = quote!(#stmt super::);
        }
        for part in self.paths() {
            let part = ident_new(part);
            stmt = quote!(#stmt #part::);
        }
        let name = ident_new(self.name());
        quote!(#stmt #name::*;)
    }
}
24.12
47
0.557214
4aad9105ebdf9c71462e2cb729689f83f074d6ed
6,426
use crate::os::process::can_run_services_as_svc_user; use nix::unistd::{setgid, setuid, Gid, Uid}; use std::{ffi::OsStr, io, os::unix::process::CommandExt, process::{Command, Stdio}, result}; /// Prepare a `Command` to execute a lifecycle hook. // TODO (CM): Ideally, `ids` would not be an `Option`, but separate // `Uid` and `Gid` inputs. However, the `Option` interface provides // the least disruption to other existing code for the time being. pub fn hook_command<X, I, K, V>(executable: X, env: I, ids: Option<(Uid, Gid)>) -> Command where X: AsRef<OsStr>, I: IntoIterator<Item = (K, V)>, K: AsRef<OsStr>, V: AsRef<OsStr> { let mut cmd = Command::new(executable); // NOTE: CommandExt::uid and CommandExt::guid should *not* be // called here! They are set in `with_user_and_group_information`; // see there for further details. cmd.stdin(Stdio::null()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .envs(env); with_own_process_group(&mut cmd); if let Some((uid, gid)) = ids { with_user_and_group_information(&mut cmd, uid, gid); } cmd } /// Ensures that the `Command` is executed within its own process /// group, and not that of its parent process. /// /// This should be used when spawning all hooks. This ensures that /// they are not the same process group as the Launcher (for `run` /// hooks) or the Supervisor (for all other hooks). Otherwise, if a /// child process were to send `SIGTERM`, the Launcher could be /// terminated. Similarly, it prevents a `^C` sent to a foregrounded /// Supervisor from terminating any hooks prematurely. /// /// This basically ensures that all hooks are properly isolated, /// without signaling cross-talk between them and the Launcher / /// Supervisor. fn with_own_process_group(cmd: &mut Command) -> &mut Command { unsafe { cmd.pre_exec(set_own_process_group); } cmd } /// Set the process group of the calling process to be the same as its /// PID. /// /// Intended for use in a /// `std::os::unix::process::CommandExt::pre_exec` callback. fn set_own_process_group() -> result::Result<(), io::Error> { unsafe { if libc::setpgid(0, 0) == 0 { Ok(()) } else { Err(io::Error::last_os_error()) } } } /// Sets uid, gid, and supplementary groups on command. /// /// DO NOT call `CommandExt#uid` or `CommandExt#gid` on this command, /// either before or after calling this function, or it will probably /// not work like you want it to. fn with_user_and_group_information(cmd: &mut Command, uid: Uid, gid: Gid) -> &mut Command { unsafe { cmd.pre_exec(set_supplementary_groups(uid, gid)); } cmd } /// Stupid little private helper macro to make mapping `Nix` errors to /// IO errors for our `pre_exec` hooks. /// /// The format string should have a single variable placeholder for /// the actual error. /// /// e.g. `result.map_err(io_err!("blah blah {:?}"))` macro_rules! io_error { ($format_string:tt) => { move |e| io::Error::new(io::ErrorKind::Other, format!($format_string, e)) }; } /// Returns a function that sets the supplementary group IDs of the /// process to those that `user_id` belongs to. /// /// Also sets the uid and gid of the process. We must do that here, /// rather than using the `CommandExt::uid` and `CommandExt::gid` /// methods to ensure that all the IDs are set on the process in the /// correct order (that is, supplementary groups, gid, and finally uid). /// /// Once https://github.com/rust-lang/rust/pull/72160 merges, we can /// use all `CommandExt` methods, and thus simplify things a (little) /// bit. 
fn set_supplementary_groups(user_id: Uid, group_id: Gid) -> impl Fn() -> result::Result<(), io::Error> { // Note: since this function will be run a separate process that doesn't // inherit RUST_LOG, none of the log! macros will work actually // work here. move || { // Note that if we *can't* run services as another user, // that's OK; not an error. We just won't set supplementary // groups, and run all hooks as the user we currently are. if can_run_services_as_svc_user() { // These calls don't have a macOS counterpart (well, not a // direct one in nix, at least) and we don't need to // execute hooks on macOS, so it's not important to // implement this. Our crates aren't really factored well // enough to let us cut out larger chunks of code on macOS // at the moment, so we just won't compile this particular // bit for now. #[cfg(not(target_os = "macos"))] { use nix::unistd::{getgrouplist, setgroups, User}; use std::ffi::CString; if let Some(user) = User::from_uid(user_id).map_err(io_error!("Error resolving \ user from ID: \ {:?}"))? { let user = CString::new(user.name).map_err(io_error!("User name cannot \ convert to CString!: \ {:?}"))?; let groups = getgrouplist(&user, group_id).map_err(io_error!("getgrouplist \ failed!: {:?}"))?; setgroups(&groups).map_err(io_error!("setgroups failed! {:?}"))?; // CAP_SETGID } else { return Err(io::Error::new(io::ErrorKind::Other, "Could not find user from user ID")); } } // These calls replace `CommandExt::uid` and `CommandExt::gid` setgid(group_id).map_err(io_error!("setgid failed! {:?}"))?; // CAP_SETGID setuid(user_id).map_err(io_error!("setuid failed! {:?}"))?; // CAP_SETUID } Ok(()) } }
39.913043
100
0.561158
8947b28845dc649afb9f5b6a05fda92535ba0a28
258
//! Test suite for Node.js

#![cfg(target_arch = "wasm32")]
#![cfg(feature = "node-tests")]

extern crate wasm_bindgen_test;

use frappe_tea::prelude::*;
use wasm_bindgen_test::*;

#[wasm_bindgen_test]
fn is_not_browser() {
    assert!(!env::is_browser());
}
18.428571
32
0.686047
acd771a57a070e400f4aa71ff99fd79c386ca4cd
48,344
// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/gtk-rs/gir-files) // DO NOT EDIT #![allow(non_camel_case_types, non_upper_case_globals, non_snake_case)] #![allow( clippy::approx_constant, clippy::type_complexity, clippy::unreadable_literal )] #![cfg_attr(feature = "dox", feature(doc_cfg))] use cairo_sys as cairo; use gdk4_sys as gdk; use glib_sys as glib; use gobject_sys as gobject; use graphene_sys as graphene; use pango_sys as pango; #[allow(unused_imports)] use libc::{ c_char, c_double, c_float, c_int, c_long, c_short, c_uchar, c_uint, c_ulong, c_ushort, c_void, intptr_t, size_t, ssize_t, time_t, uintptr_t, FILE, }; #[allow(unused_imports)] use glib::{gboolean, gconstpointer, gpointer, GType}; // Enums pub type GskBlendMode = c_int; pub const GSK_BLEND_MODE_DEFAULT: GskBlendMode = 0; pub const GSK_BLEND_MODE_MULTIPLY: GskBlendMode = 1; pub const GSK_BLEND_MODE_SCREEN: GskBlendMode = 2; pub const GSK_BLEND_MODE_OVERLAY: GskBlendMode = 3; pub const GSK_BLEND_MODE_DARKEN: GskBlendMode = 4; pub const GSK_BLEND_MODE_LIGHTEN: GskBlendMode = 5; pub const GSK_BLEND_MODE_COLOR_DODGE: GskBlendMode = 6; pub const GSK_BLEND_MODE_COLOR_BURN: GskBlendMode = 7; pub const GSK_BLEND_MODE_HARD_LIGHT: GskBlendMode = 8; pub const GSK_BLEND_MODE_SOFT_LIGHT: GskBlendMode = 9; pub const GSK_BLEND_MODE_DIFFERENCE: GskBlendMode = 10; pub const GSK_BLEND_MODE_EXCLUSION: GskBlendMode = 11; pub const GSK_BLEND_MODE_COLOR: GskBlendMode = 12; pub const GSK_BLEND_MODE_HUE: GskBlendMode = 13; pub const GSK_BLEND_MODE_SATURATION: GskBlendMode = 14; pub const GSK_BLEND_MODE_LUMINOSITY: GskBlendMode = 15; pub type GskCorner = c_int; pub const GSK_CORNER_TOP_LEFT: GskCorner = 0; pub const GSK_CORNER_TOP_RIGHT: GskCorner = 1; pub const GSK_CORNER_BOTTOM_RIGHT: GskCorner = 2; pub const GSK_CORNER_BOTTOM_LEFT: GskCorner = 3; pub type GskGLUniformType = c_int; pub const GSK_GL_UNIFORM_TYPE_NONE: GskGLUniformType = 0; pub const GSK_GL_UNIFORM_TYPE_FLOAT: GskGLUniformType = 1; pub const GSK_GL_UNIFORM_TYPE_INT: GskGLUniformType = 2; pub const GSK_GL_UNIFORM_TYPE_UINT: GskGLUniformType = 3; pub const GSK_GL_UNIFORM_TYPE_BOOL: GskGLUniformType = 4; pub const GSK_GL_UNIFORM_TYPE_VEC2: GskGLUniformType = 5; pub const GSK_GL_UNIFORM_TYPE_VEC3: GskGLUniformType = 6; pub const GSK_GL_UNIFORM_TYPE_VEC4: GskGLUniformType = 7; pub type GskRenderNodeType = c_int; pub const GSK_NOT_A_RENDER_NODE: GskRenderNodeType = 0; pub const GSK_CONTAINER_NODE: GskRenderNodeType = 1; pub const GSK_CAIRO_NODE: GskRenderNodeType = 2; pub const GSK_COLOR_NODE: GskRenderNodeType = 3; pub const GSK_LINEAR_GRADIENT_NODE: GskRenderNodeType = 4; pub const GSK_REPEATING_LINEAR_GRADIENT_NODE: GskRenderNodeType = 5; pub const GSK_RADIAL_GRADIENT_NODE: GskRenderNodeType = 6; pub const GSK_REPEATING_RADIAL_GRADIENT_NODE: GskRenderNodeType = 7; pub const GSK_BORDER_NODE: GskRenderNodeType = 8; pub const GSK_TEXTURE_NODE: GskRenderNodeType = 9; pub const GSK_INSET_SHADOW_NODE: GskRenderNodeType = 10; pub const GSK_OUTSET_SHADOW_NODE: GskRenderNodeType = 11; pub const GSK_TRANSFORM_NODE: GskRenderNodeType = 12; pub const GSK_OPACITY_NODE: GskRenderNodeType = 13; pub const GSK_COLOR_MATRIX_NODE: GskRenderNodeType = 14; pub const GSK_REPEAT_NODE: GskRenderNodeType = 15; pub const GSK_CLIP_NODE: GskRenderNodeType = 16; pub const GSK_ROUNDED_CLIP_NODE: GskRenderNodeType = 17; pub const GSK_SHADOW_NODE: GskRenderNodeType = 18; pub const GSK_BLEND_NODE: GskRenderNodeType = 19; pub const GSK_CROSS_FADE_NODE: 
GskRenderNodeType = 20; pub const GSK_TEXT_NODE: GskRenderNodeType = 21; pub const GSK_BLUR_NODE: GskRenderNodeType = 22; pub const GSK_DEBUG_NODE: GskRenderNodeType = 23; pub const GSK_GL_SHADER_NODE: GskRenderNodeType = 24; pub type GskScalingFilter = c_int; pub const GSK_SCALING_FILTER_LINEAR: GskScalingFilter = 0; pub const GSK_SCALING_FILTER_NEAREST: GskScalingFilter = 1; pub const GSK_SCALING_FILTER_TRILINEAR: GskScalingFilter = 2; pub type GskSerializationError = c_int; pub const GSK_SERIALIZATION_UNSUPPORTED_FORMAT: GskSerializationError = 0; pub const GSK_SERIALIZATION_UNSUPPORTED_VERSION: GskSerializationError = 1; pub const GSK_SERIALIZATION_INVALID_DATA: GskSerializationError = 2; pub type GskTransformCategory = c_int; pub const GSK_TRANSFORM_CATEGORY_UNKNOWN: GskTransformCategory = 0; pub const GSK_TRANSFORM_CATEGORY_ANY: GskTransformCategory = 1; pub const GSK_TRANSFORM_CATEGORY_3D: GskTransformCategory = 2; pub const GSK_TRANSFORM_CATEGORY_2D: GskTransformCategory = 3; pub const GSK_TRANSFORM_CATEGORY_2D_AFFINE: GskTransformCategory = 4; pub const GSK_TRANSFORM_CATEGORY_2D_TRANSLATE: GskTransformCategory = 5; pub const GSK_TRANSFORM_CATEGORY_IDENTITY: GskTransformCategory = 6; // Callbacks pub type GskParseErrorFunc = Option<unsafe extern "C" fn(gconstpointer, *const glib::GError, gpointer)>; // Records #[repr(C)] pub struct _GskBroadwayRendererClass(c_void); pub type GskBroadwayRendererClass = *mut _GskBroadwayRendererClass; #[repr(C)] pub struct _GskCairoRendererClass(c_void); pub type GskCairoRendererClass = *mut _GskCairoRendererClass; #[repr(C)] #[derive(Copy, Clone)] pub struct GskColorStop { pub offset: c_float, pub color: gdk::GdkRGBA, } impl ::std::fmt::Debug for GskColorStop { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskColorStop @ {:?}", self as *const _)) .field("offset", &self.offset) .field("color", &self.color) .finish() } } #[repr(C)] pub struct _GskGLRendererClass(c_void); pub type GskGLRendererClass = *mut _GskGLRendererClass; #[repr(C)] #[derive(Copy, Clone)] pub struct GskGLShaderClass { pub parent_class: gobject::GObjectClass, } impl ::std::fmt::Debug for GskGLShaderClass { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskGLShaderClass @ {:?}", self as *const _)) .field("parent_class", &self.parent_class) .finish() } } #[repr(C)] pub struct _GskRendererClass(c_void); pub type GskRendererClass = *mut _GskRendererClass; #[repr(C)] #[derive(Copy, Clone)] pub struct GskRoundedRect { pub bounds: graphene::graphene_rect_t, pub corner: [graphene::graphene_size_t; 4], } impl ::std::fmt::Debug for GskRoundedRect { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskRoundedRect @ {:?}", self as *const _)) .field("bounds", &self.bounds) .field("corner", &self.corner) .finish() } } #[repr(C)] pub struct GskShaderArgsBuilder(c_void); impl ::std::fmt::Debug for GskShaderArgsBuilder { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskShaderArgsBuilder @ {:?}", self as *const _)) .finish() } } #[repr(C)] #[derive(Copy, Clone)] pub struct GskShadow { pub color: gdk::GdkRGBA, pub dx: c_float, pub dy: c_float, pub radius: c_float, } impl ::std::fmt::Debug for GskShadow { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskShadow @ {:?}", self as *const _)) .field("color", &self.color) .field("dx", &self.dx) .field("dy", &self.dy) 
.field("radius", &self.radius) .finish() } } #[repr(C)] pub struct GskTransform(c_void); impl ::std::fmt::Debug for GskTransform { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskTransform @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct _GskVulkanRendererClass(c_void); pub type GskVulkanRendererClass = *mut _GskVulkanRendererClass; // Classes #[repr(C)] pub struct GskBlendNode(c_void); impl ::std::fmt::Debug for GskBlendNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskBlendNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskBlurNode(c_void); impl ::std::fmt::Debug for GskBlurNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskBlurNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskBorderNode(c_void); impl ::std::fmt::Debug for GskBorderNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskBorderNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskBroadwayRenderer(c_void); impl ::std::fmt::Debug for GskBroadwayRenderer { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskBroadwayRenderer @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskCairoNode(c_void); impl ::std::fmt::Debug for GskCairoNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskCairoNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskCairoRenderer(c_void); impl ::std::fmt::Debug for GskCairoRenderer { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskCairoRenderer @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskClipNode(c_void); impl ::std::fmt::Debug for GskClipNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskClipNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskColorMatrixNode(c_void); impl ::std::fmt::Debug for GskColorMatrixNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskColorMatrixNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskColorNode(c_void); impl ::std::fmt::Debug for GskColorNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskColorNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskContainerNode(c_void); impl ::std::fmt::Debug for GskContainerNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskContainerNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskCrossFadeNode(c_void); impl ::std::fmt::Debug for GskCrossFadeNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskCrossFadeNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskDebugNode(c_void); impl ::std::fmt::Debug for GskDebugNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskDebugNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskGLRenderer(c_void); impl ::std::fmt::Debug for GskGLRenderer { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskGLRenderer @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub 
struct GskGLShader(c_void); impl ::std::fmt::Debug for GskGLShader { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskGLShader @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskGLShaderNode(c_void); impl ::std::fmt::Debug for GskGLShaderNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskGLShaderNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskInsetShadowNode(c_void); impl ::std::fmt::Debug for GskInsetShadowNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskInsetShadowNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskLinearGradientNode(c_void); impl ::std::fmt::Debug for GskLinearGradientNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskLinearGradientNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskOpacityNode(c_void); impl ::std::fmt::Debug for GskOpacityNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskOpacityNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskOutsetShadowNode(c_void); impl ::std::fmt::Debug for GskOutsetShadowNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskOutsetShadowNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskRadialGradientNode(c_void); impl ::std::fmt::Debug for GskRadialGradientNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskRadialGradientNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskRenderNode(c_void); impl ::std::fmt::Debug for GskRenderNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskRenderNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskRenderer(c_void); impl ::std::fmt::Debug for GskRenderer { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskRenderer @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskRepeatNode(c_void); impl ::std::fmt::Debug for GskRepeatNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskRepeatNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskRepeatingLinearGradientNode(c_void); impl ::std::fmt::Debug for GskRepeatingLinearGradientNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!( "GskRepeatingLinearGradientNode @ {:?}", self as *const _ )) .finish() } } #[repr(C)] pub struct GskRepeatingRadialGradientNode(c_void); impl ::std::fmt::Debug for GskRepeatingRadialGradientNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!( "GskRepeatingRadialGradientNode @ {:?}", self as *const _ )) .finish() } } #[repr(C)] pub struct GskRoundedClipNode(c_void); impl ::std::fmt::Debug for GskRoundedClipNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskRoundedClipNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskShadowNode(c_void); impl ::std::fmt::Debug for GskShadowNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskShadowNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct 
GskTextNode(c_void); impl ::std::fmt::Debug for GskTextNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskTextNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskTextureNode(c_void); impl ::std::fmt::Debug for GskTextureNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskTextureNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskTransformNode(c_void); impl ::std::fmt::Debug for GskTransformNode { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskTransformNode @ {:?}", self as *const _)) .finish() } } #[repr(C)] pub struct GskVulkanRenderer(c_void); impl ::std::fmt::Debug for GskVulkanRenderer { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { f.debug_struct(&format!("GskVulkanRenderer @ {:?}", self as *const _)) .finish() } } #[link(name = "gtk-4")] extern "C" { //========================================================================= // GskBlendMode //========================================================================= pub fn gsk_blend_mode_get_type() -> GType; //========================================================================= // GskCorner //========================================================================= pub fn gsk_corner_get_type() -> GType; //========================================================================= // GskGLUniformType //========================================================================= pub fn gsk_gl_uniform_type_get_type() -> GType; //========================================================================= // GskRenderNodeType //========================================================================= pub fn gsk_render_node_type_get_type() -> GType; //========================================================================= // GskScalingFilter //========================================================================= pub fn gsk_scaling_filter_get_type() -> GType; //========================================================================= // GskSerializationError //========================================================================= pub fn gsk_serialization_error_get_type() -> GType; pub fn gsk_serialization_error_quark() -> glib::GQuark; //========================================================================= // GskTransformCategory //========================================================================= pub fn gsk_transform_category_get_type() -> GType; //========================================================================= // GskRoundedRect //========================================================================= pub fn gsk_rounded_rect_contains_point( self_: *const GskRoundedRect, point: *const graphene::graphene_point_t, ) -> gboolean; pub fn gsk_rounded_rect_contains_rect( self_: *const GskRoundedRect, rect: *const graphene::graphene_rect_t, ) -> gboolean; pub fn gsk_rounded_rect_init( self_: *mut GskRoundedRect, bounds: *const graphene::graphene_rect_t, top_left: *const graphene::graphene_size_t, top_right: *const graphene::graphene_size_t, bottom_right: *const graphene::graphene_size_t, bottom_left: *const graphene::graphene_size_t, ) -> *mut GskRoundedRect; pub fn gsk_rounded_rect_init_copy( self_: *mut GskRoundedRect, src: *const GskRoundedRect, ) -> *mut GskRoundedRect; pub fn gsk_rounded_rect_init_from_rect( self_: *mut GskRoundedRect, bounds: *const graphene::graphene_rect_t, radius: c_float, ) -> 
*mut GskRoundedRect; pub fn gsk_rounded_rect_intersects_rect( self_: *const GskRoundedRect, rect: *const graphene::graphene_rect_t, ) -> gboolean; pub fn gsk_rounded_rect_is_rectilinear(self_: *const GskRoundedRect) -> gboolean; pub fn gsk_rounded_rect_normalize(self_: *mut GskRoundedRect) -> *mut GskRoundedRect; pub fn gsk_rounded_rect_offset( self_: *mut GskRoundedRect, dx: c_float, dy: c_float, ) -> *mut GskRoundedRect; pub fn gsk_rounded_rect_shrink( self_: *mut GskRoundedRect, top: c_float, right: c_float, bottom: c_float, left: c_float, ) -> *mut GskRoundedRect; //========================================================================= // GskShaderArgsBuilder //========================================================================= pub fn gsk_shader_args_builder_get_type() -> GType; pub fn gsk_shader_args_builder_new( shader: *mut GskGLShader, initial_values: *mut glib::GBytes, ) -> *mut GskShaderArgsBuilder; pub fn gsk_shader_args_builder_free_to_args( builder: *mut GskShaderArgsBuilder, ) -> *mut glib::GBytes; pub fn gsk_shader_args_builder_ref( builder: *mut GskShaderArgsBuilder, ) -> *mut GskShaderArgsBuilder; pub fn gsk_shader_args_builder_set_bool( builder: *mut GskShaderArgsBuilder, idx: c_int, value: gboolean, ); pub fn gsk_shader_args_builder_set_float( builder: *mut GskShaderArgsBuilder, idx: c_int, value: c_float, ); pub fn gsk_shader_args_builder_set_int( builder: *mut GskShaderArgsBuilder, idx: c_int, value: i32, ); pub fn gsk_shader_args_builder_set_uint( builder: *mut GskShaderArgsBuilder, idx: c_int, value: u32, ); pub fn gsk_shader_args_builder_set_vec2( builder: *mut GskShaderArgsBuilder, idx: c_int, value: *const graphene::graphene_vec2_t, ); pub fn gsk_shader_args_builder_set_vec3( builder: *mut GskShaderArgsBuilder, idx: c_int, value: *const graphene::graphene_vec3_t, ); pub fn gsk_shader_args_builder_set_vec4( builder: *mut GskShaderArgsBuilder, idx: c_int, value: *const graphene::graphene_vec4_t, ); pub fn gsk_shader_args_builder_to_args(builder: *mut GskShaderArgsBuilder) -> *mut glib::GBytes; pub fn gsk_shader_args_builder_unref(builder: *mut GskShaderArgsBuilder); //========================================================================= // GskTransform //========================================================================= pub fn gsk_transform_get_type() -> GType; pub fn gsk_transform_new() -> *mut GskTransform; pub fn gsk_transform_equal(first: *mut GskTransform, second: *mut GskTransform) -> gboolean; pub fn gsk_transform_get_category(self_: *mut GskTransform) -> GskTransformCategory; pub fn gsk_transform_invert(self_: *mut GskTransform) -> *mut GskTransform; pub fn gsk_transform_matrix( next: *mut GskTransform, matrix: *const graphene::graphene_matrix_t, ) -> *mut GskTransform; pub fn gsk_transform_perspective(next: *mut GskTransform, depth: c_float) -> *mut GskTransform; pub fn gsk_transform_print(self_: *mut GskTransform, string: *mut glib::GString); pub fn gsk_transform_ref(self_: *mut GskTransform) -> *mut GskTransform; pub fn gsk_transform_rotate(next: *mut GskTransform, angle: c_float) -> *mut GskTransform; pub fn gsk_transform_rotate_3d( next: *mut GskTransform, angle: c_float, axis: *const graphene::graphene_vec3_t, ) -> *mut GskTransform; pub fn gsk_transform_scale( next: *mut GskTransform, factor_x: c_float, factor_y: c_float, ) -> *mut GskTransform; pub fn gsk_transform_scale_3d( next: *mut GskTransform, factor_x: c_float, factor_y: c_float, factor_z: c_float, ) -> *mut GskTransform; pub fn gsk_transform_to_2d( self_: *mut 
GskTransform, out_xx: *mut c_float, out_yx: *mut c_float, out_xy: *mut c_float, out_yy: *mut c_float, out_dx: *mut c_float, out_dy: *mut c_float, ); pub fn gsk_transform_to_affine( self_: *mut GskTransform, out_scale_x: *mut c_float, out_scale_y: *mut c_float, out_dx: *mut c_float, out_dy: *mut c_float, ); pub fn gsk_transform_to_matrix( self_: *mut GskTransform, out_matrix: *mut graphene::graphene_matrix_t, ); pub fn gsk_transform_to_string(self_: *mut GskTransform) -> *mut c_char; pub fn gsk_transform_to_translate( self_: *mut GskTransform, out_dx: *mut c_float, out_dy: *mut c_float, ); pub fn gsk_transform_transform( next: *mut GskTransform, other: *mut GskTransform, ) -> *mut GskTransform; pub fn gsk_transform_transform_bounds( self_: *mut GskTransform, rect: *const graphene::graphene_rect_t, out_rect: *mut graphene::graphene_rect_t, ); pub fn gsk_transform_transform_point( self_: *mut GskTransform, point: *const graphene::graphene_point_t, out_point: *mut graphene::graphene_point_t, ); pub fn gsk_transform_translate( next: *mut GskTransform, point: *const graphene::graphene_point_t, ) -> *mut GskTransform; pub fn gsk_transform_translate_3d( next: *mut GskTransform, point: *const graphene::graphene_point3d_t, ) -> *mut GskTransform; pub fn gsk_transform_unref(self_: *mut GskTransform); pub fn gsk_transform_parse( string: *const c_char, out_transform: *mut *mut GskTransform, ) -> gboolean; //========================================================================= // GskBlendNode //========================================================================= pub fn gsk_blend_node_get_type() -> GType; pub fn gsk_blend_node_new( bottom: *mut GskRenderNode, top: *mut GskRenderNode, blend_mode: GskBlendMode, ) -> *mut GskBlendNode; pub fn gsk_blend_node_get_blend_mode(node: *mut GskBlendNode) -> GskBlendMode; pub fn gsk_blend_node_get_bottom_child(node: *mut GskBlendNode) -> *mut GskRenderNode; pub fn gsk_blend_node_get_top_child(node: *mut GskBlendNode) -> *mut GskRenderNode; //========================================================================= // GskBlurNode //========================================================================= pub fn gsk_blur_node_get_type() -> GType; pub fn gsk_blur_node_new(child: *mut GskRenderNode, radius: c_float) -> *mut GskBlurNode; pub fn gsk_blur_node_get_child(node: *mut GskBlurNode) -> *mut GskRenderNode; pub fn gsk_blur_node_get_radius(node: *mut GskBlurNode) -> c_float; //========================================================================= // GskBorderNode //========================================================================= pub fn gsk_border_node_get_type() -> GType; pub fn gsk_border_node_new( outline: *const GskRoundedRect, border_width: *const [c_float; 4], border_color: *const [gdk::GdkRGBA; 4], ) -> *mut GskBorderNode; pub fn gsk_border_node_peek_colors(node: *mut GskBorderNode) -> *const gdk::GdkRGBA; pub fn gsk_border_node_peek_outline(node: *mut GskBorderNode) -> *const GskRoundedRect; pub fn gsk_border_node_peek_widths(node: *mut GskBorderNode) -> *const c_float; //========================================================================= // GskBroadwayRenderer //========================================================================= pub fn gsk_broadway_renderer_get_type() -> GType; pub fn gsk_broadway_renderer_new() -> *mut GskRenderer; //========================================================================= // GskCairoNode //========================================================================= pub fn 
gsk_cairo_node_get_type() -> GType; pub fn gsk_cairo_node_new(bounds: *const graphene::graphene_rect_t) -> *mut GskCairoNode; pub fn gsk_cairo_node_get_draw_context(node: *mut GskCairoNode) -> *mut cairo::cairo_t; pub fn gsk_cairo_node_peek_surface(node: *mut GskCairoNode) -> *mut cairo::cairo_surface_t; //========================================================================= // GskCairoRenderer //========================================================================= pub fn gsk_cairo_renderer_get_type() -> GType; pub fn gsk_cairo_renderer_new() -> *mut GskRenderer; //========================================================================= // GskClipNode //========================================================================= pub fn gsk_clip_node_get_type() -> GType; pub fn gsk_clip_node_new( child: *mut GskRenderNode, clip: *const graphene::graphene_rect_t, ) -> *mut GskClipNode; pub fn gsk_clip_node_get_child(node: *mut GskClipNode) -> *mut GskRenderNode; pub fn gsk_clip_node_peek_clip(node: *mut GskClipNode) -> *const graphene::graphene_rect_t; //========================================================================= // GskColorMatrixNode //========================================================================= pub fn gsk_color_matrix_node_get_type() -> GType; pub fn gsk_color_matrix_node_new( child: *mut GskRenderNode, color_matrix: *const graphene::graphene_matrix_t, color_offset: *const graphene::graphene_vec4_t, ) -> *mut GskColorMatrixNode; pub fn gsk_color_matrix_node_get_child(node: *mut GskColorMatrixNode) -> *mut GskRenderNode; pub fn gsk_color_matrix_node_peek_color_matrix( node: *mut GskColorMatrixNode, ) -> *const graphene::graphene_matrix_t; pub fn gsk_color_matrix_node_peek_color_offset( node: *mut GskColorMatrixNode, ) -> *const graphene::graphene_vec4_t; //========================================================================= // GskColorNode //========================================================================= pub fn gsk_color_node_get_type() -> GType; pub fn gsk_color_node_new( rgba: *const gdk::GdkRGBA, bounds: *const graphene::graphene_rect_t, ) -> *mut GskColorNode; pub fn gsk_color_node_peek_color(node: *mut GskColorNode) -> *const gdk::GdkRGBA; //========================================================================= // GskContainerNode //========================================================================= pub fn gsk_container_node_get_type() -> GType; pub fn gsk_container_node_new( children: *mut *mut GskRenderNode, n_children: c_uint, ) -> *mut GskContainerNode; pub fn gsk_container_node_get_child( node: *mut GskContainerNode, idx: c_uint, ) -> *mut GskRenderNode; pub fn gsk_container_node_get_n_children(node: *mut GskContainerNode) -> c_uint; //========================================================================= // GskCrossFadeNode //========================================================================= pub fn gsk_cross_fade_node_get_type() -> GType; pub fn gsk_cross_fade_node_new( start: *mut GskRenderNode, end: *mut GskRenderNode, progress: c_float, ) -> *mut GskCrossFadeNode; pub fn gsk_cross_fade_node_get_end_child(node: *mut GskCrossFadeNode) -> *mut GskRenderNode; pub fn gsk_cross_fade_node_get_progress(node: *mut GskCrossFadeNode) -> c_float; pub fn gsk_cross_fade_node_get_start_child(node: *mut GskCrossFadeNode) -> *mut GskRenderNode; //========================================================================= // GskDebugNode //========================================================================= pub fn 
gsk_debug_node_get_type() -> GType; pub fn gsk_debug_node_new(child: *mut GskRenderNode, message: *mut c_char) -> *mut GskDebugNode; pub fn gsk_debug_node_get_child(node: *mut GskDebugNode) -> *mut GskRenderNode; pub fn gsk_debug_node_get_message(node: *mut GskDebugNode) -> *const c_char; //========================================================================= // GskGLRenderer //========================================================================= pub fn gsk_gl_renderer_get_type() -> GType; pub fn gsk_gl_renderer_new() -> *mut GskRenderer; //========================================================================= // GskGLShader //========================================================================= pub fn gsk_gl_shader_get_type() -> GType; pub fn gsk_gl_shader_new_from_bytes(sourcecode: *mut glib::GBytes) -> *mut GskGLShader; pub fn gsk_gl_shader_new_from_resource(resource_path: *const c_char) -> *mut GskGLShader; pub fn gsk_gl_shader_compile( shader: *mut GskGLShader, renderer: *mut GskRenderer, error: *mut *mut glib::GError, ) -> gboolean; pub fn gsk_gl_shader_find_uniform_by_name( shader: *mut GskGLShader, name: *const c_char, ) -> c_int; pub fn gsk_gl_shader_format_args(shader: *mut GskGLShader, ...) -> *mut glib::GBytes; //pub fn gsk_gl_shader_format_args_va(shader: *mut GskGLShader, uniforms: /*Unimplemented*/va_list) -> *mut glib::GBytes; pub fn gsk_gl_shader_get_arg_bool( shader: *mut GskGLShader, args: *mut glib::GBytes, idx: c_int, ) -> gboolean; pub fn gsk_gl_shader_get_arg_float( shader: *mut GskGLShader, args: *mut glib::GBytes, idx: c_int, ) -> c_float; pub fn gsk_gl_shader_get_arg_int( shader: *mut GskGLShader, args: *mut glib::GBytes, idx: c_int, ) -> i32; pub fn gsk_gl_shader_get_arg_uint( shader: *mut GskGLShader, args: *mut glib::GBytes, idx: c_int, ) -> u32; pub fn gsk_gl_shader_get_arg_vec2( shader: *mut GskGLShader, args: *mut glib::GBytes, idx: c_int, out_value: *mut graphene::graphene_vec2_t, ); pub fn gsk_gl_shader_get_arg_vec3( shader: *mut GskGLShader, args: *mut glib::GBytes, idx: c_int, out_value: *mut graphene::graphene_vec3_t, ); pub fn gsk_gl_shader_get_arg_vec4( shader: *mut GskGLShader, args: *mut glib::GBytes, idx: c_int, out_value: *mut graphene::graphene_vec4_t, ); pub fn gsk_gl_shader_get_args_size(shader: *mut GskGLShader) -> size_t; pub fn gsk_gl_shader_get_n_textures(shader: *mut GskGLShader) -> c_int; pub fn gsk_gl_shader_get_n_uniforms(shader: *mut GskGLShader) -> c_int; pub fn gsk_gl_shader_get_resource(shader: *mut GskGLShader) -> *const c_char; pub fn gsk_gl_shader_get_source(shader: *mut GskGLShader) -> *mut glib::GBytes; pub fn gsk_gl_shader_get_uniform_name(shader: *mut GskGLShader, idx: c_int) -> *const c_char; pub fn gsk_gl_shader_get_uniform_offset(shader: *mut GskGLShader, idx: c_int) -> c_int; pub fn gsk_gl_shader_get_uniform_type(shader: *mut GskGLShader, idx: c_int) -> GskGLUniformType; //========================================================================= // GskGLShaderNode //========================================================================= pub fn gsk_gl_shader_node_get_type() -> GType; pub fn gsk_gl_shader_node_new( shader: *mut GskGLShader, bounds: *const graphene::graphene_rect_t, args: *mut glib::GBytes, children: *mut *mut GskRenderNode, n_children: c_uint, ) -> *mut GskGLShaderNode; pub fn gsk_gl_shader_node_get_args(node: *mut GskGLShaderNode) -> *mut glib::GBytes; pub fn gsk_gl_shader_node_get_child( node: *mut GskGLShaderNode, idx: c_uint, ) -> *mut GskRenderNode; pub fn 
gsk_gl_shader_node_get_n_children(node: *mut GskGLShaderNode) -> c_uint; pub fn gsk_gl_shader_node_get_shader(node: *mut GskGLShaderNode) -> *mut GskGLShader; //========================================================================= // GskInsetShadowNode //========================================================================= pub fn gsk_inset_shadow_node_get_type() -> GType; pub fn gsk_inset_shadow_node_new( outline: *const GskRoundedRect, color: *const gdk::GdkRGBA, dx: c_float, dy: c_float, spread: c_float, blur_radius: c_float, ) -> *mut GskInsetShadowNode; pub fn gsk_inset_shadow_node_get_blur_radius(node: *mut GskInsetShadowNode) -> c_float; pub fn gsk_inset_shadow_node_get_dx(node: *mut GskInsetShadowNode) -> c_float; pub fn gsk_inset_shadow_node_get_dy(node: *mut GskInsetShadowNode) -> c_float; pub fn gsk_inset_shadow_node_get_spread(node: *mut GskInsetShadowNode) -> c_float; pub fn gsk_inset_shadow_node_peek_color(node: *mut GskInsetShadowNode) -> *const gdk::GdkRGBA; pub fn gsk_inset_shadow_node_peek_outline( node: *mut GskInsetShadowNode, ) -> *const GskRoundedRect; //========================================================================= // GskLinearGradientNode //========================================================================= pub fn gsk_linear_gradient_node_get_type() -> GType; pub fn gsk_linear_gradient_node_new( bounds: *const graphene::graphene_rect_t, start: *const graphene::graphene_point_t, end: *const graphene::graphene_point_t, color_stops: *const GskColorStop, n_color_stops: size_t, ) -> *mut GskLinearGradientNode; pub fn gsk_linear_gradient_node_get_n_color_stops(node: *mut GskLinearGradientNode) -> size_t; pub fn gsk_linear_gradient_node_peek_color_stops( node: *mut GskLinearGradientNode, n_stops: *mut size_t, ) -> *const GskColorStop; pub fn gsk_linear_gradient_node_peek_end( node: *mut GskLinearGradientNode, ) -> *const graphene::graphene_point_t; pub fn gsk_linear_gradient_node_peek_start( node: *mut GskLinearGradientNode, ) -> *const graphene::graphene_point_t; //========================================================================= // GskOpacityNode //========================================================================= pub fn gsk_opacity_node_get_type() -> GType; pub fn gsk_opacity_node_new(child: *mut GskRenderNode, opacity: c_float) -> *mut GskOpacityNode; pub fn gsk_opacity_node_get_child(node: *mut GskOpacityNode) -> *mut GskRenderNode; pub fn gsk_opacity_node_get_opacity(node: *mut GskOpacityNode) -> c_float; //========================================================================= // GskOutsetShadowNode //========================================================================= pub fn gsk_outset_shadow_node_get_type() -> GType; pub fn gsk_outset_shadow_node_new( outline: *const GskRoundedRect, color: *const gdk::GdkRGBA, dx: c_float, dy: c_float, spread: c_float, blur_radius: c_float, ) -> *mut GskOutsetShadowNode; pub fn gsk_outset_shadow_node_get_blur_radius(node: *mut GskOutsetShadowNode) -> c_float; pub fn gsk_outset_shadow_node_get_dx(node: *mut GskOutsetShadowNode) -> c_float; pub fn gsk_outset_shadow_node_get_dy(node: *mut GskOutsetShadowNode) -> c_float; pub fn gsk_outset_shadow_node_get_spread(node: *mut GskOutsetShadowNode) -> c_float; pub fn gsk_outset_shadow_node_peek_color(node: *mut GskOutsetShadowNode) -> *const gdk::GdkRGBA; pub fn gsk_outset_shadow_node_peek_outline( node: *mut GskOutsetShadowNode, ) -> *const GskRoundedRect; //========================================================================= // 
GskRadialGradientNode //========================================================================= pub fn gsk_radial_gradient_node_get_type() -> GType; pub fn gsk_radial_gradient_node_new( bounds: *const graphene::graphene_rect_t, center: *const graphene::graphene_point_t, hradius: c_float, vradius: c_float, start: c_float, end: c_float, color_stops: *const GskColorStop, n_color_stops: size_t, ) -> *mut GskRadialGradientNode; pub fn gsk_radial_gradient_node_get_end(node: *mut GskRadialGradientNode) -> c_float; pub fn gsk_radial_gradient_node_get_hradius(node: *mut GskRadialGradientNode) -> c_float; pub fn gsk_radial_gradient_node_get_n_color_stops(node: *mut GskRadialGradientNode) -> size_t; pub fn gsk_radial_gradient_node_get_start(node: *mut GskRadialGradientNode) -> c_float; pub fn gsk_radial_gradient_node_get_vradius(node: *mut GskRadialGradientNode) -> c_float; pub fn gsk_radial_gradient_node_peek_center( node: *mut GskRadialGradientNode, ) -> *const graphene::graphene_point_t; pub fn gsk_radial_gradient_node_peek_color_stops( node: *mut GskRadialGradientNode, n_stops: *mut size_t, ) -> *const GskColorStop; //========================================================================= // GskRenderNode //========================================================================= pub fn gsk_render_node_get_type() -> GType; pub fn gsk_render_node_deserialize( bytes: *mut glib::GBytes, error_func: GskParseErrorFunc, user_data: gpointer, ) -> *mut GskRenderNode; pub fn gsk_render_node_draw(node: *mut GskRenderNode, cr: *mut cairo::cairo_t); pub fn gsk_render_node_get_bounds( node: *mut GskRenderNode, bounds: *mut graphene::graphene_rect_t, ); pub fn gsk_render_node_get_node_type(node: *mut GskRenderNode) -> GskRenderNodeType; pub fn gsk_render_node_ref(node: *mut GskRenderNode) -> *mut GskRenderNode; pub fn gsk_render_node_serialize(node: *mut GskRenderNode) -> *mut glib::GBytes; pub fn gsk_render_node_unref(node: *mut GskRenderNode); pub fn gsk_render_node_write_to_file( node: *mut GskRenderNode, filename: *const c_char, error: *mut *mut glib::GError, ) -> gboolean; //========================================================================= // GskRenderer //========================================================================= pub fn gsk_renderer_get_type() -> GType; pub fn gsk_renderer_new_for_surface(surface: *mut gdk::GdkSurface) -> *mut GskRenderer; pub fn gsk_renderer_get_surface(renderer: *mut GskRenderer) -> *mut gdk::GdkSurface; pub fn gsk_renderer_is_realized(renderer: *mut GskRenderer) -> gboolean; pub fn gsk_renderer_realize( renderer: *mut GskRenderer, surface: *mut gdk::GdkSurface, error: *mut *mut glib::GError, ) -> gboolean; pub fn gsk_renderer_render( renderer: *mut GskRenderer, root: *mut GskRenderNode, region: *const cairo::cairo_region_t, ); pub fn gsk_renderer_render_texture( renderer: *mut GskRenderer, root: *mut GskRenderNode, viewport: *const graphene::graphene_rect_t, ) -> *mut gdk::GdkTexture; pub fn gsk_renderer_unrealize(renderer: *mut GskRenderer); //========================================================================= // GskRepeatNode //========================================================================= pub fn gsk_repeat_node_get_type() -> GType; pub fn gsk_repeat_node_new( bounds: *const graphene::graphene_rect_t, child: *mut GskRenderNode, child_bounds: *const graphene::graphene_rect_t, ) -> *mut GskRepeatNode; pub fn gsk_repeat_node_get_child(node: *mut GskRepeatNode) -> *mut GskRenderNode; pub fn gsk_repeat_node_peek_child_bounds( node: *mut 
GskRepeatNode, ) -> *const graphene::graphene_rect_t; //========================================================================= // GskRepeatingLinearGradientNode //========================================================================= pub fn gsk_repeating_linear_gradient_node_get_type() -> GType; pub fn gsk_repeating_linear_gradient_node_new( bounds: *const graphene::graphene_rect_t, start: *const graphene::graphene_point_t, end: *const graphene::graphene_point_t, color_stops: *const GskColorStop, n_color_stops: size_t, ) -> *mut GskRepeatingLinearGradientNode; //========================================================================= // GskRepeatingRadialGradientNode //========================================================================= pub fn gsk_repeating_radial_gradient_node_get_type() -> GType; pub fn gsk_repeating_radial_gradient_node_new( bounds: *const graphene::graphene_rect_t, center: *const graphene::graphene_point_t, hradius: c_float, vradius: c_float, start: c_float, end: c_float, color_stops: *const GskColorStop, n_color_stops: size_t, ) -> *mut GskRepeatingRadialGradientNode; //========================================================================= // GskRoundedClipNode //========================================================================= pub fn gsk_rounded_clip_node_get_type() -> GType; pub fn gsk_rounded_clip_node_new( child: *mut GskRenderNode, clip: *const GskRoundedRect, ) -> *mut GskRoundedClipNode; pub fn gsk_rounded_clip_node_get_child(node: *mut GskRoundedClipNode) -> *mut GskRenderNode; pub fn gsk_rounded_clip_node_peek_clip(node: *mut GskRoundedClipNode) -> *const GskRoundedRect; //========================================================================= // GskShadowNode //========================================================================= pub fn gsk_shadow_node_get_type() -> GType; pub fn gsk_shadow_node_new( child: *mut GskRenderNode, shadows: *const GskShadow, n_shadows: size_t, ) -> *mut GskShadowNode; pub fn gsk_shadow_node_get_child(node: *mut GskShadowNode) -> *mut GskRenderNode; pub fn gsk_shadow_node_get_n_shadows(node: *mut GskShadowNode) -> size_t; pub fn gsk_shadow_node_peek_shadow(node: *mut GskShadowNode, i: size_t) -> *const GskShadow; //========================================================================= // GskTextNode //========================================================================= pub fn gsk_text_node_get_type() -> GType; pub fn gsk_text_node_new( font: *mut pango::PangoFont, glyphs: *mut pango::PangoGlyphString, color: *const gdk::GdkRGBA, offset: *const graphene::graphene_point_t, ) -> *mut GskTextNode; pub fn gsk_text_node_get_num_glyphs(node: *mut GskTextNode) -> c_uint; pub fn gsk_text_node_get_offset(node: *mut GskTextNode) -> *const graphene::graphene_point_t; pub fn gsk_text_node_has_color_glyphs(node: *mut GskTextNode) -> gboolean; pub fn gsk_text_node_peek_color(node: *mut GskTextNode) -> *const gdk::GdkRGBA; pub fn gsk_text_node_peek_font(node: *mut GskTextNode) -> *mut pango::PangoFont; pub fn gsk_text_node_peek_glyphs( node: *mut GskTextNode, n_glyphs: *mut c_uint, ) -> *const pango::PangoGlyphInfo; //========================================================================= // GskTextureNode //========================================================================= pub fn gsk_texture_node_get_type() -> GType; pub fn gsk_texture_node_new( texture: *mut gdk::GdkTexture, bounds: *const graphene::graphene_rect_t, ) -> *mut GskTextureNode; pub fn gsk_texture_node_get_texture(node: *mut 
GskTextureNode) -> *mut gdk::GdkTexture; //========================================================================= // GskTransformNode //========================================================================= pub fn gsk_transform_node_get_type() -> GType; pub fn gsk_transform_node_new( child: *mut GskRenderNode, transform: *mut GskTransform, ) -> *mut GskTransformNode; pub fn gsk_transform_node_get_child(node: *mut GskTransformNode) -> *mut GskRenderNode; pub fn gsk_transform_node_get_transform(node: *mut GskTransformNode) -> *mut GskTransform; //========================================================================= // GskVulkanRenderer //========================================================================= pub fn gsk_vulkan_renderer_get_type() -> GType; pub fn gsk_vulkan_renderer_new() -> *mut GskRenderer; }
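// --- Hypothetical usage sketch (editor's addition, not part of the bindings
// above): calling a few of the raw GskTransform functions declared in the
// extern block. These are direct C calls, so everything is unsafe; linking
// against gtk-4 is assumed, and the ownership comment reflects the usual GSK
// convention (verify against the C documentation before relying on it).
fn transform_sketch() {
    unsafe {
        let identity = gsk_transform_new();
        // gsk_transform_scale takes ownership of its `next` argument and
        // returns a new transform, so only the final value is unreffed here.
        let scaled = gsk_transform_scale(identity, 2.0, 2.0);
        gsk_transform_unref(scaled);
    }
}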
38.706165
125
0.58576
7a16d75c1f06323559eeb898592f697d36a3110c
1,892
// Copyright 2019 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Cryptocurrency wallet.

use exonum::crypto::{Hash, PublicKey};
use exonum_proto::ProtobufConvert;

use super::proto;

/// Wallet information stored in the database.
#[derive(Clone, Debug, ProtobufConvert, BinaryValue, ObjectHash)]
#[protobuf_convert(source = "proto::Wallet", serde_pb_convert)]
pub struct Wallet {
    /// `PublicKey` of the wallet.
    pub pub_key: PublicKey,
    /// Name of the wallet.
    pub name: String,
    /// Current balance of the wallet.
    pub balance: u64,
    /// Length of the transactions history.
    pub history_len: u64,
    /// `Hash` of the transactions history.
    pub history_hash: Hash,
}

impl Wallet {
    /// Create new Wallet.
    pub fn new(
        &pub_key: &PublicKey,
        name: &str,
        balance: u64,
        history_len: u64,
        &history_hash: &Hash,
    ) -> Self {
        Self {
            pub_key,
            name: name.to_owned(),
            balance,
            history_len,
            history_hash,
        }
    }

    /// Returns a copy of this wallet with updated balance.
    pub fn set_balance(self, balance: u64, history_hash: &Hash) -> Self {
        Self::new(
            &self.pub_key,
            &self.name,
            balance,
            self.history_len + 1,
            history_hash,
        )
    }
}
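// --- Hypothetical usage sketch (editor's addition, not part of the module
// above): creating a wallet and applying a balance update. `gen_keypair` and
// `Hash::zero` are assumed to be available from exonum's crypto module, as in
// other Exonum examples; the literal values are placeholders.
#[cfg(test)]
mod wallet_sketch {
    use super::Wallet;
    use exonum::crypto::{self, Hash};

    #[test]
    fn balance_update_extends_history() {
        let (pub_key, _secret_key) = crypto::gen_keypair();
        let wallet = Wallet::new(&pub_key, "Alice", 100, 0, &Hash::zero());
        let updated = wallet.set_balance(150, &Hash::zero());
        assert_eq!(updated.balance, 150);
        assert_eq!(updated.history_len, 1);
    }
}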
28.666667
75
0.631078
71e2ce5f737cf8ab8bb9d72cc9500a642d84af68
15,695
#[doc = "Register `TACTL` reader"] pub struct R(crate::R<TACTL_SPEC>); impl core::ops::Deref for R { type Target = crate::R<TACTL_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::convert::From<crate::R<TACTL_SPEC>> for R { fn from(reader: crate::R<TACTL_SPEC>) -> Self { R(reader) } } #[doc = "Register `TACTL` writer"] pub struct W(crate::W<TACTL_SPEC>); impl core::ops::Deref for W { type Target = crate::W<TACTL_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl core::convert::From<crate::W<TACTL_SPEC>> for W { fn from(writer: crate::W<TACTL_SPEC>) -> Self { W(writer) } } #[doc = "Field `TAIFG` reader - Timer A counter interrupt flag"] pub struct TAIFG_R(crate::FieldReader<bool, bool>); impl TAIFG_R { pub(crate) fn new(bits: bool) -> Self { TAIFG_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for TAIFG_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `TAIFG` writer - Timer A counter interrupt flag"] pub struct TAIFG_W<'a> { w: &'a mut W, } impl<'a> TAIFG_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | (value as u16 & 0x01); self.w } } #[doc = "Field `TAIE` reader - Timer A counter interrupt enable"] pub struct TAIE_R(crate::FieldReader<bool, bool>); impl TAIE_R { pub(crate) fn new(bits: bool) -> Self { TAIE_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for TAIE_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `TAIE` writer - Timer A counter interrupt enable"] pub struct TAIE_W<'a> { w: &'a mut W, } impl<'a> TAIE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u16 & 0x01) << 1); self.w } } #[doc = "Field `TACLR` reader - Timer A counter clear"] pub struct TACLR_R(crate::FieldReader<bool, bool>); impl TACLR_R { pub(crate) fn new(bits: bool) -> Self { TACLR_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for TACLR_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `TACLR` writer - Timer A counter clear"] pub struct TACLR_W<'a> { w: &'a mut W, } impl<'a> TACLR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u16 & 0x01) << 2); self.w } } #[doc = "Timer A mode control 1\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum MC_A { #[doc = "0: Timer A mode control: 0 - Stop"] 
MC_0 = 0, #[doc = "1: Timer A mode control: 1 - Up to CCR0"] MC_1 = 1, #[doc = "2: Timer A mode control: 2 - Continous up"] MC_2 = 2, #[doc = "3: Timer A mode control: 3 - Up/Down"] MC_3 = 3, } impl From<MC_A> for u8 { #[inline(always)] fn from(variant: MC_A) -> Self { variant as _ } } #[doc = "Field `MC` reader - Timer A mode control 1"] pub struct MC_R(crate::FieldReader<u8, MC_A>); impl MC_R { pub(crate) fn new(bits: u8) -> Self { MC_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MC_A { match self.bits { 0 => MC_A::MC_0, 1 => MC_A::MC_1, 2 => MC_A::MC_2, 3 => MC_A::MC_3, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `MC_0`"] #[inline(always)] pub fn is_mc_0(&self) -> bool { **self == MC_A::MC_0 } #[doc = "Checks if the value of the field is `MC_1`"] #[inline(always)] pub fn is_mc_1(&self) -> bool { **self == MC_A::MC_1 } #[doc = "Checks if the value of the field is `MC_2`"] #[inline(always)] pub fn is_mc_2(&self) -> bool { **self == MC_A::MC_2 } #[doc = "Checks if the value of the field is `MC_3`"] #[inline(always)] pub fn is_mc_3(&self) -> bool { **self == MC_A::MC_3 } } impl core::ops::Deref for MC_R { type Target = crate::FieldReader<u8, MC_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `MC` writer - Timer A mode control 1"] pub struct MC_W<'a> { w: &'a mut W, } impl<'a> MC_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MC_A) -> &'a mut W { self.bits(variant.into()) } #[doc = "Timer A mode control: 0 - Stop"] #[inline(always)] pub fn mc_0(self) -> &'a mut W { self.variant(MC_A::MC_0) } #[doc = "Timer A mode control: 1 - Up to CCR0"] #[inline(always)] pub fn mc_1(self) -> &'a mut W { self.variant(MC_A::MC_1) } #[doc = "Timer A mode control: 2 - Continous up"] #[inline(always)] pub fn mc_2(self) -> &'a mut W { self.variant(MC_A::MC_2) } #[doc = "Timer A mode control: 3 - Up/Down"] #[inline(always)] pub fn mc_3(self) -> &'a mut W { self.variant(MC_A::MC_3) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 4)) | ((value as u16 & 0x03) << 4); self.w } } #[doc = "Timer A clock input divider 1\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum ID_A { #[doc = "0: Timer A input divider: 0 - /1"] ID_0 = 0, #[doc = "1: Timer A input divider: 1 - /2"] ID_1 = 1, #[doc = "2: Timer A input divider: 2 - /4"] ID_2 = 2, #[doc = "3: Timer A input divider: 3 - /8"] ID_3 = 3, } impl From<ID_A> for u8 { #[inline(always)] fn from(variant: ID_A) -> Self { variant as _ } } #[doc = "Field `ID` reader - Timer A clock input divider 1"] pub struct ID_R(crate::FieldReader<u8, ID_A>); impl ID_R { pub(crate) fn new(bits: u8) -> Self { ID_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> ID_A { match self.bits { 0 => ID_A::ID_0, 1 => ID_A::ID_1, 2 => ID_A::ID_2, 3 => ID_A::ID_3, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `ID_0`"] #[inline(always)] pub fn is_id_0(&self) -> bool { **self == ID_A::ID_0 } #[doc = "Checks if the value of the field is `ID_1`"] #[inline(always)] pub fn is_id_1(&self) -> bool { **self == ID_A::ID_1 } #[doc = "Checks if the value of the field is `ID_2`"] #[inline(always)] pub fn is_id_2(&self) -> bool { **self == ID_A::ID_2 } #[doc = "Checks if the value of the field is `ID_3`"] 
#[inline(always)] pub fn is_id_3(&self) -> bool { **self == ID_A::ID_3 } } impl core::ops::Deref for ID_R { type Target = crate::FieldReader<u8, ID_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `ID` writer - Timer A clock input divider 1"] pub struct ID_W<'a> { w: &'a mut W, } impl<'a> ID_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: ID_A) -> &'a mut W { self.bits(variant.into()) } #[doc = "Timer A input divider: 0 - /1"] #[inline(always)] pub fn id_0(self) -> &'a mut W { self.variant(ID_A::ID_0) } #[doc = "Timer A input divider: 1 - /2"] #[inline(always)] pub fn id_1(self) -> &'a mut W { self.variant(ID_A::ID_1) } #[doc = "Timer A input divider: 2 - /4"] #[inline(always)] pub fn id_2(self) -> &'a mut W { self.variant(ID_A::ID_2) } #[doc = "Timer A input divider: 3 - /8"] #[inline(always)] pub fn id_3(self) -> &'a mut W { self.variant(ID_A::ID_3) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 6)) | ((value as u16 & 0x03) << 6); self.w } } #[doc = "Timer A clock source select 1\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum TASSEL_A { #[doc = "0: Timer A clock source select: 0 - TACLK"] TASSEL_0 = 0, #[doc = "1: Timer A clock source select: 1 - ACLK"] TASSEL_1 = 1, #[doc = "2: Timer A clock source select: 2 - SMCLK"] TASSEL_2 = 2, #[doc = "3: Timer A clock source select: 3 - INCLK"] TASSEL_3 = 3, } impl From<TASSEL_A> for u8 { #[inline(always)] fn from(variant: TASSEL_A) -> Self { variant as _ } } #[doc = "Field `TASSEL` reader - Timer A clock source select 1"] pub struct TASSEL_R(crate::FieldReader<u8, TASSEL_A>); impl TASSEL_R { pub(crate) fn new(bits: u8) -> Self { TASSEL_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> TASSEL_A { match self.bits { 0 => TASSEL_A::TASSEL_0, 1 => TASSEL_A::TASSEL_1, 2 => TASSEL_A::TASSEL_2, 3 => TASSEL_A::TASSEL_3, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `TASSEL_0`"] #[inline(always)] pub fn is_tassel_0(&self) -> bool { **self == TASSEL_A::TASSEL_0 } #[doc = "Checks if the value of the field is `TASSEL_1`"] #[inline(always)] pub fn is_tassel_1(&self) -> bool { **self == TASSEL_A::TASSEL_1 } #[doc = "Checks if the value of the field is `TASSEL_2`"] #[inline(always)] pub fn is_tassel_2(&self) -> bool { **self == TASSEL_A::TASSEL_2 } #[doc = "Checks if the value of the field is `TASSEL_3`"] #[inline(always)] pub fn is_tassel_3(&self) -> bool { **self == TASSEL_A::TASSEL_3 } } impl core::ops::Deref for TASSEL_R { type Target = crate::FieldReader<u8, TASSEL_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `TASSEL` writer - Timer A clock source select 1"] pub struct TASSEL_W<'a> { w: &'a mut W, } impl<'a> TASSEL_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: TASSEL_A) -> &'a mut W { self.bits(variant.into()) } #[doc = "Timer A clock source select: 0 - TACLK"] #[inline(always)] pub fn tassel_0(self) -> &'a mut W { self.variant(TASSEL_A::TASSEL_0) } #[doc = "Timer A clock source select: 1 - ACLK"] #[inline(always)] pub fn tassel_1(self) -> &'a mut W { self.variant(TASSEL_A::TASSEL_1) } #[doc = "Timer A clock source select: 2 - SMCLK"] #[inline(always)] pub fn tassel_2(self) -> &'a mut W { self.variant(TASSEL_A::TASSEL_2) } #[doc = "Timer A clock source select: 
3 - INCLK"] #[inline(always)] pub fn tassel_3(self) -> &'a mut W { self.variant(TASSEL_A::TASSEL_3) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 8)) | ((value as u16 & 0x03) << 8); self.w } } impl R { #[doc = "Bit 0 - Timer A counter interrupt flag"] #[inline(always)] pub fn taifg(&self) -> TAIFG_R { TAIFG_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Timer A counter interrupt enable"] #[inline(always)] pub fn taie(&self) -> TAIE_R { TAIE_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - Timer A counter clear"] #[inline(always)] pub fn taclr(&self) -> TACLR_R { TACLR_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bits 4:5 - Timer A mode control 1"] #[inline(always)] pub fn mc(&self) -> MC_R { MC_R::new(((self.bits >> 4) & 0x03) as u8) } #[doc = "Bits 6:7 - Timer A clock input divider 1"] #[inline(always)] pub fn id(&self) -> ID_R { ID_R::new(((self.bits >> 6) & 0x03) as u8) } #[doc = "Bits 8:9 - Timer A clock source select 1"] #[inline(always)] pub fn tassel(&self) -> TASSEL_R { TASSEL_R::new(((self.bits >> 8) & 0x03) as u8) } } impl W { #[doc = "Bit 0 - Timer A counter interrupt flag"] #[inline(always)] pub fn taifg(&mut self) -> TAIFG_W { TAIFG_W { w: self } } #[doc = "Bit 1 - Timer A counter interrupt enable"] #[inline(always)] pub fn taie(&mut self) -> TAIE_W { TAIE_W { w: self } } #[doc = "Bit 2 - Timer A counter clear"] #[inline(always)] pub fn taclr(&mut self) -> TACLR_W { TACLR_W { w: self } } #[doc = "Bits 4:5 - Timer A mode control 1"] #[inline(always)] pub fn mc(&mut self) -> MC_W { MC_W { w: self } } #[doc = "Bits 6:7 - Timer A clock input divider 1"] #[inline(always)] pub fn id(&mut self) -> ID_W { ID_W { w: self } } #[doc = "Bits 8:9 - Timer A clock source select 1"] #[inline(always)] pub fn tassel(&mut self) -> TASSEL_W { TASSEL_W { w: self } } #[doc = "Writes raw bits to the register."] pub unsafe fn bits(&mut self, bits: u16) -> &mut Self { self.0.bits(bits); self } } #[doc = "Timer A Control\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [tactl](index.html) module"] pub struct TACTL_SPEC; impl crate::RegisterSpec for TACTL_SPEC { type Ux = u16; } #[doc = "`read()` method returns [tactl::R](R) reader structure"] impl crate::Readable for TACTL_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [tactl::W](W) writer structure"] impl crate::Writable for TACTL_SPEC { type Writer = W; } #[doc = "`reset()` method sets TACTL to value 0"] impl crate::Resettable for TACTL_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
29.336449
401
0.549028
b9a3a24585b5c8431afbd0b77ffc89f896e1cdcd
1,179
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// run-pass
// pretty-expanded FIXME #23616

use std::mem;

/// Returns the size of a type
pub fn size_of<T>() -> usize {
    TypeInfo::size_of(None::<T>)
}

/// Returns the size of the type that `val` points to
pub fn size_of_val<T>(val: &T) -> usize {
    val.size_of_val()
}

pub trait TypeInfo: Sized {
    fn size_of(_lame_type_hint: Option<Self>) -> usize;
    fn size_of_val(&self) -> usize;
}

impl<T> TypeInfo for T {
    /// The size of the type in bytes.
    fn size_of(_lame_type_hint: Option<T>) -> usize {
        mem::size_of::<T>()
    }

    /// Returns the size of the type of `self` in bytes.
    fn size_of_val(&self) -> usize {
        TypeInfo::size_of(None::<T>)
    }
}

pub fn main() {}
26.795455
68
0.662426
1e338a9381465bfa924f9c79373b66daee8e89da
4,977
use crate::co;
use crate::kernel::decl::WinResult;
use crate::msg::WndMsg;
use crate::prelude::MsgSend;
use crate::user::decl::{BmpIcon, HBITMAP, HICON};

pub_struct_msg_empty! { Click: co::BM::CLICK.into(); "user";
	/// [`BM_CLICK`](https://docs.microsoft.com/en-us/windows/win32/controls/bm-click)
}

/// [`BM_GETCHECK`](https://docs.microsoft.com/en-us/windows/win32/controls/bm-getcheck)
/// message parameters.
///
/// Return type: `co::BST`.
#[cfg_attr(docsrs, doc(cfg(feature = "user")))]
pub struct GetCheck {}

impl MsgSend for GetCheck {
	type RetType = co::BST;

	fn convert_ret(&self, v: isize) -> Self::RetType {
		co::BST(v as _)
	}

	fn as_generic_wm(&mut self) -> WndMsg {
		WndMsg {
			msg_id: co::BM::GETCHECK.into(),
			wparam: 0,
			lparam: 0,
		}
	}
}

/// [`BM_GETIMAGE`](https://docs.microsoft.com/en-us/windows/win32/controls/bm-getimage)
/// message parameters.
///
/// Return type: `WinResult<BmpIcon>`.
#[cfg_attr(docsrs, doc(cfg(feature = "user")))]
pub struct GetImage {
	pub img_type: co::IMAGE_TYPE,
}

impl MsgSend for GetImage {
	type RetType = WinResult<BmpIcon>;

	fn convert_ret(&self, v: isize) -> Self::RetType {
		match self.img_type {
			co::IMAGE_TYPE::BITMAP => Ok(BmpIcon::Bmp(HBITMAP(v as _))),
			co::IMAGE_TYPE::ICON => Ok(BmpIcon::Icon(HICON(v as _))),
			_ => Err(co::ERROR::BAD_ARGUMENTS),
		}
	}

	fn as_generic_wm(&mut self) -> WndMsg {
		WndMsg {
			msg_id: co::BM::GETIMAGE.into(),
			wparam: self.img_type.0 as _,
			lparam: 0,
		}
	}
}

/// [`BM_GETSTATE`](https://docs.microsoft.com/en-us/windows/win32/controls/bm-getstate)
/// message, which has no parameters.
///
/// Return type: `co::BST`.
#[cfg_attr(docsrs, doc(cfg(feature = "user")))]
pub struct GetState {}

impl MsgSend for GetState {
	type RetType = co::BST;

	fn convert_ret(&self, v: isize) -> Self::RetType {
		co::BST(v as _)
	}

	fn as_generic_wm(&mut self) -> WndMsg {
		WndMsg {
			msg_id: co::BM::GETSTATE.into(),
			wparam: 0,
			lparam: 0,
		}
	}
}

/// [`BM_SETCHECK`](https://docs.microsoft.com/en-us/windows/win32/controls/bm-setcheck)
/// message parameters.
///
/// Return type: `()`.
#[cfg_attr(docsrs, doc(cfg(feature = "user")))]
pub struct SetCheck {
	pub state: co::BST,
}

impl MsgSend for SetCheck {
	type RetType = ();

	fn convert_ret(&self, _: isize) -> Self::RetType {
		()
	}

	fn as_generic_wm(&mut self) -> WndMsg {
		WndMsg {
			msg_id: co::BM::SETCHECK.into(),
			wparam: self.state.0 as _,
			lparam: 0,
		}
	}
}

/// [`BM_SETDONTCLICK`](https://docs.microsoft.com/en-us/windows/win32/controls/bm-setdontclick)
/// message parameters.
///
/// Return type: `()`.
#[cfg_attr(docsrs, doc(cfg(feature = "user")))]
pub struct SetDontClick {
	pub dont_click: bool,
}

impl MsgSend for SetDontClick {
	type RetType = ();

	fn convert_ret(&self, _: isize) -> Self::RetType {
		()
	}

	fn as_generic_wm(&mut self) -> WndMsg {
		WndMsg {
			msg_id: co::BM::SETDONTCLICK.into(),
			wparam: self.dont_click as _,
			lparam: 0,
		}
	}
}

/// [`BM_SETIMAGE`](https://docs.microsoft.com/en-us/windows/win32/controls/bm-setimage)
/// message parameters.
///
/// Return type: `WinResult<BmpIcon>`.
#[cfg_attr(docsrs, doc(cfg(feature = "user")))]
pub struct SetImage {
	pub image: BmpIcon,
}

impl MsgSend for SetImage {
	type RetType = WinResult<BmpIcon>;

	fn convert_ret(&self, v: isize) -> Self::RetType {
		match self.image {
			BmpIcon::Bmp(_) => Ok(BmpIcon::Bmp(HBITMAP(v as _))),
			BmpIcon::Icon(_) => Ok(BmpIcon::Icon(HICON(v as _))),
		}
	}

	fn as_generic_wm(&mut self) -> WndMsg {
		WndMsg {
			msg_id: co::BM::SETIMAGE.into(),
			wparam: match self.image {
				BmpIcon::Bmp(_) => co::IMAGE_TYPE::BITMAP.0,
				BmpIcon::Icon(_) => co::IMAGE_TYPE::ICON.0,
			} as _,
			lparam: self.image.as_isize(),
		}
	}
}

/// [`BM_SETSTATE`](https://docs.microsoft.com/en-us/windows/win32/controls/bm-setstate)
/// message parameters.
///
/// Return type: `()`.
#[cfg_attr(docsrs, doc(cfg(feature = "user")))]
pub struct SetState {
	pub highlight: bool,
}

impl MsgSend for SetState {
	type RetType = ();

	fn convert_ret(&self, _: isize) -> Self::RetType {
		()
	}

	fn as_generic_wm(&mut self) -> WndMsg {
		WndMsg {
			msg_id: co::BM::SETSTATE.into(),
			wparam: self.highlight as _,
			lparam: 0,
		}
	}
}

/// [`BM_SETSTYLE`](https://docs.microsoft.com/en-us/windows/win32/controls/bm-setstyle)
/// message parameters.
///
/// Return type: `()`.
#[cfg_attr(docsrs, doc(cfg(feature = "user")))]
pub struct SetStyle {
	pub style: co::BS,
	pub redraw: bool,
}

impl MsgSend for SetStyle {
	type RetType = ();

	fn convert_ret(&self, _: isize) -> Self::RetType {
		()
	}

	fn as_generic_wm(&mut self) -> WndMsg {
		WndMsg {
			msg_id: co::BM::SETSTYLE.into(),
			wparam: self.style.0 as _,
			lparam: self.redraw as _,
		}
	}
}
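// --- Hypothetical usage sketch (editor's addition, not part of the module
// above): how one of these message structs maps onto a generic `WndMsg`.
// `co::BST::CHECKED` is assumed to exist alongside the other constants used
// above; the assertions only rely on fields shown in this file.
#[cfg(test)]
mod bm_msg_sketch {
	use super::*;

	#[test]
	fn set_check_maps_to_wndmsg() {
		let mut msg = SetCheck { state: co::BST::CHECKED };
		let wm: WndMsg = msg.as_generic_wm();
		assert_eq!(wm.msg_id, co::BM::SETCHECK.into());
		assert_eq!(wm.lparam, 0);
	}
}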
22.935484
97
0.613422
1d39b3356da0dae427abdbc5623fa907208d69b6
4,280
use crate::testing::FragmentSender; use crate::testing::FragmentSenderSetup; use crate::testing::RemoteJormungandr; use crate::testing::SyncNode; use crate::wallet::LinearFee; use crate::wallet::Wallet; use chain_impl_mockchain::fragment::Fragment; use jormungandr_lib::crypto::hash::Hash; use jortestkit::load::{Id, RequestFailure, RequestGenerator}; use rand_core::OsRng; pub struct BatchFragmentGenerator<'a, S: SyncNode + Send> { wallets: Vec<Wallet>, jormungandr: RemoteJormungandr, fragment_sender: FragmentSender<'a, S>, rand: OsRng, split_marker: usize, batch_size: u8, } impl<'a, S: SyncNode + Send> BatchFragmentGenerator<'a, S> { pub fn new( fragment_sender_setup: FragmentSenderSetup<'a, S>, jormungandr: RemoteJormungandr, block_hash: Hash, fees: LinearFee, batch_size: u8, ) -> Self { Self { wallets: Vec::new(), fragment_sender: FragmentSender::new(block_hash, fees, fragment_sender_setup), rand: OsRng, jormungandr, split_marker: 0, batch_size, } } pub fn fill_from_faucet(&mut self, faucet: &mut Wallet) { let mut wallets: Vec<Wallet> = std::iter::from_fn(|| Some(Wallet::new_account(&mut self.rand))) .take(90) .collect(); let fragment_sender = self .fragment_sender .clone_with_setup(FragmentSenderSetup::resend_3_times()); fragment_sender .send_transaction_to_many(faucet, &wallets, &self.jormungandr, 1_000_000.into()) .unwrap(); let mut additional_wallets = Vec::new(); for mut wallet in wallets.iter_mut().take(10) { let mut pack_of_wallets: Vec<Wallet> = std::iter::from_fn(|| Some(Wallet::new_account(&mut self.rand))) .take(90) .collect(); fragment_sender .send_transaction_to_many( &mut wallet, &pack_of_wallets, &self.jormungandr, 1000.into(), ) .unwrap(); additional_wallets.append(&mut pack_of_wallets); } self.wallets.append(&mut additional_wallets); self.wallets.append(&mut wallets); } pub fn increment_split_marker(&mut self) { self.split_marker += 1; if self.split_marker >= self.wallets.len() - 1 { self.split_marker = 1; } } pub fn generate_transaction(&mut self) -> Result<Fragment, RequestFailure> { self.increment_split_marker(); let (senders, recievers) = self.wallets.split_at_mut(self.split_marker); let sender = senders.get_mut(senders.len() - 1).unwrap(); let reciever = recievers.get(0).unwrap(); let fragment = sender .transaction_to( &self.fragment_sender.block0_hash(), &self.fragment_sender.fees(), reciever.address(), 1.into(), ) .map_err(|e| RequestFailure::General(format!("{:?}", e))); sender.confirm_transaction(); fragment } pub fn batch_size(&self) -> u8 { self.batch_size } pub fn generate_batch_transaction(&mut self) -> Result<Vec<Fragment>, RequestFailure> { let mut transactions = vec![]; for _ in 0..self.batch_size { transactions.push(self.generate_transaction()?); } Ok(transactions) } pub fn send_batch(&mut self) -> Result<Vec<Option<Id>>, RequestFailure> { let transactions = self.generate_batch_transaction()?; self.fragment_sender .send_batch_fragments(transactions, false, &self.jormungandr) .map(|checks| { checks .iter() .map(|x| Some(x.fragment_id().to_string())) .collect() }) .map_err(|e| RequestFailure::General(format!("{:?}", e))) } } impl<S: SyncNode + Send> RequestGenerator for BatchFragmentGenerator<'_, S> { fn next(&mut self) -> Result<Vec<Option<Id>>, RequestFailure> { self.send_batch() } }
32.923077
92
0.575
f801ac6d0474e6cc166b1cdfb19cfc522d095104
3,825
use super::*; use proptest::strategy::Strategy; #[test] fn without_function_right_returns_false() { with_process_arc(|arc_process| { TestRunner::new(Config::with_source_file(file!())) .run( &( strategy::term::is_function(arc_process.clone()), strategy::term(arc_process.clone()) .prop_filter("Right must not be function", |v| !v.is_function()), ), |(left, right)| { prop_assert_eq!(native(left, right), false.into()); Ok(()) }, ) .unwrap(); }); } #[test] fn with_same_function_right_returns_true() { with_process_arc(|arc_process| { TestRunner::new(Config::with_source_file(file!())) .run( &strategy::term::is_function(arc_process.clone()), |operand| { prop_assert_eq!(native(operand, operand), true.into()); Ok(()) }, ) .unwrap(); }); } #[test] fn with_same_value_function_right_returns_true() { with_process_arc(|arc_process| { TestRunner::new(Config::with_source_file(file!())) .run( &( strategy::module_function_arity::module(), strategy::module_function_arity::function(), strategy::module_function_arity::arity(), ) .prop_map(move |(module, function, arity)| { let code = |arc_process: &Arc<Process>| { arc_process.wait(); Ok(()) }; let left_term = arc_process .export_closure(module, function, arity, Some(code)) .unwrap(); let right_term = arc_process .export_closure(module, function, arity, Some(code)) .unwrap(); (left_term, right_term) }), |(left, right)| { prop_assert_eq!(native(left, right), true.into()); Ok(()) }, ) .unwrap(); }); } #[test] fn with_different_function_right_returns_false() { with_process_arc(|arc_process| { TestRunner::new(Config::with_source_file(file!())) .run( &( strategy::module_function_arity::module(), strategy::module_function_arity::function(), strategy::module_function_arity::arity(), ) .prop_map(move |(module, function, arity)| { let left_code = |arc_process: &Arc<Process>| { arc_process.wait(); Ok(()) }; let left_term = arc_process .export_closure(module, function, arity, Some(left_code)) .unwrap(); let right_code = |arc_process: &Arc<Process>| { arc_process.wait(); Ok(()) }; let right_term = arc_process .export_closure(module, function, arity, Some(right_code)) .unwrap(); (left_term, right_term) }), |(left, right)| { prop_assert_eq!(native(left, right), false.into()); Ok(()) }, ) .unwrap(); }); }
32.692308
89
0.423007
fe60ff497483756a2ed7341b25f8350d3bfeb930
14,748
//! Procedural macro for the `#[async]` attribute. //! //! This crate is an implementation of the `#[async]` attribute as a procedural //! macro. This is nightly-only for now as it's using the unstable features of //! procedural macros. Furthermore it's generating code that's using a new //! keyword, `yield`, and a new construct, generators, both of which are also //! unstable. //! //! Currently this crate depends on `syn` and `quote` to do all the heavy //! lifting, this is just a very small shim around creating a closure/future out //! of a generator. #![feature(proc_macro)] #![recursion_limit = "128"] extern crate proc_macro; extern crate proc_macro2; #[macro_use] extern crate quote; #[macro_use] extern crate syn; use proc_macro::{Delimiter, TokenStream, TokenTree}; use proc_macro2::{Span, TokenStream as Tokens}; use quote::ToTokens; use syn::fold::Fold; use syn::punctuated::Punctuated; use syn::*; macro_rules! quote_cs { ($($t:tt)*) => (quote_spanned!(Span::call_site() => $($t)*)) } fn async_inner<F>( boxed: bool, function: TokenStream, gen_function: Tokens, return_ty: F, ) -> TokenStream where F: FnOnce(&Type) -> proc_macro2::TokenStream, { // Parse our item, expecting a function. This function may be an actual // top-level function or it could be a method (typically dictated by the // arguments). We then extract everything we'd like to use. let ItemFn { ident, vis, unsafety, constness, abi, block, decl, attrs, .. } = match syn::parse(function).expect("failed to parse tokens as a function") { Item::Fn(item) => item, _ => panic!("#[async] can only be applied to functions"), }; let FnDecl { inputs, output, variadic, generics, fn_token, .. } = { *decl }; let where_clause = &generics.where_clause; assert!(variadic.is_none(), "variadic functions cannot be async"); let (output, rarrow_token) = match output { ReturnType::Type(rarrow_token, t) => (*t, rarrow_token), ReturnType::Default => ( TypeTuple { elems: Default::default(), paren_token: Default::default(), }.into(), Default::default(), ), }; // We've got to get a bit creative with our handling of arguments. For a // number of reasons we translate this: // // fn foo(ref a: u32) -> Result<u32, u32> { // // ... // } // // into roughly: // // fn foo(__arg_0: u32) -> impl Future<...> { // gen(move || { // let ref a = __arg0; // // // ... // }) // } // // The intention here is to ensure that all local function variables get // moved into the generator we're creating, and they're also all then bound // appropriately according to their patterns and whatnot. // // We notably skip everything related to `self` which typically doesn't have // many patterns with it and just gets captured naturally. let mut inputs_no_patterns = Vec::new(); let mut patterns = Vec::new(); let mut temp_bindings = Vec::new(); for (i, input) in inputs.into_iter().enumerate() { // `self: Box<Self>` will get captured naturally let mut is_input_no_pattern = false; if let FnArg::Captured(ref arg) = input { if let Pat::Ident(PatIdent { ref ident, .. }) = arg.pat { if ident == "self" { is_input_no_pattern = true; } } } if is_input_no_pattern { inputs_no_patterns.push(input); continue; } match input { FnArg::Captured(ArgCaptured { pat: syn::Pat::Ident(syn::PatIdent { by_ref: None, .. }), .. 
}) => { inputs_no_patterns.push(input); } // `ref a: B` (or some similar pattern) FnArg::Captured(ArgCaptured { pat, ty, colon_token, }) => { patterns.push(pat); let ident = Ident::new(&format!("__arg_{}", i), Span::call_site()); temp_bindings.push(ident.clone()); let pat = PatIdent { by_ref: None, mutability: None, ident: ident, subpat: None, }; inputs_no_patterns.push( ArgCaptured { pat: pat.into(), ty, colon_token, }.into(), ); } // Other `self`-related arguments get captured naturally _ => { inputs_no_patterns.push(input); } } } // This is the point where we handle // // #[async] // for x in y { // } // // Basically just take all those expression and expand them. let block = ExpandAsyncFor.fold_block(*block); let return_ty = return_ty(&output); let block_inner = quote_cs! { #( let #patterns = #temp_bindings; )* #block }; let mut result = Tokens::empty(); block.brace_token.surround(&mut result, |tokens| { block_inner.to_tokens(tokens); }); syn::token::Semi([block.brace_token.0]).to_tokens(&mut result); let gen_body_inner = quote_cs! { let __e: #output = #result // Ensure that this closure is a generator, even if it doesn't // have any `yield` statements. #[allow(unreachable_code)] { return __e; loop { yield futures::Async::NotReady } } }; let mut gen_body = Tokens::empty(); block.brace_token.surround(&mut gen_body, |tokens| { gen_body_inner.to_tokens(tokens); }); // Give the invocation of the `gen` function the same span as the output // as currently errors related to it being a result are targeted here. Not // sure if more errors will highlight this function call... let output_span = first_last(&output); let gen_function = respan(gen_function.into(), &output_span); let body_inner = quote_cs! { #gen_function (move || -> #output #gen_body) }; let body_inner = if boxed { let body = quote_cs! { futures::__rt::std::boxed::Box::new(#body_inner) }; respan(body.into(), &output_span) } else { body_inner.into() }; let mut body = Tokens::empty(); block.brace_token.surround(&mut body, |tokens| { body_inner.to_tokens(tokens); }); let output = quote_cs! { #(#attrs)* #vis #unsafety #abi #constness #fn_token #ident #generics(#(#inputs_no_patterns),*) #rarrow_token #return_ty #where_clause #body }; // println!("{}", output); output.into() } #[proc_macro_attribute] pub fn async(attribute: TokenStream, function: TokenStream) -> TokenStream { // Handle arguments to the #[async] attribute, if any let (boxed, send) = match &attribute.to_string() as &str { "boxed" => (true, false), "boxed_send" => (true, true), "" => (false, false), _ => panic!("the #[async] attribute currently only takes `boxed` as an arg"), }; async_inner( boxed, function, quote_cs! { futures::__rt::gen }, |output| { // TODO: can we lift the restriction that `futures` must be at the root of // the crate? let output_span = first_last(&output); let return_ty = if boxed && !send { quote_cs! { futures::__rt::std::boxed::Box<futures::Future< Item = <! as futures::__rt::IsResult>::Ok, Error = <! as futures::__rt::IsResult>::Err, >> } } else if boxed && send { quote_cs! { futures::__rt::std::boxed::Box<futures::Future< Item = <! as futures::__rt::IsResult>::Ok, Error = <! as futures::__rt::IsResult>::Err, > + Send> } } else { // Dunno why this is buggy, hits weird typecheck errors in tests // // quote_cs! { // impl futures::Future< // Item = <#output as futures::__rt::MyTry>::MyOk, // Error = <#output as futures::__rt::MyTry>::MyError, // > // } quote_cs! 
{ impl futures::__rt::MyFuture<!> + 'static } }; let return_ty = respan(return_ty.into(), &output_span); replace_bang(return_ty, &output) }, ) } #[proc_macro_attribute] pub fn async_stream(attribute: TokenStream, function: TokenStream) -> TokenStream { // Handle arguments to the #[async_stream] attribute, if any let args = syn::parse::<AsyncStreamArgs>(attribute).expect("failed to parse attribute arguments"); let mut boxed = false; let mut item_ty = None; for arg in args.0 { match arg { AsyncStreamArg(term, None) => { if term == "boxed" { if boxed { panic!("duplicate 'boxed' argument to #[async_stream]"); } boxed = true; } else { panic!("unexpected #[async_stream] argument '{}'", term); } } AsyncStreamArg(term, Some(ty)) => { if term == "item" { if item_ty.is_some() { panic!("duplicate 'item' argument to #[async_stream]"); } item_ty = Some(ty); } else { panic!( "unexpected #[async_stream] argument '{}'", quote_cs!(#term = #ty) ); } } } } let boxed = boxed; let item_ty = item_ty.expect("#[async_stream] requires item type to be specified"); async_inner( boxed, function, quote_cs! { futures::__rt::gen_stream }, |output| { let output_span = first_last(&output); let return_ty = if boxed { quote_cs! { futures::__rt::std::boxed::Box<futures::Stream< Item = !, Error = <! as futures::__rt::IsResult>::Err, >> } } else { quote_cs! { impl futures::__rt::MyStream<!, !> + 'static } }; let return_ty = respan(return_ty.into(), &output_span); replace_bangs(return_ty, &[&item_ty, &output]) }, ) } #[proc_macro] pub fn async_block(input: TokenStream) -> TokenStream { let mut t_tree = TokenTree::Group(proc_macro::Group::new(Delimiter::Brace, input)); t_tree.set_span(proc_macro::Span::call_site()); let input = TokenStream::from(t_tree); let expr = syn::parse(input).expect("failed to parse tokens as an expression"); let expr = ExpandAsyncFor.fold_expr(expr); let mut tokens = quote_cs! { futures::__rt::gen }; // Use some manual token construction here instead of `quote_cs!` to ensure // that we get the `call_site` span instead of the default span. let span = Span::call_site(); syn::token::Paren(span).surround(&mut tokens, |tokens| { syn::token::Move(span).to_tokens(tokens); syn::token::OrOr([span, span]).to_tokens(tokens); syn::token::Brace(span).surround(tokens, |tokens| { (quote_cs! { if false { yield futures::Async::NotReady } }).to_tokens(tokens); expr.to_tokens(tokens); }); }); tokens.into() } #[proc_macro] pub fn async_stream_block(input: TokenStream) -> TokenStream { let mut t_tree = TokenTree::Group(proc_macro::Group::new(Delimiter::Brace, input)); t_tree.set_span(proc_macro::Span::call_site()); let input = TokenStream::from(t_tree); let expr = syn::parse(input).expect("failed to parse tokens as an expression"); let expr = ExpandAsyncFor.fold_expr(expr); let mut tokens = quote_cs! { futures::__rt::gen_stream }; // Use some manual token construction here instead of `quote_cs!` to ensure // that we get the `call_site` span instead of the default span. let span = Span::call_site(); syn::token::Paren(span).surround(&mut tokens, |tokens| { syn::token::Move(span).to_tokens(tokens); syn::token::OrOr([span, span]).to_tokens(tokens); syn::token::Brace(span).surround(tokens, |tokens| { (quote_cs! 
{ if false { yield futures::Async::NotReady } }).to_tokens(tokens); expr.to_tokens(tokens); }); }); tokens.into() } struct ExpandAsyncFor; impl Fold for ExpandAsyncFor { fn fold_expr(&mut self, expr: Expr) -> Expr { let expr = fold::fold_expr(self, expr); let mut async = false; { let attrs = match expr { Expr::ForLoop(syn::ExprForLoop { ref attrs, .. }) => attrs, _ => return expr, }; if attrs.len() == 1 { // TODO: more validation here if attrs[0].path.segments.first().unwrap().value().ident == "async" { async = true; } } } if !async { return expr; } let all = match expr { Expr::ForLoop(item) => item, _ => panic!("only for expressions can have #[async]"), }; let ExprForLoop { pat, expr, body, label, .. } = all; // Basically just expand to a `poll` loop let tokens = quote_cs! {{ let mut __stream = #expr; #label loop { let #pat = { extern crate futures_await; let r = futures_await::Stream::poll(&mut __stream)?; match r { futures_await::Async::Ready(e) => { match e { futures_await::__rt::std::option::Option::Some(e) => e, futures_await::__rt::std::option::Option::None => break, } } futures_await::Async::NotReady => { yield futures_await::Async::NotReady; continue } } }; #body } }}; syn::parse(tokens.into()).unwrap() } // Don't recurse into items fn fold_item(&mut self, item: Item) -> Item { item } } fn first_last(tokens: &ToTokens) -> (Span, Span) { let mut spans = Tokens::empty(); tokens.to_tokens(&mut spans); let good_tokens = proc_macro2::TokenStream::from(spans) .into_iter() .collect::<Vec<_>>(); let first_span = good_tokens .first() .map(|t| t.span()) .unwrap_or(Span::call_site()); let last_span = good_tokens.last().map(|t| t.span()).unwrap_or(first_span); (first_span, last_span) } fn respan( input: proc_macro2::TokenStream, &(first_span, last_span): &(Span, Span), ) -> proc_macro2::TokenStream { let mut new_tokens = input.into_iter().collect::<Vec<_>>(); if let Some(token) = new_tokens.first_mut() { token.set_span(first_span); } for token in new_tokens.iter_mut().skip(1) { token.set_span(last_span); } new_tokens.into_iter().collect() } fn replace_bang(input: proc_macro2::TokenStream, tokens: &ToTokens) -> proc_macro2::TokenStream { let mut new_tokens = Tokens::empty(); for token in input.into_iter() { match token { proc_macro2::TokenTree::Punct(ref op) if op.as_char() == '!' => tokens.to_tokens(&mut new_tokens), _ => token.to_tokens(&mut new_tokens), } } new_tokens.into() } fn replace_bangs( input: proc_macro2::TokenStream, replacements: &[&ToTokens], ) -> proc_macro2::TokenStream { let mut replacements = replacements.iter().cycle(); let mut new_tokens = Tokens::empty(); for token in input.into_iter() { match token { proc_macro2::TokenTree::Punct(ref op) if op.as_char() == '!' => { replacements.next().unwrap().to_tokens(&mut new_tokens); } _ => token.to_tokens(&mut new_tokens), } } new_tokens.into() } struct AsyncStreamArg(syn::Ident, Option<syn::Type>); impl synom::Synom for AsyncStreamArg { named!(parse -> Self, do_parse!( i: syn!(syn::Ident) >> p: option!(do_parse!( syn!(syn::token::Eq) >> p: syn!(syn::Type) >> (p))) >> (AsyncStreamArg(i, p)))); } struct AsyncStreamArgs(Vec<AsyncStreamArg>); impl synom::Synom for AsyncStreamArgs { named!(parse -> Self, map!( option!(call!(Punctuated::<AsyncStreamArg, syn::token::Comma>::parse_separated_nonempty)), |p| AsyncStreamArgs(p.map(|d| d.into_iter().collect()).unwrap_or_default()) )); }
27.210332
101
0.631272
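The argument-handling comment in the file above describes a rebinding trick that is independent of futures: each pattern argument is replaced by a plain `__arg_N` binding, and the original pattern is re-introduced as a `let` inside the generated closure so that every argument is moved by value. A hedged illustration of just that transformation on stable Rust, with the generator replaced by an ordinary closure (the names and the simplified return type are assumptions for the sketch, not what the macro literally emits):

```rust
// Original shape the macro starts from:
//
//     fn foo(ref a: u32) -> u32 { *a + 1 }
//
// Conceptual output: the pattern is rebound inside a moved closure.
fn foo(__arg_0: u32) -> impl FnOnce() -> u32 {
    move || {
        let ref a = __arg_0; // pattern restored inside the moved closure
        *a + 1
    }
}

fn main() {
    assert_eq!(foo(41)(), 42);
}
```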
c1ff0f985cbaaa732e652aa16010b750bc94de80
37,292
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Code for projecting associated types out of trait references. use super::elaborate_predicates; use super::report_overflow_error; use super::Obligation; use super::ObligationCause; use super::PredicateObligation; use super::SelectionContext; use super::SelectionError; use super::VtableClosureData; use super::VtableImplData; use super::util; use middle::infer; use middle::subst::Subst; use middle::ty::{self, ToPredicate, RegionEscape, HasTypeFlags, ToPolyTraitRef, Ty}; use middle::ty::fold::{TypeFoldable, TypeFolder}; use syntax::parse::token; use util::common::FN_OUTPUT_NAME; pub type PolyProjectionObligation<'tcx> = Obligation<'tcx, ty::PolyProjectionPredicate<'tcx>>; pub type ProjectionObligation<'tcx> = Obligation<'tcx, ty::ProjectionPredicate<'tcx>>; pub type ProjectionTyObligation<'tcx> = Obligation<'tcx, ty::ProjectionTy<'tcx>>; /// When attempting to resolve `<T as TraitRef>::Name` ... #[derive(Debug)] pub enum ProjectionTyError<'tcx> { /// ...we found multiple sources of information and couldn't resolve the ambiguity. TooManyCandidates, /// ...an error occurred matching `T : TraitRef` TraitSelectionError(SelectionError<'tcx>), } #[derive(Clone)] pub struct MismatchedProjectionTypes<'tcx> { pub err: ty::error::TypeError<'tcx> } #[derive(PartialEq, Eq, Debug)] enum ProjectionTyCandidate<'tcx> { // from a where-clause in the env or object type ParamEnv(ty::PolyProjectionPredicate<'tcx>), // from the definition of `Trait` when you have something like <<A as Trait>::B as Trait2>::C TraitDef(ty::PolyProjectionPredicate<'tcx>), // defined in an impl Impl(VtableImplData<'tcx, PredicateObligation<'tcx>>), // closure return type Closure(VtableClosureData<'tcx, PredicateObligation<'tcx>>), // fn pointer return type FnPointer(Ty<'tcx>), } struct ProjectionTyCandidateSet<'tcx> { vec: Vec<ProjectionTyCandidate<'tcx>>, ambiguous: bool } /// Evaluates constraints of the form: /// /// for<...> <T as Trait>::U == V /// /// If successful, this may result in additional obligations. pub fn poly_project_and_unify_type<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &PolyProjectionObligation<'tcx>) -> Result<Option<Vec<PredicateObligation<'tcx>>>, MismatchedProjectionTypes<'tcx>> { debug!("poly_project_and_unify_type(obligation={:?})", obligation); let infcx = selcx.infcx(); infcx.commit_if_ok(|snapshot| { let (skol_predicate, skol_map) = infcx.skolemize_late_bound_regions(&obligation.predicate, snapshot); let skol_obligation = obligation.with(skol_predicate); match project_and_unify_type(selcx, &skol_obligation) { Ok(result) => { match infcx.leak_check(&skol_map, snapshot) { Ok(()) => Ok(infcx.plug_leaks(skol_map, snapshot, &result)), Err(e) => Err(MismatchedProjectionTypes { err: e }), } } Err(e) => { Err(e) } } }) } /// Evaluates constraints of the form: /// /// <T as Trait>::U == V /// /// If successful, this may result in additional obligations. 
fn project_and_unify_type<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionObligation<'tcx>) -> Result<Option<Vec<PredicateObligation<'tcx>>>, MismatchedProjectionTypes<'tcx>> { debug!("project_and_unify_type(obligation={:?})", obligation); let Normalized { value: normalized_ty, obligations } = match opt_normalize_projection_type(selcx, obligation.predicate.projection_ty.clone(), obligation.cause.clone(), obligation.recursion_depth) { Some(n) => n, None => { consider_unification_despite_ambiguity(selcx, obligation); return Ok(None); } }; debug!("project_and_unify_type: normalized_ty={:?} obligations={:?}", normalized_ty, obligations); let infcx = selcx.infcx(); let origin = infer::RelateOutputImplTypes(obligation.cause.span); match infer::mk_eqty(infcx, true, origin, normalized_ty, obligation.predicate.ty) { Ok(()) => Ok(Some(obligations)), Err(err) => Err(MismatchedProjectionTypes { err: err }), } } fn consider_unification_despite_ambiguity<'cx,'tcx>(selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionObligation<'tcx>) { debug!("consider_unification_despite_ambiguity(obligation={:?})", obligation); let def_id = obligation.predicate.projection_ty.trait_ref.def_id; match selcx.tcx().lang_items.fn_trait_kind(def_id) { Some(_) => { } None => { return; } } let infcx = selcx.infcx(); let self_ty = obligation.predicate.projection_ty.trait_ref.self_ty(); let self_ty = infcx.shallow_resolve(self_ty); debug!("consider_unification_despite_ambiguity: self_ty.sty={:?}", self_ty.sty); match self_ty.sty { ty::TyClosure(closure_def_id, ref substs) => { let closure_typer = selcx.closure_typer(); let closure_type = closure_typer.closure_type(closure_def_id, substs); let ty::Binder((_, ret_type)) = util::closure_trait_ref_and_return_type(infcx.tcx, def_id, self_ty, &closure_type.sig, util::TupleArgumentsFlag::No); // We don't have to normalize the return type here - this is only // reached for TyClosure: Fn inputs where the closure kind is // still unknown, which should only occur in typeck where the // closure type is already normalized. let (ret_type, _) = infcx.replace_late_bound_regions_with_fresh_var( obligation.cause.span, infer::AssocTypeProjection(obligation.predicate.projection_ty.item_name), &ty::Binder(ret_type)); debug!("consider_unification_despite_ambiguity: ret_type={:?}", ret_type); let origin = infer::RelateOutputImplTypes(obligation.cause.span); let obligation_ty = obligation.predicate.ty; match infer::mk_eqty(infcx, true, origin, obligation_ty, ret_type) { Ok(()) => { } Err(_) => { /* ignore errors */ } } } _ => { } } } /// Normalizes any associated type projections in `value`, replacing /// them with a fully resolved type where possible. The return value /// combines the normalized result and any additional obligations that /// were incurred as result. pub fn normalize<'a,'b,'tcx,T>(selcx: &'a mut SelectionContext<'b,'tcx>, cause: ObligationCause<'tcx>, value: &T) -> Normalized<'tcx, T> where T : TypeFoldable<'tcx> + HasTypeFlags { normalize_with_depth(selcx, cause, 0, value) } /// As `normalize`, but with a custom depth. 
pub fn normalize_with_depth<'a,'b,'tcx,T>(selcx: &'a mut SelectionContext<'b,'tcx>, cause: ObligationCause<'tcx>, depth: usize, value: &T) -> Normalized<'tcx, T> where T : TypeFoldable<'tcx> + HasTypeFlags { let mut normalizer = AssociatedTypeNormalizer::new(selcx, cause, depth); let result = normalizer.fold(value); Normalized { value: result, obligations: normalizer.obligations, } } struct AssociatedTypeNormalizer<'a,'b:'a,'tcx:'b> { selcx: &'a mut SelectionContext<'b,'tcx>, cause: ObligationCause<'tcx>, obligations: Vec<PredicateObligation<'tcx>>, depth: usize, } impl<'a,'b,'tcx> AssociatedTypeNormalizer<'a,'b,'tcx> { fn new(selcx: &'a mut SelectionContext<'b,'tcx>, cause: ObligationCause<'tcx>, depth: usize) -> AssociatedTypeNormalizer<'a,'b,'tcx> { AssociatedTypeNormalizer { selcx: selcx, cause: cause, obligations: vec!(), depth: depth, } } fn fold<T:TypeFoldable<'tcx> + HasTypeFlags>(&mut self, value: &T) -> T { let value = self.selcx.infcx().resolve_type_vars_if_possible(value); if !value.has_projection_types() { value.clone() } else { value.fold_with(self) } } } impl<'a,'b,'tcx> TypeFolder<'tcx> for AssociatedTypeNormalizer<'a,'b,'tcx> { fn tcx(&self) -> &ty::ctxt<'tcx> { self.selcx.tcx() } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { // We don't want to normalize associated types that occur inside of region // binders, because they may contain bound regions, and we can't cope with that. // // Example: // // for<'a> fn(<T as Foo<&'a>>::A) // // Instead of normalizing `<T as Foo<&'a>>::A` here, we'll // normalize it when we instantiate those bound regions (which // should occur eventually). let ty = ty::fold::super_fold_ty(self, ty); match ty.sty { ty::TyProjection(ref data) if !data.has_escaping_regions() => { // (*) // (*) This is kind of hacky -- we need to be able to // handle normalization within binders because // otherwise we wind up a need to normalize when doing // trait matching (since you can have a trait // obligation like `for<'a> T::B : Fn(&'a int)`), but // we can't normalize with bound regions in scope. So // far now we just ignore binders but only normalize // if all bound regions are gone (and then we still // have to renormalize whenever we instantiate a // binder). It would be better to normalize in a // binding-aware fashion. let Normalized { value: ty, obligations } = normalize_projection_type(self.selcx, data.clone(), self.cause.clone(), self.depth); self.obligations.extend(obligations); ty } _ => { ty } } } } #[derive(Clone)] pub struct Normalized<'tcx,T> { pub value: T, pub obligations: Vec<PredicateObligation<'tcx>>, } pub type NormalizedTy<'tcx> = Normalized<'tcx, Ty<'tcx>>; impl<'tcx,T> Normalized<'tcx,T> { pub fn with<U>(self, value: U) -> Normalized<'tcx,U> { Normalized { value: value, obligations: self.obligations } } } /// The guts of `normalize`: normalize a specific projection like `<T /// as Trait>::Item`. The result is always a type (and possibly /// additional obligations). If ambiguity arises, which implies that /// there are unresolved type variables in the projection, we will /// substitute a fresh type variable `$X` and generate a new /// obligation `<T as Trait>::Item == $X` for later. 
pub fn normalize_projection_type<'a,'b,'tcx>( selcx: &'a mut SelectionContext<'b,'tcx>, projection_ty: ty::ProjectionTy<'tcx>, cause: ObligationCause<'tcx>, depth: usize) -> NormalizedTy<'tcx> { opt_normalize_projection_type(selcx, projection_ty.clone(), cause.clone(), depth) .unwrap_or_else(move || { // if we bottom out in ambiguity, create a type variable // and a deferred predicate to resolve this when more type // information is available. let ty_var = selcx.infcx().next_ty_var(); let projection = ty::Binder(ty::ProjectionPredicate { projection_ty: projection_ty, ty: ty_var }); let obligation = Obligation::with_depth( cause, depth + 1, projection.to_predicate()); Normalized { value: ty_var, obligations: vec!(obligation) } }) } /// The guts of `normalize`: normalize a specific projection like `<T /// as Trait>::Item`. The result is always a type (and possibly /// additional obligations). Returns `None` in the case of ambiguity, /// which indicates that there are unbound type variables. fn opt_normalize_projection_type<'a,'b,'tcx>( selcx: &'a mut SelectionContext<'b,'tcx>, projection_ty: ty::ProjectionTy<'tcx>, cause: ObligationCause<'tcx>, depth: usize) -> Option<NormalizedTy<'tcx>> { debug!("normalize_projection_type(\ projection_ty={:?}, \ depth={})", projection_ty, depth); let obligation = Obligation::with_depth(cause.clone(), depth, projection_ty.clone()); match project_type(selcx, &obligation) { Ok(ProjectedTy::Progress(projected_ty, mut obligations)) => { // if projection succeeded, then what we get out of this // is also non-normalized (consider: it was derived from // an impl, where-clause etc) and hence we must // re-normalize it debug!("normalize_projection_type: projected_ty={:?} depth={} obligations={:?}", projected_ty, depth, obligations); if projected_ty.has_projection_types() { let mut normalizer = AssociatedTypeNormalizer::new(selcx, cause, depth+1); let normalized_ty = normalizer.fold(&projected_ty); debug!("normalize_projection_type: normalized_ty={:?} depth={}", normalized_ty, depth); obligations.extend(normalizer.obligations); Some(Normalized { value: normalized_ty, obligations: obligations, }) } else { Some(Normalized { value: projected_ty, obligations: obligations, }) } } Ok(ProjectedTy::NoProgress(projected_ty)) => { debug!("normalize_projection_type: projected_ty={:?} no progress", projected_ty); Some(Normalized { value: projected_ty, obligations: vec!() }) } Err(ProjectionTyError::TooManyCandidates) => { debug!("normalize_projection_type: too many candidates"); None } Err(ProjectionTyError::TraitSelectionError(_)) => { debug!("normalize_projection_type: ERROR"); // if we got an error processing the `T as Trait` part, // just return `ty::err` but add the obligation `T : // Trait`, which when processed will cause the error to be // reported later Some(normalize_to_error(selcx, projection_ty, cause, depth)) } } } /// in various error cases, we just set TyError and return an obligation /// that, when fulfilled, will lead to an error. /// /// FIXME: the TyError created here can enter the obligation we create, /// leading to error messages involving TyError. 
fn normalize_to_error<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>, projection_ty: ty::ProjectionTy<'tcx>, cause: ObligationCause<'tcx>, depth: usize) -> NormalizedTy<'tcx> { let trait_ref = projection_ty.trait_ref.to_poly_trait_ref(); let trait_obligation = Obligation { cause: cause, recursion_depth: depth, predicate: trait_ref.to_predicate() }; Normalized { value: selcx.tcx().types.err, obligations: vec!(trait_obligation) } } enum ProjectedTy<'tcx> { Progress(Ty<'tcx>, Vec<PredicateObligation<'tcx>>), NoProgress(Ty<'tcx>), } /// Compute the result of a projection type (if we can). fn project_type<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionTyObligation<'tcx>) -> Result<ProjectedTy<'tcx>, ProjectionTyError<'tcx>> { debug!("project(obligation={:?})", obligation); let recursion_limit = selcx.tcx().sess.recursion_limit.get(); if obligation.recursion_depth >= recursion_limit { debug!("project: overflow!"); report_overflow_error(selcx.infcx(), &obligation); } let obligation_trait_ref = selcx.infcx().resolve_type_vars_if_possible(&obligation.predicate.trait_ref); debug!("project: obligation_trait_ref={:?}", obligation_trait_ref); if obligation_trait_ref.references_error() { return Ok(ProjectedTy::Progress(selcx.tcx().types.err, vec!())); } let mut candidates = ProjectionTyCandidateSet { vec: Vec::new(), ambiguous: false, }; assemble_candidates_from_param_env(selcx, obligation, &obligation_trait_ref, &mut candidates); assemble_candidates_from_trait_def(selcx, obligation, &obligation_trait_ref, &mut candidates); if let Err(e) = assemble_candidates_from_impls(selcx, obligation, &obligation_trait_ref, &mut candidates) { return Err(ProjectionTyError::TraitSelectionError(e)); } debug!("{} candidates, ambiguous={}", candidates.vec.len(), candidates.ambiguous); // Inherent ambiguity that prevents us from even enumerating the // candidates. if candidates.ambiguous { return Err(ProjectionTyError::TooManyCandidates); } // Drop duplicates. // // Note: `candidates.vec` seems to be on the critical path of the // compiler. Replacing it with an hash set was also tried, which would // render the following dedup unnecessary. It led to cleaner code but // prolonged compiling time of `librustc` from 5m30s to 6m in one test, or // ~9% performance lost. if candidates.vec.len() > 1 { let mut i = 0; while i < candidates.vec.len() { let has_dup = (0..i).any(|j| candidates.vec[i] == candidates.vec[j]); if has_dup { candidates.vec.swap_remove(i); } else { i += 1; } } } // Prefer where-clauses. As in select, if there are multiple // candidates, we prefer where-clause candidates over impls. This // may seem a bit surprising, since impls are the source of // "truth" in some sense, but in fact some of the impls that SEEM // applicable are not, because of nested obligations. Where // clauses are the safer choice. See the comment on // `select::SelectionCandidate` and #21974 for more details. if candidates.vec.len() > 1 { debug!("retaining param-env candidates only from {:?}", candidates.vec); candidates.vec.retain(|c| match *c { ProjectionTyCandidate::ParamEnv(..) => true, ProjectionTyCandidate::Impl(..) | ProjectionTyCandidate::Closure(..) | ProjectionTyCandidate::TraitDef(..) | ProjectionTyCandidate::FnPointer(..) 
=> false, }); debug!("resulting candidate set: {:?}", candidates.vec); if candidates.vec.len() != 1 { return Err(ProjectionTyError::TooManyCandidates); } } assert!(candidates.vec.len() <= 1); match candidates.vec.pop() { Some(candidate) => { let (ty, obligations) = confirm_candidate(selcx, obligation, candidate); Ok(ProjectedTy::Progress(ty, obligations)) } None => { Ok(ProjectedTy::NoProgress(selcx.tcx().mk_projection( obligation.predicate.trait_ref.clone(), obligation.predicate.item_name))) } } } /// The first thing we have to do is scan through the parameter /// environment to see whether there are any projection predicates /// there that can answer this question. fn assemble_candidates_from_param_env<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, candidate_set: &mut ProjectionTyCandidateSet<'tcx>) { debug!("assemble_candidates_from_param_env(..)"); let env_predicates = selcx.param_env().caller_bounds.iter().cloned(); assemble_candidates_from_predicates(selcx, obligation, obligation_trait_ref, candidate_set, ProjectionTyCandidate::ParamEnv, env_predicates); } /// In the case of a nested projection like <<A as Foo>::FooT as Bar>::BarT, we may find /// that the definition of `Foo` has some clues: /// /// ``` /// trait Foo { /// type FooT : Bar<BarT=i32> /// } /// ``` /// /// Here, for example, we could conclude that the result is `i32`. fn assemble_candidates_from_trait_def<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, candidate_set: &mut ProjectionTyCandidateSet<'tcx>) { debug!("assemble_candidates_from_trait_def(..)"); // Check whether the self-type is itself a projection. let trait_ref = match obligation_trait_ref.self_ty().sty { ty::TyProjection(ref data) => data.trait_ref.clone(), ty::TyInfer(ty::TyVar(_)) => { // If the self-type is an inference variable, then it MAY wind up // being a projected type, so induce an ambiguity. candidate_set.ambiguous = true; return; } _ => { return; } }; // If so, extract what we know from the trait and try to come up with a good answer. 
let trait_predicates = selcx.tcx().lookup_predicates(trait_ref.def_id); let bounds = trait_predicates.instantiate(selcx.tcx(), trait_ref.substs); let bounds = elaborate_predicates(selcx.tcx(), bounds.predicates.into_vec()); assemble_candidates_from_predicates(selcx, obligation, obligation_trait_ref, candidate_set, ProjectionTyCandidate::TraitDef, bounds) } fn assemble_candidates_from_predicates<'cx,'tcx,I>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, candidate_set: &mut ProjectionTyCandidateSet<'tcx>, ctor: fn(ty::PolyProjectionPredicate<'tcx>) -> ProjectionTyCandidate<'tcx>, env_predicates: I) where I: Iterator<Item=ty::Predicate<'tcx>> { debug!("assemble_candidates_from_predicates(obligation={:?})", obligation); let infcx = selcx.infcx(); for predicate in env_predicates { debug!("assemble_candidates_from_predicates: predicate={:?}", predicate); match predicate { ty::Predicate::Projection(ref data) => { let same_name = data.item_name() == obligation.predicate.item_name; let is_match = same_name && infcx.probe(|_| { let origin = infer::Misc(obligation.cause.span); let data_poly_trait_ref = data.to_poly_trait_ref(); let obligation_poly_trait_ref = obligation_trait_ref.to_poly_trait_ref(); infcx.sub_poly_trait_refs(false, origin, data_poly_trait_ref, obligation_poly_trait_ref).is_ok() }); debug!("assemble_candidates_from_predicates: candidate={:?} \ is_match={} same_name={}", data, is_match, same_name); if is_match { candidate_set.vec.push(ctor(data.clone())); } } _ => { } } } } fn assemble_candidates_from_object_type<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, candidate_set: &mut ProjectionTyCandidateSet<'tcx>) { let self_ty = obligation_trait_ref.self_ty(); let object_ty = selcx.infcx().shallow_resolve(self_ty); debug!("assemble_candidates_from_object_type(object_ty={:?})", object_ty); let data = match object_ty.sty { ty::TyTrait(ref data) => data, _ => { selcx.tcx().sess.span_bug( obligation.cause.span, &format!("assemble_candidates_from_object_type called with non-object: {:?}", object_ty)); } }; let projection_bounds = data.projection_bounds_with_self_ty(selcx.tcx(), object_ty); let env_predicates = projection_bounds.iter() .map(|p| p.to_predicate()) .collect(); let env_predicates = elaborate_predicates(selcx.tcx(), env_predicates); assemble_candidates_from_predicates(selcx, obligation, obligation_trait_ref, candidate_set, ProjectionTyCandidate::ParamEnv, env_predicates) } fn assemble_candidates_from_impls<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, candidate_set: &mut ProjectionTyCandidateSet<'tcx>) -> Result<(), SelectionError<'tcx>> { // If we are resolving `<T as TraitRef<...>>::Item == Type`, // start out by selecting the predicate `T as TraitRef<...>`: let poly_trait_ref = obligation_trait_ref.to_poly_trait_ref(); let trait_obligation = obligation.with(poly_trait_ref.to_poly_trait_predicate()); let vtable = match selcx.select(&trait_obligation) { Ok(Some(vtable)) => vtable, Ok(None) => { candidate_set.ambiguous = true; return Ok(()); } Err(e) => { debug!("assemble_candidates_from_impls: selection error {:?}", e); return Err(e); } }; match vtable { super::VtableImpl(data) => { debug!("assemble_candidates_from_impls: impl candidate {:?}", data); candidate_set.vec.push( ProjectionTyCandidate::Impl(data)); } 
super::VtableObject(_) => { assemble_candidates_from_object_type( selcx, obligation, obligation_trait_ref, candidate_set); } super::VtableClosure(data) => { candidate_set.vec.push( ProjectionTyCandidate::Closure(data)); } super::VtableFnPointer(fn_type) => { candidate_set.vec.push( ProjectionTyCandidate::FnPointer(fn_type)); } super::VtableParam(..) => { // This case tell us nothing about the value of an // associated type. Consider: // // ``` // trait SomeTrait { type Foo; } // fn foo<T:SomeTrait>(...) { } // ``` // // If the user writes `<T as SomeTrait>::Foo`, then the `T // : SomeTrait` binding does not help us decide what the // type `Foo` is (at least, not more specifically than // what we already knew). // // But wait, you say! What about an example like this: // // ``` // fn bar<T:SomeTrait<Foo=usize>>(...) { ... } // ``` // // Doesn't the `T : Sometrait<Foo=usize>` predicate help // resolve `T::Foo`? And of course it does, but in fact // that single predicate is desugared into two predicates // in the compiler: a trait predicate (`T : SomeTrait`) and a // projection. And the projection where clause is handled // in `assemble_candidates_from_param_env`. } super::VtableDefaultImpl(..) | super::VtableBuiltin(..) => { // These traits have no associated types. selcx.tcx().sess.span_bug( obligation.cause.span, &format!("Cannot project an associated type from `{:?}`", vtable)); } } Ok(()) } fn confirm_candidate<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionTyObligation<'tcx>, candidate: ProjectionTyCandidate<'tcx>) -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>) { debug!("confirm_candidate(candidate={:?}, obligation={:?})", candidate, obligation); match candidate { ProjectionTyCandidate::ParamEnv(poly_projection) | ProjectionTyCandidate::TraitDef(poly_projection) => { confirm_param_env_candidate(selcx, obligation, poly_projection) } ProjectionTyCandidate::Impl(impl_vtable) => { confirm_impl_candidate(selcx, obligation, impl_vtable) } ProjectionTyCandidate::Closure(closure_vtable) => { confirm_closure_candidate(selcx, obligation, closure_vtable) } ProjectionTyCandidate::FnPointer(fn_type) => { confirm_fn_pointer_candidate(selcx, obligation, fn_type) } } } fn confirm_fn_pointer_candidate<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionTyObligation<'tcx>, fn_type: Ty<'tcx>) -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>) { let fn_type = selcx.infcx().shallow_resolve(fn_type); let sig = fn_type.fn_sig(); confirm_callable_candidate(selcx, obligation, sig, util::TupleArgumentsFlag::Yes) } fn confirm_closure_candidate<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionTyObligation<'tcx>, vtable: VtableClosureData<'tcx, PredicateObligation<'tcx>>) -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>) { let closure_typer = selcx.closure_typer(); let closure_type = closure_typer.closure_type(vtable.closure_def_id, &vtable.substs); let Normalized { value: closure_type, mut obligations } = normalize_with_depth(selcx, obligation.cause.clone(), obligation.recursion_depth+1, &closure_type); let (ty, mut cc_obligations) = confirm_callable_candidate(selcx, obligation, &closure_type.sig, util::TupleArgumentsFlag::No); obligations.append(&mut cc_obligations); (ty, obligations) } fn confirm_callable_candidate<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionTyObligation<'tcx>, fn_sig: &ty::PolyFnSig<'tcx>, flag: util::TupleArgumentsFlag) -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>) { let tcx = selcx.tcx(); 
debug!("confirm_callable_candidate({:?},{:?})", obligation, fn_sig); // the `Output` associated type is declared on `FnOnce` let fn_once_def_id = tcx.lang_items.fn_once_trait().unwrap(); // Note: we unwrap the binder here but re-create it below (1) let ty::Binder((trait_ref, ret_type)) = util::closure_trait_ref_and_return_type(tcx, fn_once_def_id, obligation.predicate.trait_ref.self_ty(), fn_sig, flag); let predicate = ty::Binder(ty::ProjectionPredicate { // (1) recreate binder here projection_ty: ty::ProjectionTy { trait_ref: trait_ref, item_name: token::intern(FN_OUTPUT_NAME), }, ty: ret_type }); confirm_param_env_candidate(selcx, obligation, predicate) } fn confirm_param_env_candidate<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionTyObligation<'tcx>, poly_projection: ty::PolyProjectionPredicate<'tcx>) -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>) { let infcx = selcx.infcx(); let projection = infcx.replace_late_bound_regions_with_fresh_var( obligation.cause.span, infer::LateBoundRegionConversionTime::HigherRankedType, &poly_projection).0; assert_eq!(projection.projection_ty.item_name, obligation.predicate.item_name); let origin = infer::RelateOutputImplTypes(obligation.cause.span); match infcx.sub_trait_refs(false, origin, obligation.predicate.trait_ref.clone(), projection.projection_ty.trait_ref.clone()) { Ok(()) => { } Err(e) => { selcx.tcx().sess.span_bug( obligation.cause.span, &format!("Failed to unify `{:?}` and `{:?}` in projection: {}", obligation, projection, e)); } } (projection.ty, vec!()) } fn confirm_impl_candidate<'cx,'tcx>( selcx: &mut SelectionContext<'cx,'tcx>, obligation: &ProjectionTyObligation<'tcx>, impl_vtable: VtableImplData<'tcx, PredicateObligation<'tcx>>) -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>) { // there don't seem to be nicer accessors to these: let impl_or_trait_items_map = selcx.tcx().impl_or_trait_items.borrow(); // Look for the associated type in the impl for impl_item in &selcx.tcx().impl_items.borrow()[&impl_vtable.impl_def_id] { if let ty::TypeTraitItem(ref assoc_ty) = impl_or_trait_items_map[&impl_item.def_id()] { if assoc_ty.name == obligation.predicate.item_name { return (assoc_ty.ty.unwrap().subst(selcx.tcx(), &impl_vtable.substs), impl_vtable.nested); } } } // It is not in the impl - get the default from the trait. let trait_ref = obligation.predicate.trait_ref; for trait_item in selcx.tcx().trait_items(trait_ref.def_id).iter() { if let &ty::TypeTraitItem(ref assoc_ty) = trait_item { if assoc_ty.name == obligation.predicate.item_name { if let Some(ty) = assoc_ty.ty { return (ty.subst(selcx.tcx(), trait_ref.substs), impl_vtable.nested); } else { // This means that the impl is missing a // definition for the associated type. This error // ought to be reported by the type checker method // `check_impl_items_against_trait`, so here we // just return TyError. debug!("confirm_impl_candidate: no associated type {:?} for {:?}", assoc_ty.name, trait_ref); return (selcx.tcx().types.err, vec!()); } } } } selcx.tcx().sess.span_bug(obligation.cause.span, &format!("No associated type for {:?}", trait_ref)); }
38.485036
97
0.570122
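For orientation, the two candidate sources that dominate the compiler code above correspond to ordinary surface Rust: an impl can fix an associated type (the `Impl` candidate), and a caller's `where`-clause can fix it as a projection predicate (the `ParamEnv` candidate). A small illustrative program, unrelated to the compiler internals themselves:

```rust
trait Counter {
    type Item;
    fn next_value(&mut self) -> Self::Item;
}

struct Ticker(u32);

// Impl candidate: `<Ticker as Counter>::Item` normalizes to `u32`
// because this impl defines the associated type.
impl Counter for Ticker {
    type Item = u32;
    fn next_value(&mut self) -> u32 {
        self.0 += 1;
        self.0
    }
}

// Param-env candidate: the bound `C: Counter<Item = u32>` desugars into a
// trait predicate plus a projection predicate, so `C::Item` is known to be
// `u32` inside the body without consulting any particular impl.
fn total<C: Counter<Item = u32>>(c: &mut C, n: u32) -> u32 {
    (0..n).map(|_| c.next_value()).sum()
}

fn main() {
    let mut ticker = Ticker(0);
    assert_eq!(total(&mut ticker, 3), 6); // 1 + 2 + 3
}
```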
bffdab8aa480bdd694146970a5d3f657c45cea8f
3,186
use crate::{args::DeployErc20RepresentationOpts, utils::TIMEOUT}; use cosmos_gravity::query::get_gravity_params; use ethereum_gravity::deploy_erc20::deploy_erc20; use gravity_proto::gravity::QueryDenomToErc20Request; use gravity_utils::{ connection_prep::{check_for_eth, create_rpc_connections}, error::GravityError, }; use std::time::Duration; use tokio::time::sleep; use web30::types::SendTxOption; pub async fn deploy_erc20_representation( args: DeployErc20RepresentationOpts, address_prefix: String, ) -> Result<(), GravityError> { let grpc_url = args.cosmos_grpc; let ethereum_rpc = args.ethereum_rpc; let ethereum_key = args.ethereum_key; let denom = args.cosmos_denom; let connections = create_rpc_connections(address_prefix, Some(grpc_url), Some(ethereum_rpc), TIMEOUT).await; let web3 = connections.web3.unwrap(); let mut grpc = connections.grpc.unwrap(); let ethereum_public_key = ethereum_key.to_address(); check_for_eth(ethereum_public_key, &web3).await?; let contract_address = if let Some(c) = args.gravity_contract_address { c } else { let params = get_gravity_params(&mut grpc).await.unwrap(); let c = params.bridge_ethereum_address.parse(); if c.is_err() { return Err(GravityError::UnrecoverableError( "The Gravity address is not yet set as a chain parameter! You must specify --gravity-contract-address".into(), )); } c.unwrap() }; let res = grpc .denom_to_erc20(QueryDenomToErc20Request { denom: denom.clone(), }) .await; if let Ok(val) = res { let erc20 = val.into_inner().erc20; return Err(GravityError::UnrecoverableError(format!( "Asset {} already has ERC20 representation {}", denom, erc20 ))); } info!("Starting deploy of ERC20"); let res = deploy_erc20( denom.clone(), args.erc20_name, args.erc20_symbol, args.erc20_decimals, contract_address, &web3, Some(TIMEOUT), ethereum_key, vec![SendTxOption::GasPriceMultiplier(1.5)], ) .await .unwrap(); info!("We have deployed ERC20 contract {:#066x}, waiting to see if the Cosmos chain choses to adopt it", res); let keep_querying_for_erc20 = async { loop { let res = grpc .denom_to_erc20(QueryDenomToErc20Request { denom: denom.clone(), }) .await; if let Ok(val) = res { info!( "Asset {} has accepted new ERC20 representation {}", denom, val.into_inner().erc20 ); break; } sleep(Duration::from_secs(1)).await; } }; match tokio::time::timeout(Duration::from_secs(100), keep_querying_for_erc20).await { Ok(_) => Ok(()), Err(_) => Err(GravityError::UnrecoverableError( "Your ERC20 contract was not adopted, double check the metadata and try again".into(), )), } }
31.544554
126
0.603892
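The adoption check at the end of the function above is a reusable pattern: poll an async query in a loop, sleeping between attempts, and bound the whole loop with `tokio::time::timeout`. Stripped of the Gravity-specific gRPC types, the pattern looks roughly like this (the `is_adopted` stand-in and the tokio `rt`, `macros`, and `time` features are assumptions of the sketch):

```rust
use std::time::Duration;
use tokio::time::{sleep, timeout};

// Stand-in for the `denom_to_erc20` gRPC query polled above.
async fn is_adopted(attempt: u32) -> bool {
    attempt >= 3
}

#[tokio::main]
async fn main() {
    let keep_querying = async {
        let mut attempt = 0;
        loop {
            if is_adopted(attempt).await {
                break attempt; // resolved: stop polling
            }
            attempt += 1;
            sleep(Duration::from_millis(10)).await;
        }
    };

    // Give up if the condition is not observed within the deadline.
    match timeout(Duration::from_secs(1), keep_querying).await {
        Ok(attempts) => println!("adopted after {} attempts", attempts),
        Err(_) => eprintln!("not adopted before the deadline"),
    }
}
```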
e9b64b231072147e97b2e0b69ecc09087df19b7b
18,589
//! Common traits for input backends to receive input from. use std::{error::Error, string::ToString}; /// A seat describes a group of input devices and at least one /// graphics device belonging together. /// /// By default only one seat exists for most systems and smithay backends /// however multiseat configurations are possible and should be treated as /// separated users, all with their own focus, input and cursor available. /// /// Seats referring to the same internal id will always be equal and result in the same /// hash, but capabilities of cloned and copied [`Seat`]s will not be updated by smithay. /// Always refer to the [`Seat`] given by a callback for up-to-date information. You may /// use this to calculate the differences since the last callback. #[derive(Debug, Clone, Eq)] pub struct Seat { id: u64, name: String, capabilities: SeatCapabilities, } impl Seat { pub(crate) fn new<S: ToString>(id: u64, name: S, capabilities: SeatCapabilities) -> Seat { Seat { id, name: name.to_string(), capabilities, } } pub(crate) fn capabilities_mut(&mut self) -> &mut SeatCapabilities { &mut self.capabilities } /// Get the currently capabilities of this [`Seat`] pub fn capabilities(&self) -> &SeatCapabilities { &self.capabilities } /// Get the name of this [`Seat`] pub fn name(&self) -> &str { &*self.name } } impl ::std::cmp::PartialEq for Seat { fn eq(&self, other: &Seat) -> bool { self.id == other.id } } impl ::std::hash::Hash for Seat { fn hash<H>(&self, state: &mut H) where H: ::std::hash::Hasher, { self.id.hash(state); } } /// Describes capabilities a [`Seat`] has. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct SeatCapabilities { /// [`Seat`] has a pointer pub pointer: bool, /// [`Seat`] has a keyboard pub keyboard: bool, /// [`Seat`] has a touchscreen pub touch: bool, } /// Trait for generic functions every input event does provide pub trait Event { /// Returns an upward counting variable useful for event ordering. /// /// Makes no guarantees about actual time passed between events. // # TODO: // - check if events can even arrive out of order. // - Make stronger time guarantees, if possible fn time(&self) -> u32; } /// Used to mark events never emitted by an [`InputBackend`] implementation. /// /// Implements all event types and can be used in place for any [`Event`] type, /// that is not used by an [`InputBackend`] implementation. Initialization is not /// possible, making accidental use impossible and enabling a lot of possible /// compiler optimizations. pub enum UnusedEvent {} impl Event for UnusedEvent { fn time(&self) -> u32 { match *self {} } } /// State of key on a keyboard. Either pressed or released #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum KeyState { /// Key is released Released, /// Key is pressed Pressed, } /// Trait for keyboard event pub trait KeyboardKeyEvent: Event { /// Code of the pressed key. See `linux/input-event-codes.h` fn key_code(&self) -> u32; /// State of the key fn state(&self) -> KeyState; /// Total number of keys pressed on all devices on the associated [`Seat`] fn count(&self) -> u32; } impl KeyboardKeyEvent for UnusedEvent { fn key_code(&self) -> u32 { match *self {} } fn state(&self) -> KeyState { match *self {} } fn count(&self) -> u32 { match *self {} } } /// A particular mouse button #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum MouseButton { /// Left mouse button Left, /// Middle mouse button Middle, /// Right mouse button Right, /// Other mouse button with index Other(u8), } /// State of a button on a mouse. 
Either pressed or released #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum MouseButtonState { /// Button is released Released, /// Button is pressed Pressed, } /// Common methods pointer event generated by pressed buttons do implement pub trait PointerButtonEvent: Event { /// Pressed button of the event fn button(&self) -> MouseButton; /// State of the button fn state(&self) -> MouseButtonState; } impl PointerButtonEvent for UnusedEvent { fn button(&self) -> MouseButton { match *self {} } fn state(&self) -> MouseButtonState { match *self {} } } /// Axis when scrolling #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum Axis { /// Vertical axis Vertical, /// Horizontal axis Horizontal, } /// Source of an axis when scrolling #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum AxisSource { /// Finger. Mostly used for trackpads. /// /// Guarantees that a scroll sequence is terminated with a scroll value of 0. /// A caller may use this information to decide on whether kinetic scrolling should /// be triggered on this scroll sequence. /// /// The coordinate system is identical to the /// cursor movement, i.e. a scroll value of 1 represents the equivalent relative /// motion of 1. Finger, /// Continuous scrolling device. Almost identical to [`Finger`](AxisSource::Finger) /// /// No terminating event is guaranteed (though it may happen). /// /// The coordinate system is identical to /// the cursor movement, i.e. a scroll value of 1 represents the equivalent relative /// motion of 1. Continuous, /// Scroll wheel. /// /// No terminating event is guaranteed (though it may happen). Scrolling is in /// discrete steps. It is up to the caller how to interpret such different step sizes. Wheel, /// Scrolling through tilting the scroll wheel. /// /// No terminating event is guaranteed (though it may happen). Scrolling is in /// discrete steps. It is up to the caller how to interpret such different step sizes. WheelTilt, } /// Trait for pointer events generated by scrolling on an axis. pub trait PointerAxisEvent: Event { /// Amount of scrolling in pixels on the given [`Axis`]. /// /// Guaranteed to be `Some` when source returns either [`AxisSource::Finger`] or [`AxisSource::Continuous`]. fn amount(&self, axis: Axis) -> Option<f64>; /// Amount of scrolling in discrete steps on the given [`Axis`]. /// /// Guaranteed to be `Some` when source returns either [`AxisSource::Wheel`] or [`AxisSource::WheelTilt`]. fn amount_discrete(&self, axis: Axis) -> Option<f64>; /// Source of the scroll event. fn source(&self) -> AxisSource; } impl PointerAxisEvent for UnusedEvent { fn amount(&self, _axis: Axis) -> Option<f64> { match *self {} } fn amount_discrete(&self, _axis: Axis) -> Option<f64> { match *self {} } fn source(&self) -> AxisSource { match *self {} } } /// Trait for pointer events generated by relative device movement. pub trait PointerMotionEvent: Event { /// Delta between the last and new pointer device position interpreted as pixel movement fn delta(&self) -> (i32, i32) { (self.delta_x(), self.delta_y()) } /// Delta on the x axis between the last and new pointer device position interpreted as pixel movement fn delta_x(&self) -> i32; /// Delta on the y axis between the last and new pointer device position interpreted as pixel movement fn delta_y(&self) -> i32; } impl PointerMotionEvent for UnusedEvent { fn delta_x(&self) -> i32 { match *self {} } fn delta_y(&self) -> i32 { match *self {} } } /// Trait for pointer events generated by absolute device positioning. 
pub trait PointerMotionAbsoluteEvent: Event { /// Device position in it's original coordinate space. /// /// The format is defined by the backend implementation. fn position(&self) -> (f64, f64) { (self.x(), self.y()) } /// Device x position in it's original coordinate space. /// /// The format is defined by the backend implementation. fn x(&self) -> f64; /// Device y position in it's original coordinate space. /// /// The format is defined by the backend implementation. fn y(&self) -> f64; /// Device position converted to the targets coordinate space. /// E.g. the focused output's resolution. fn position_transformed(&self, coordinate_space: (u32, u32)) -> (u32, u32) { ( self.x_transformed(coordinate_space.0), self.y_transformed(coordinate_space.1), ) } /// Device x position converted to the targets coordinate space's width. /// E.g. the focused output's width. fn x_transformed(&self, width: u32) -> u32; /// Device y position converted to the targets coordinate space's height. /// E.g. the focused output's height. fn y_transformed(&self, height: u32) -> u32; } impl PointerMotionAbsoluteEvent for UnusedEvent { fn x(&self) -> f64 { match *self {} } fn y(&self) -> f64 { match *self {} } fn x_transformed(&self, _width: u32) -> u32 { match *self {} } fn y_transformed(&self, _height: u32) -> u32 { match *self {} } } /// Slot of a different touch event. /// /// Touch events are grouped by slots, usually to identify different /// fingers on a multi-touch enabled input device. Events should only /// be interpreted in the context of other events on the same slot. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct TouchSlot { id: u64, } impl TouchSlot { pub(crate) fn new(id: u64) -> Self { TouchSlot { id } } } /// Trait for touch events starting at a given position. pub trait TouchDownEvent: Event { /// [`TouchSlot`], if the device has multi-touch capabilities fn slot(&self) -> Option<TouchSlot>; /// Touch position in the device's native coordinate space /// /// The actual format is defined by the implementation. fn position(&self) -> (f64, f64) { (self.x(), self.y()) } /// Touch position converted into the target coordinate space. /// E.g. the focused output's resolution. fn position_transformed(&self, coordinate_space: (u32, u32)) -> (u32, u32) { ( self.x_transformed(coordinate_space.0), self.y_transformed(coordinate_space.1), ) } /// Touch event's x-coordinate in the device's native coordinate space /// /// The actual format is defined by the implementation. fn x(&self) -> f64; /// Touch event's x-coordinate in the device's native coordinate space /// /// The actual format is defined by the implementation. fn y(&self) -> f64; /// Touch event's x position converted to the targets coordinate space's width. /// E.g. the focused output's width. fn x_transformed(&self, width: u32) -> u32; /// Touch event's y position converted to the targets coordinate space's width. /// E.g. the focused output's width. 
fn y_transformed(&self, height: u32) -> u32; } impl TouchDownEvent for UnusedEvent { fn slot(&self) -> Option<TouchSlot> { match *self {} } fn x(&self) -> f64 { match *self {} } fn y(&self) -> f64 { match *self {} } fn x_transformed(&self, _width: u32) -> u32 { match *self {} } fn y_transformed(&self, _height: u32) -> u32 { match *self {} } } /// Trait for touch events regarding movement on the screen pub trait TouchMotionEvent: Event { /// [`TouchSlot`], if the device has multi-touch capabilities fn slot(&self) -> Option<TouchSlot>; /// Touch position in the device's native coordinate space /// /// The actual format is defined by the implementation. fn position(&self) -> (f64, f64) { (self.x(), self.y()) } /// Touch position converted into the target coordinate space. /// E.g. the focused output's resolution. fn position_transformed(&self, coordinate_space: (u32, u32)) -> (u32, u32) { ( self.x_transformed(coordinate_space.0), self.y_transformed(coordinate_space.1), ) } /// Touch event's x-coordinate in the device's native coordinate space /// /// The actual format is defined by the implementation. fn x(&self) -> f64; /// Touch event's x-coordinate in the device's native coordinate space /// /// The actual format is defined by the implementation. fn y(&self) -> f64; /// Touch event's x position converted to the targets coordinate space's width. /// E.g. the focused output's width. fn x_transformed(&self, width: u32) -> u32; /// Touch event's y position converted to the targets coordinate space's width. /// E.g. the focused output's width. fn y_transformed(&self, height: u32) -> u32; } impl TouchMotionEvent for UnusedEvent { fn slot(&self) -> Option<TouchSlot> { match *self {} } fn x(&self) -> f64 { match *self {} } fn y(&self) -> f64 { match *self {} } fn x_transformed(&self, _width: u32) -> u32 { match *self {} } fn y_transformed(&self, _height: u32) -> u32 { match *self {} } } /// Trait for touch events finishing. pub trait TouchUpEvent: Event { /// [`TouchSlot`], if the device has multi-touch capabilities fn slot(&self) -> Option<TouchSlot>; } impl TouchUpEvent for UnusedEvent { fn slot(&self) -> Option<TouchSlot> { match *self {} } } /// Trait for touch events cancelling the chain pub trait TouchCancelEvent: Event { /// [`TouchSlot`], if the device has multi-touch capabilities fn slot(&self) -> Option<TouchSlot>; } impl TouchCancelEvent for UnusedEvent { fn slot(&self) -> Option<TouchSlot> { match *self {} } } /// Trait for touch frame events pub trait TouchFrameEvent: Event {} impl TouchFrameEvent for UnusedEvent {} /// Trait that describes objects providing a source of input events. All input backends /// need to implement this and provide the same base guarantees about the precision of /// given events. 
pub trait InputBackend: Sized { /// Type representing errors that may be returned when processing events type EventError: Error; /// Type representing keyboard events type KeyboardKeyEvent: KeyboardKeyEvent; /// Type representing axis events on pointer devices type PointerAxisEvent: PointerAxisEvent; /// Type representing button events on pointer devices type PointerButtonEvent: PointerButtonEvent; /// Type representing motion events of pointer devices type PointerMotionEvent: PointerMotionEvent; /// Type representing motion events of pointer devices type PointerMotionAbsoluteEvent: PointerMotionAbsoluteEvent; /// Type representing touch events starting type TouchDownEvent: TouchDownEvent; /// Type representing touch events ending type TouchUpEvent: TouchUpEvent; /// Type representing touch events from moving type TouchMotionEvent: TouchMotionEvent; /// Type representing cancelling of touch events type TouchCancelEvent: TouchCancelEvent; /// Type representing touch frame events type TouchFrameEvent: TouchFrameEvent; /// Special events that are custom to this backend type SpecialEvent; /// Backend-specific type allowing you to configure it type InputConfig: ?Sized; /// Get the list of currently known Seats fn seats(&self) -> Vec<Seat>; /// Access the input configuration interface fn input_config(&mut self) -> &mut Self::InputConfig; /// Processes new events of the underlying backend and drives the [`InputHandler`]. /// /// The callback can only assume its second argument to be usable if the event is /// `InputEvent::ConfigChanged`. fn dispatch_new_events<F>(&mut self, callback: F) -> Result<(), Self::EventError> where F: FnMut(InputEvent<Self>, &mut Self::InputConfig); } /// Different events that can be generated by an input backend pub enum InputEvent<B: InputBackend> { /// A new seat has been created NewSeat(Seat), /// A seat has changed SeatChanged(Seat), /// A seat has been removed SeatRemoved(Seat), /// A keyboard event occured Keyboard { /// Seat that generated the event seat: Seat, /// The keyboard event event: B::KeyboardKeyEvent, }, /// A relative pointer motion occured PointerMotion { /// Seat that generated the event seat: Seat, /// The pointer motion event event: B::PointerMotionEvent, }, /// An absolute pointer motion occures PointerMotionAbsolute { /// Seat that generated the event seat: Seat, /// The absolute pointer motion event event: B::PointerMotionAbsoluteEvent, }, /// A pointer button was pressed or released PointerButton { /// Seat that generated the event seat: Seat, /// The pointer button event event: B::PointerButtonEvent, }, /// A pointer axis was actionned PointerAxis { /// Seat that generated the event seat: Seat, /// The pointer axis event event: B::PointerAxisEvent, }, /// A new touchpoint appeared TouchDown { /// Seat that generated the event seat: Seat, /// The touch down event event: B::TouchDownEvent, }, /// A touchpoint moved TouchMotion { /// Seat that generated the event seat: Seat, /// The touch motion event event: B::TouchMotionEvent, }, /// A touchpoint was removed TouchUp { /// Seat that generated the event seat: Seat, /// The touch up event event: B::TouchUpEvent, }, /// A touch sequence was cancelled TouchCancel { /// Seat that generated the event seat: Seat, /// The touch cancel event event: B::TouchCancelEvent, }, /// A touch frame was emmited /// /// A set of two events received on the same seat between two frames should /// be interpreted as an atomic event. 
TouchFrame { /// Seat that generated the event seat: Seat, /// The touch frame event event: B::TouchFrameEvent, }, /// Special event specific to this backend Special(B::SpecialEvent), }
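// --- Editor's sketch (not part of the original source): one way a caller might
// --- drain events from an `InputBackend`. Only items visible in this file are
// --- used; the backend type parameter `B` and the 1920x1080 target space are
// --- assumptions made purely for illustration.
fn drain_touch_motion<B: InputBackend>(backend: &mut B) -> Result<(), B::EventError> {
    backend.dispatch_new_events(|event, _config| {
        if let InputEvent::TouchMotion { seat: _, event } = event {
            // `position_transformed` maps the device-native coordinates into the
            // given target space (here an assumed 1920x1080 output).
            let (x, y) = event.position_transformed((1920, 1080));
            let _ = (x, y, event.slot());
        }
    })
}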
29.837881
112
0.638334
d521a09f01aca5a5288e20f24d005fc9e7a62e19
4,014
// This file is part of file-descriptors. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/file-descriptors/master/COPYRIGHT. No part of file-descriptors, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2019 The developers of file-descriptors. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/file-descriptors/master/COPYRIGHT. /// Miscellaneous control mode flags. #[derive(EnumIter, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(not(any(target_os = "ios", target_os = "macos")), repr(u32))] #[cfg_attr(all(any(target_os = "ios", target_os = "macos"), target_pointer_width = "32"), repr(u32))] #[cfg_attr(all(any(target_os = "ios", target_os = "macos"), target_pointer_width = "64"), repr(u64))] pub enum MiscellaneousControlModeFlag { /// Enable receiver. /// /// If set, the receiver is enabled, and characters can be received. EnableReceiver = CREAD, /// Hang up on last close. /// /// If set, the modem control lines are lowered (ie, the modem connection is broken) when the last process closes the device. HangUpOnLastClose = HUPCL, /// Ignore modem status lines (actually, ignores only the `CD` signal). /// /// This usually means that the device is directly attached. /// When this flag is not set, an open of a terminal device usually blocks until the modem answers a call and establishes a connection, for example. IgnoreModemStatusLines = CLOCAL, /// Ignore control flags. #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "openbsd"))] Ignore = CIGNORE, /// Enable hardware flow control of the input and output, using the Clear-To-Send (`CTS`) and Request-To-Send (`RTS`) RS-232 signals. /// /// Can be separated into `RequestToSendFlowControlOfInput` and `ClearToSendFlowControlOfOutput` on some platforms. RequestToSendClearToSendFlowControlOfInputAndOutput = CRTSCTS, /// Enable hardware flow control of the output using the Clear-To-Send (`CTS`) RS-232 signal. #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos"))] ClearToSendFlowControlOfOutput = CCTS_OFLOW, /// Enable hardware flow control of the input using the Request-To-Send (`RTS`) RS-232 signal. #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos"))] RequestToSendFlowControlOfInput = CRTS_IFLOW, /// Enable hardware flow control of the input according to the Data-Terminal-Ready (`DTR`) RS-232 signal. #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos"))] DataTerminalReadyFlowControlOfInput = CDTR_IFLOW, /// Enable hardware flow control of the output according to the Data-Set-Ready (`DSR`) RS-232 signal. #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos"))] DataSetReadyFlowControlOfOutput = CDSR_OFLOW, /// Enable hardware flow control of the output using the Data-Carrier-Detect (`DCD`, also known as `CD`) RS-232 modem carrier signal. 
#[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos"))] DataCarrierDetectFlowControlOfOutput = CCAR_OFLOW, #[cfg(target_os = "openbsd")] DataCarrierDetectFlowControlOfOutput = MDMBUF, } impl MiscellaneousControlModeFlag { /// Enable hardware flow control of the output using the Clear-To-Send (`CTS`) RS-232 signal. #[cfg(target_os = "openbsd")] pub const ClearToSendFlowControlOfOutput: Self = MiscellaneousControlModeFlag::RequestToSendClearToSendFlowControlOfInputAndOutput; /// Enable hardware flow control of the input using the Request-To-Send (`RTS`) RS-232 signal. #[cfg(target_os = "openbsd")] pub const RequestToSendFlowControlOfInput: Self = MiscellaneousControlModeFlag::RequestToSendClearToSendFlowControlOfInputAndOutput; }
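// --- Editor's sketch (not part of the original source): decode a raw control
// --- mode bitmask (e.g. a `termios` `c_cflag` value, widened to u64) into the
// --- flags defined above. It assumes the `EnumIter` derive comes from the
// --- `strum` crates, so `strum::IntoEnumIterator` provides `::iter()`.
fn flags_set_in(control_mode_bits: u64) -> Vec<MiscellaneousControlModeFlag>
{
	use strum::IntoEnumIterator;

	MiscellaneousControlModeFlag::iter()
		// Each variant's discriminant is the underlying termios constant, so a
		// bitwise AND tells us whether that flag is present in the mask.
		.filter(|flag| control_mode_bits & (*flag as u64) != 0)
		.collect()
}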
65.803279
403
0.746637
1df37cf4838f24b333399e15d763d98db32a6ab2
14,412
use crate::scheduler::{NativeScheduler, Scheduler}; use std::any::Any; use std::collections::{ btree_map::BTreeMap, btree_set::BTreeSet, vec_deque::VecDeque, HashMap, HashSet, }; use std::iter::FromIterator; use std::net::{Ipv4Addr, SocketAddrV4, TcpStream}; use std::rc::Rc; use std::sync::{ atomic::{AtomicUsize, Ordering}, Arc, }; use std::thread; use std::time; use std::time::{Duration, Instant}; use crate::dag_scheduler::{CompletionEvent, FetchFailedVals, TastEndReason}; use crate::dependency::{Dependency, ShuffleDependencyTrait}; use crate::env; use crate::error::{Error, Result}; use crate::job::{Job, JobTracker}; use crate::local_scheduler::LocalScheduler; use crate::map_output_tracker::MapOutputTracker; use crate::rdd::{Rdd, RddBase}; use crate::result_task::ResultTask; use crate::scheduler::*; use crate::serializable_traits::{Data, SerFunc}; use crate::serialized_data_capnp::serialized_data; use crate::shuffle_map_task::ShuffleMapTask; use crate::stage::Stage; use crate::task::{TaskBase, TaskContext, TaskOption, TaskResult}; use capnp::serialize_packed; use log::info; use parking_lot::Mutex; use threadpool::ThreadPool; //just for now, creating an entire scheduler functions without dag scheduler trait. Later change it to extend from dag scheduler #[derive(Clone, Default)] pub struct DistributedScheduler { threads: usize, max_failures: usize, attempt_id: Arc<AtomicUsize>, resubmit_timeout: u128, poll_timeout: u64, event_queues: Arc<Mutex<HashMap<usize, VecDeque<CompletionEvent>>>>, next_job_id: Arc<AtomicUsize>, next_run_id: Arc<AtomicUsize>, next_task_id: Arc<AtomicUsize>, next_stage_id: Arc<AtomicUsize>, stage_cache: Arc<Mutex<HashMap<usize, Stage>>>, shuffle_to_map_stage: Arc<Mutex<HashMap<usize, Stage>>>, cache_locs: Arc<Mutex<HashMap<usize, Vec<Vec<Ipv4Addr>>>>>, master: bool, framework_name: String, is_registered: bool, //TODO check if it is necessary active_jobs: HashMap<usize, Job>, active_job_queue: Vec<Job>, taskid_to_jobid: HashMap<String, usize>, taskid_to_slaveid: HashMap<String, String>, job_tasks: HashMap<usize, HashSet<String>>, slaves_with_executors: HashSet<String>, server_uris: Arc<Mutex<VecDeque<SocketAddrV4>>>, port: u16, map_output_tracker: MapOutputTracker, // TODO fix proper locking mechanism scheduler_lock: Arc<Mutex<bool>>, } impl DistributedScheduler { pub fn new( threads: usize, max_failures: usize, master: bool, servers: Option<Vec<SocketAddrV4>>, port: u16, ) -> Self { info!( "starting distributed scheduler in client - {} {}", master, port ); DistributedScheduler { // threads, threads: 100, max_failures, attempt_id: Arc::new(AtomicUsize::new(0)), resubmit_timeout: 2000, poll_timeout: 50, event_queues: Arc::new(Mutex::new(HashMap::new())), next_job_id: Arc::new(AtomicUsize::new(0)), next_run_id: Arc::new(AtomicUsize::new(0)), next_task_id: Arc::new(AtomicUsize::new(0)), next_stage_id: Arc::new(AtomicUsize::new(0)), stage_cache: Arc::new(Mutex::new(HashMap::new())), shuffle_to_map_stage: Arc::new(Mutex::new(HashMap::new())), cache_locs: Arc::new(Mutex::new(HashMap::new())), master, framework_name: "spark".to_string(), is_registered: true, //TODO check if it is necessary active_jobs: HashMap::new(), active_job_queue: Vec::new(), taskid_to_jobid: HashMap::new(), taskid_to_slaveid: HashMap::new(), job_tasks: HashMap::new(), slaves_with_executors: HashSet::new(), server_uris: if let Some(servers) = servers { Arc::new(Mutex::new(VecDeque::from_iter(servers))) } else { Arc::new(Mutex::new(VecDeque::new())) }, port, map_output_tracker: 
env::Env::get().map_output_tracker.clone(), scheduler_lock: Arc::new(Mutex::new(true)), } } fn task_ended( event_queues: Arc<Mutex<HashMap<usize, VecDeque<CompletionEvent>>>>, task: Box<dyn TaskBase>, reason: TastEndReason, result: Box<dyn Any + Send + Sync>, //TODO accumvalues needs to be done ) { let result = Some(result); if let Some(queue) = event_queues.lock().get_mut(&(task.get_run_id())) { queue.push_back(CompletionEvent { task, reason, result, accum_updates: HashMap::new(), }); } else { info!("ignoring completion event for DAG Job"); } } pub fn run_job<T: Data, U: Data, F>( &self, func: Arc<F>, final_rdd: Arc<dyn Rdd<Item = T>>, partitions: Vec<usize>, allow_local: bool, ) -> Result<Vec<U>> where F: SerFunc((TaskContext, Box<dyn Iterator<Item = T>>)) -> U, { // acquiring lock so that only one job can run a same time // this lock is just a temporary patch for preventing multiple jobs to update cache locks // which affects construction of dag task graph. dag task graph construction need to be // altered let lock = self.scheduler_lock.lock(); info!( "shuffle manager in final rdd of run job {:?}", env::Env::get().shuffle_manager ); let mut jt = JobTracker::from_scheduler(self, func, final_rdd.clone(), partitions); let mut results: Vec<Option<U>> = (0..jt.num_output_parts).map(|_| None).collect(); let mut num_finished = 0; let mut fetch_failure_duration = Duration::new(0, 0); //TODO update cache //TODO logging if allow_local { if let Some(result) = LocalScheduler::local_execution(jt.clone())? { return Ok(result); } } self.event_queues.lock().insert(jt.run_id, VecDeque::new()); self.submit_stage(jt.final_stage.clone(), jt.clone()); info!( "pending stages and tasks {:?}", jt.pending_tasks .borrow() .iter() .map(|(k, v)| (k.id, v.iter().map(|x| x.get_task_id()).collect::<Vec<_>>())) .collect::<Vec<_>>() ); while num_finished != jt.num_output_parts { let event_option = self.wait_for_event(jt.run_id, self.poll_timeout); let start_time = Instant::now(); if let Some(mut evt) = event_option { info!("event starting"); let stage = self.stage_cache.lock()[&evt.task.get_stage_id()].clone(); info!( "removing stage task from pending tasks {} {}", stage.id, evt.task.get_task_id() ); jt.pending_tasks .borrow_mut() .get_mut(&stage) .unwrap() .remove(&evt.task); use super::dag_scheduler::TastEndReason::*; match evt.reason { Success => { self.on_event_success(evt, &mut results, &mut num_finished, jt.clone()) } FetchFailed(failed_vals) => { self.on_event_failure(jt.clone(), failed_vals, evt.task.get_stage_id()); fetch_failure_duration = start_time.elapsed(); } _ => { //TODO error handling } } } if !jt.failed.borrow().is_empty() && fetch_failure_duration.as_millis() > self.resubmit_timeout { self.update_cache_locs(); for stage in jt.failed.borrow().iter() { self.submit_stage(stage.clone(), jt.clone()); } jt.failed.borrow_mut().clear(); } } self.event_queues.lock().remove(&jt.run_id); Ok(results .into_iter() .map(|s| match s { Some(v) => v, None => panic!("some results still missing"), }) .collect()) } fn wait_for_event(&self, run_id: usize, timeout: u64) -> Option<CompletionEvent> { let end = Instant::now() + Duration::from_millis(timeout); while self.event_queues.lock().get(&run_id).unwrap().is_empty() { if Instant::now() > end { return None; } else { thread::sleep(end - Instant::now()); } } self.event_queues .lock() .get_mut(&run_id) .unwrap() .pop_front() } } impl NativeScheduler for DistributedScheduler { fn submit_task<T: Data, U: Data, F>( &self, task: TaskOption, id_in_job: usize, thread_pool: 
Rc<ThreadPool>, target_executor: SocketAddrV4, ) where F: SerFunc((TaskContext, Box<dyn Iterator<Item = T>>)) -> U, { if self.master { info!("inside submit task"); let my_attempt_id = self.attempt_id.fetch_add(1, Ordering::SeqCst); let event_queues = self.event_queues.clone(); let event_queues_clone = event_queues.clone(); thread_pool.execute(move || { while let Err(_) = TcpStream::connect(&target_executor) { continue; } let ser_task = task; let task_bytes = bincode::serialize(&ser_task).unwrap(); info!( "task in executor {} {:?} master", target_executor.port(), ser_task.get_task_id() ); let mut stream = TcpStream::connect(&target_executor).unwrap(); info!( "task in executor {} {} master task len", target_executor.port(), task_bytes.len() ); let mut message = ::capnp::message::Builder::new_default(); let mut task_data = message.init_root::<serialized_data::Builder>(); info!("sending data to server"); task_data.set_msg(&task_bytes); serialize_packed::write_message(&mut stream, &message); let r = ::capnp::message::ReaderOptions { traversal_limit_in_words: std::u64::MAX, nesting_limit: 64, }; let mut stream_r = std::io::BufReader::new(&mut stream); let message_reader = serialize_packed::read_message(&mut stream_r, r).unwrap(); let task_data = message_reader .get_root::<serialized_data::Reader>() .unwrap(); info!( "task in executor {} {} master task result len", target_executor.port(), task_data.get_msg().unwrap().len() ); let result: TaskResult = bincode::deserialize(&task_data.get_msg().unwrap()).unwrap(); match ser_task { TaskOption::ResultTask(tsk) => { let result = match result { TaskResult::ResultTask(r) => r, _ => panic!("wrong result type"), }; if let Ok(task_final) = tsk.downcast::<ResultTask<T, U, F>>() { let task_final = task_final as Box<dyn TaskBase>; DistributedScheduler::task_ended( event_queues_clone, task_final, TastEndReason::Success, // Can break in future. But actually not needed for distributed scheduler since task runs on different processes. // Currently using this because local scheduler needs it. It can be solved by refactoring tasks differently for local and distributed scheduler result.into_any_send_sync(), ); } } TaskOption::ShuffleMapTask(tsk) => { let result = match result { TaskResult::ShuffleTask(r) => r, _ => panic!("wrong result type"), }; if let Ok(task_final) = tsk.downcast::<ShuffleMapTask>() { let task_final = task_final as Box<dyn TaskBase>; DistributedScheduler::task_ended( event_queues_clone, task_final, TastEndReason::Success, result.into_any_send_sync(), ); } } }; }) } } fn next_executor_server(&self, task: &dyn TaskBase) -> SocketAddrV4 { if !task.is_pinned() { // pick the first available server let socket_addrs = self.server_uris.lock().pop_back().unwrap(); self.server_uris.lock().push_front(socket_addrs); socket_addrs } else { // seek and pick the selected host let servers = &mut *self.server_uris.lock(); let location: Ipv4Addr = task.preferred_locations()[0].into(); if let Some((pos, _)) = servers .iter() .enumerate() .find(|(i, e)| *e.ip() == location) { let target_host = servers.remove(pos).unwrap(); servers.push_front(target_host.clone()); target_host } else { unreachable!() } } } impl_common_scheduler_funcs!(); }
38.741935
175
0.525396
87b2f585598fba990034d7e4a2ab87c9f67334d8
9,371
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // The google dataset 2011-v2 can be found here: https://github.com/google/cluster-data/blob/master/ClusterData2011_2.md // it is licensed under CC-BY license: https://creativecommons.org/licenses/by/4.0/ use std::collections::HashSet; use std::fs::create_dir_all; use std::fs::File; use std::io::BufRead; use std::io::BufReader; use std::io::BufWriter; use std::io::Write; use std::path::Path; use std::path::PathBuf; use std::str::FromStr; use anyhow::bail; use anyhow::Context; use anyhow::Result; use clap::Arg; use clap::Command; #[derive(Debug)] struct Node { uid: usize, cores: f32, memory: f32, } impl FromStr for Node { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self> { /* VV: The file format of machine events CSV files is 0. time,INTEGER,YES 1. machine ID,INTEGER,YES 2. event type,INTEGER,YES 3. platform ID,STRING_HASH,NO 4. CPUs,FLOAT,NO <--+ Use just these 2 5. Memory,FLOAT,NO <--| */ let toks = s.split(',').collect::<Vec<_>>(); if toks.len() != 6 { bail!("Expected a string with 6 comma separated tokens, \ but got {:?} instead.", toks); } let uid: usize = toks[1].parse() .context(format!("Unable to parse uid \"{}\" into a usize", toks[1]))?; let cores: f32 = toks[4].parse() .context(format!("Unable to parse cores \"{}\" into a f32", toks[4]))?; let memory: f32 = toks[5].parse() .context(format!("Unable to parse memory \"{}\" into a f32", toks[5]))?; Ok(Node { cores, memory, uid }) } } #[derive(Debug)] struct Arguments { input_file: PathBuf, output_file_prefix: PathBuf, fraction_memory_local: f32, fraction_memory_remote: f32, pool_population: usize, } fn parse_arguments() -> Result<Arguments> { let app = Command::new("Parse google trace machine events") .version("0.1.0") .author("Vassilis Vassiliadis") .about("Generates a nodes and connections file for dismem simulator") .arg(Arg::new("input_file") .short('i') .long("input_file") .takes_value(true) .required(true) .help("Path to the google trace .csv file to process")) .arg(Arg::new("outputFilePrefix") .short('o') .long("outputFilePrefix") .takes_value(true) .required(true) .help("Prefix to the <prefix>.nodes and <prefix>.connections paths that \ the script will generate")) .arg(Arg::new("fractionMemoryLocal") .short('l') .long("fractionMemoryLocal") .help("Fraction of memory that the original node defines which \ the resulting node should use as its local memory. Setting this \ to 1.0 and fractionRemoteMemory to 0.0 will effectively generate \ a node which is exactly as the one defined in the original trace.") .default_value("1.0")) .arg(Arg::new("fractionMemoryRemote") .short('r') .long("fractionRemoteMemory") .help("Fraction of memory that the original node defines which \ the resulting node can only use by borrowing it from the shared pool in the \ rack. 
Setting this to 1.0 and fractionLocalMemory to 0.0 will produce \ a node with 0 local memory that must always borrow memory from the pool.") .default_value("0.0")) .arg(Arg::new("poolPopulation") .short('p') .long("poolPopulation") .help("Maximum number of nodes that can borrow from a pool. The nodes are first \ sorted based on their core capacity. Then @poolPopulation of them are grouped \ together. This simulates nodes in the same rack being able to borrow memory from \ the same pool.") .default_value("24")); let args = app.get_matches(); let input_file = args.value_of("input_file").unwrap(); let output_file_prefix = args.value_of("outputFilePrefix").unwrap(); let input_file = PathBuf::from(input_file); let output_file_prefix = PathBuf::from(output_file_prefix); let fraction_memory_local = args.value_of("fractionMemoryLocal").unwrap(); let fraction_memory_local: f32 = fraction_memory_local.parse() .context(format!("fractionMemoryLocal \"{}\" is not a valid f32", fraction_memory_local))?; let fraction_memory_remote = args.value_of("fractionMemoryRemote").unwrap(); let fraction_memory_remote: f32 = fraction_memory_remote.parse() .context(format!("fractionMemoryRemote \"{}\" is not a valid f32", fraction_memory_remote))?; let pool_population = args.value_of("poolPopulation").unwrap(); let pool_population: usize = pool_population.parse() .context(format!("poolPopulation \"{}\" is not a valid usize", pool_population))?; Ok( Arguments { input_file, output_file_prefix, fraction_memory_local, fraction_memory_remote, pool_population, } ) } fn parse_unique_nodes(input_file: &Path) -> Result<Vec<Node>> { let file = File::open(input_file) .context(format!("Unable to open input file {}", input_file.display()))?; let br = BufReader::new(file); let mut node_uids: HashSet<usize> = HashSet::new(); let mut nodes: Vec<Node> = vec![]; for line in br.lines() { let line = line?; match line.parse::<Node>() { Ok(node) => { if !node_uids.contains(&node.uid) { let pred = |n: &Node| -> bool { n.cores < node.cores }; let idx = nodes.partition_point(pred); node_uids.insert(node.uid); nodes.insert(idx, node); } } Err(e) => { println!("Skipping line {} because {}", line, e); } } } Ok(nodes) } fn main() -> Result<()> { let arguments = parse_arguments()?; if let Some(parent) = arguments.output_file_prefix.parent() { if !parent.exists() { create_dir_all(parent) .context(format!("Unable to create directory for output files {}", parent.display()))?; } } let out_prefix = arguments.output_file_prefix.to_str().unwrap(); let out_nodes = format!("{}.nodes", out_prefix); let out_nodes = File::create(&out_nodes) .context(format!("Unable to create output Nodes file at {}", out_nodes))?; let mut out_nodes = BufWriter::new(out_nodes); writeln!(&mut out_nodes, "#name:str;cores:f32;memory:f32")?; let out_connections = format!("{}.connections", out_prefix); let out_connections = File::create(&out_connections) .context(format!("Unable to create output Connections file at {}", out_connections))?; let mut out_connections = BufWriter::new(out_connections); writeln!(&mut out_connections, "#node_borrower:str[;lender1:str;...lender_n:str]*")?; let nodes = parse_unique_nodes(&arguments.input_file)?; // println!("{:?}", nodes); println!("Discovered {} unique nodes", nodes.len()); let mut start = 0; let mut end = arguments.pool_population; let mut rack_id: usize = 0; loop { end = end.min(nodes.len()); let terminate = end == nodes.len(); let total_memory = nodes.iter().skip(start).take(end - start) .fold(0., |agg, node| agg + node.memory); let 
shared_memory = total_memory * arguments.fraction_memory_remote; writeln!(&mut out_nodes, "# Rack: {}", rack_id)?; writeln!(&mut out_connections, "# Rack: {}", rack_id)?; let pool_name = format!("Pool_{}", rack_id); writeln!(&mut out_nodes, "{};0;{}", pool_name, shared_memory)?; for (i, node) in nodes[start..end].iter().enumerate() { let memory = node.memory * arguments.fraction_memory_local; let name = format!("Worker_{}_{}", rack_id, i); writeln!(&mut out_nodes, "{};{};{}", name, node.cores, memory)?; writeln!(&mut out_connections, "{};{}", name, pool_name)?; } writeln!(&mut out_nodes)?; writeln!(&mut out_connections)?; if terminate { break; } start = end; end += arguments.pool_population; rack_id += 1; } Ok(()) }
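// --- Editor's note (illustration only, not part of the original source): with
// --- `--poolPopulation 2 --fractionMemoryLocal 0.75 --fractionRemoteMemory 0.25`,
// --- a rack of two machines with 0.5 memory each would be emitted roughly as:
// ---
// ---   <prefix>.nodes                    <prefix>.connections
// ---   # Rack: 0                         # Rack: 0
// ---   Pool_0;0;0.25                     Worker_0_0;Pool_0
// ---   Worker_0_0;<cores>;0.375          Worker_0_1;Pool_0
// ---   Worker_0_1;<cores>;0.375
// ---
// --- i.e. each pool holds `total_memory * fraction_memory_remote` for its rack
// --- and every worker keeps `memory * fraction_memory_local` locally.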
37.039526
120
0.606659
14aeea34657985a3773149c9fb9f1a20876a0d8f
911
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test a foreign function that accepts and returns a struct // by value. #[derive(Copy, Clone, PartialEq, Debug)] pub struct TwoU64s { one: u64, two: u64 } #[link(name = "rust_test_helpers")] extern { pub fn rust_dbg_extern_identity_TwoU64s(v: TwoU64s) -> TwoU64s; } pub fn main() { unsafe { let x = TwoU64s {one: 22, two: 23}; let y = rust_dbg_extern_identity_TwoU64s(x); assert_eq!(x, y); } }
29.387097
69
0.692645
dda069f9c02fe5e3e48ee8877025c0c8208fbfa6
2,287
use clap::{App, Arg}; use hyper::Server; use hyper::{ server::conn::AddrStream, service::{make_service_fn, service_fn}, }; use std::net::SocketAddr; use wagi::Router; #[tokio::main] pub async fn main() -> Result<(), anyhow::Error> { env_logger::init(); let matches = App::new("WAGI Server") .version("0.1.0") .author("DeisLabs") .about("Run an HTTP WAGI server") .arg( Arg::with_name("config") .short("c") .long("config") .value_name("MODULES_TOML") .help("the path to the modules.toml configuration file") .takes_value(true), ) .arg( Arg::with_name("cache") .long("cache") .value_name("CACHE_TOML") .help("the path to the cache.toml configuration file") .takes_value(true), ) .arg( Arg::with_name("listen") .short("l") .long("listen") .value_name("IP_PORT") .takes_value(true) .help("the IP address and port to listen on. Default: 127.0.0.1:3000"), ) .get_matches(); let addr: SocketAddr = matches .value_of("listen") .unwrap_or("127.0.0.1:3000") .parse() .unwrap(); log::info!("=> Starting server on {}", addr.to_string()); // We have to pass a cache file configuration path to a Wasmtime engine. let cache_config_path = matches.value_of("cache").unwrap_or("cache.toml").to_owned(); let module_config_path = matches .value_of("config") .unwrap_or("modules.toml") .to_owned(); let router = Router::new(module_config_path, cache_config_path).await?; let mk_svc = make_service_fn(move |conn: &AddrStream| { let addr = conn.remote_addr(); let r = router.clone(); async move { Ok::<_, std::convert::Infallible>(service_fn(move |req| { let r2 = r.clone(); async move { r2.route(req, addr).await } })) } }); let srv = Server::bind(&addr).serve(mk_svc); if let Err(e) = srv.await { log::error!("server error: {}", e); } Ok(()) }
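// --- Editor's note (illustration only, not part of the original source): the
// --- flags parsed above suggest an invocation along these lines; the binary
// --- name `wagi` and the file names are assumptions, and `RUST_LOG` works
// --- because `env_logger::init()` is called at startup.
// ---
// ---   RUST_LOG=info wagi --config modules.toml --cache cache.toml --listen 127.0.0.1:3000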
29.701299
89
0.521207
76d898a9ce21128130d0415be4b00a9053138d19
2,693
// iterators3.rs // This is a bigger exercise than most of the others! You can do it! // Here is your mission, should you choose to accept it: // 1. Complete the divide function to get the first four tests to pass. // 2. Get the remaining tests to pass by completing the result_with_list and // list_of_results functions. // Execute `rustlings hint iterators3` to get some hints! #[derive(Debug, PartialEq, Eq)] pub enum DivisionError { NotDivisible(NotDivisibleError), DivideByZero, } #[derive(Debug, PartialEq, Eq)] pub struct NotDivisibleError { dividend: i32, divisor: i32, } // Calculate `a` divided by `b` if `a` is evenly divisible by `b`. // Otherwise, return a suitable error. pub fn divide(a: i32, b: i32) -> Result<i32, DivisionError> { match (a, b) { (_, 0) => Err(DivisionError::DivideByZero), (a, b) if a % b != 0 => Err(DivisionError::NotDivisible(NotDivisibleError { dividend: a, divisor: b, })), _ => Ok(a / b), } } // Complete the function and return a value of the correct type so the test passes. // Desired output: Ok([1, 11, 1426, 3]) fn result_with_list() -> Result<Vec<i32>, DivisionError> { let numbers = vec![27, 297, 38502, 81]; let division_results = numbers .into_iter() .map(|n| divide(n, 27)) .collect::<Result<Vec<i32>, _>>()?; Ok(division_results) } // Complete the function and return a value of the correct type so the test passes. // Desired output: [Ok(1), Ok(11), Ok(1426), Ok(3)] fn list_of_results() -> Vec<Result<i32, DivisionError>> { let numbers = vec![27, 297, 38502, 81]; let division_results: Vec<Result<i32, DivisionError>> = numbers.into_iter().map(|n| divide(n, 27)).collect(); division_results } #[cfg(test)] mod tests { use super::*; #[test] fn test_success() { assert_eq!(divide(81, 9), Ok(9)); } #[test] fn test_not_divisible() { assert_eq!( divide(81, 6), Err(DivisionError::NotDivisible(NotDivisibleError { dividend: 81, divisor: 6 })) ); } #[test] fn test_divide_by_0() { assert_eq!(divide(81, 0), Err(DivisionError::DivideByZero)); } #[test] fn test_divide_0_by_something() { assert_eq!(divide(0, 81), Ok(0)); } #[test] fn test_result_with_list() { assert_eq!(format!("{:?}", result_with_list()), "Ok([1, 11, 1426, 3])"); } #[test] fn test_list_of_results() { assert_eq!( format!("{:?}", list_of_results()), "[Ok(1), Ok(11), Ok(1426), Ok(3)]" ); } }
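// --- Editor's sketch (not part of the exercise): `collect::<Result<Vec<_>, _>>()`
// --- stops at the first `Err`, which is what lets `result_with_list` use `?`.
// --- The input numbers below are arbitrary.
#[cfg(test)]
mod collect_short_circuit_sketch {
    use super::*;

    #[test]
    fn collect_stops_at_first_error() {
        // 27 divides evenly, 28 does not, so the whole collection becomes an Err.
        let collected: Result<Vec<i32>, DivisionError> =
            vec![27, 28].into_iter().map(|n| divide(n, 27)).collect();
        assert_eq!(
            collected,
            Err(DivisionError::NotDivisible(NotDivisibleError {
                dividend: 28,
                divisor: 27
            }))
        );
    }
}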
26.93
83
0.59042
643916c15a56173b15954da65f761f6d5792dd78
14,194
// Tokio/Future Imports use futures::future::ok; use futures::{Future, Stream}; use tokio_core::reactor::Core; // Hyper Imports use hyper::header::{HeaderName, HeaderValue, IF_NONE_MATCH}; use hyper::StatusCode; use hyper::{self, Body, HeaderMap}; use hyper::{Client, Request}; #[cfg(feature = "rustls")] type HttpsConnector = hyper_rustls::HttpsConnector<hyper::client::HttpConnector>; #[cfg(feature = "rust-native-tls")] use hyper_tls; #[cfg(feature = "rust-native-tls")] type HttpsConnector = hyper_tls::HttpsConnector<hyper::client::HttpConnector>; // Serde Imports use serde::de::DeserializeOwned; use serde::Serialize; use serde_json; // Internal Library Imports use crate::errors::*; use crate::gists; use crate::misc; use crate::notifications; use crate::orgs; use crate::repos; use crate::users; use crate::util::url_join; use std::cell::RefCell; use std::rc::Rc; /// Struct used to make calls to the Github API. pub struct Github { token: String, core: Rc<RefCell<Core>>, client: Rc<Client<HttpsConnector>>, } impl Clone for Github { fn clone(&self) -> Self { Self { token: self.token.clone(), core: Rc::clone(&self.core), client: Rc::clone(&self.client), } } } new_type!(GetQueryBuilder); new_type!(PutQueryBuilder); new_type!(PostQueryBuilder); new_type!(DeleteQueryBuilder); new_type!(PatchQueryBuilder); new_type!(CustomQuery); exec!(CustomQuery); pub trait Executor { fn execute<T>(self) -> Result<(HeaderMap, StatusCode, Option<T>)> where T: DeserializeOwned; } impl Github { /// Create a new Github client struct. It takes a type that can convert into /// an &str (`String` or `Vec<u8>` for example). As long as the function is /// given a valid API Token your requests will work. pub fn new<T>(token: T) -> Result<Self> where T: ToString, { let core = Core::new()?; #[cfg(feature = "rustls")] let client = Client::builder().build(HttpsConnector::new(4)); #[cfg(feature = "rust-native-tls")] let client = Client::builder().build(HttpsConnector::new(4)?); Ok(Self { token: token.to_string(), core: Rc::new(RefCell::new(core)), client: Rc::new(client), }) } /// Get the currently set Authorization Token pub fn get_token(&self) -> &str { &self.token } /// Change the currently set Authorization Token using a type that can turn /// into an &str. Must be a valid API Token for requests to work. pub fn set_token<T>(&mut self, token: T) where T: ToString, { self.token = token.to_string(); } /// Exposes the inner event loop for those who need /// access to it. The recommended way to safely access /// the core would be /// /// ```text /// let g = Github::new("API KEY"); /// let core = g.get_core(); /// // Handle the error here. /// let ref mut core_mut = *core.try_borrow_mut()?; /// // Do stuff with the core here. This prevents a runtime failure by /// // having two mutable borrows to the core at the same time. /// ``` /// /// This is how other parts of the API are implemented to avoid causing your /// program to crash unexpectedly. While you could borrow without the /// `Result` being handled it's highly recommended you don't unless you know /// there is no other mutable reference to it. 
pub fn get_core(&self) -> &Rc<RefCell<Core>> { &self.core } /// Begin building up a GET request to GitHub pub fn get(&self) -> GetQueryBuilder { self.into() } /// Begin building up a PUT request with no data to GitHub pub fn put_empty(&self) -> PutQueryBuilder { self.into() } /// Begin building up a PUT request with data to GitHub pub fn put<T>(&self, body: T) -> PutQueryBuilder where T: Serialize, { let mut qb: PutQueryBuilder = self.into(); if let Ok(mut qbr) = qb.request { let serialized = serde_json::to_vec(&body); match serialized { Ok(json) => { *qbr.get_mut().body_mut() = json.into(); qb.request = Ok(qbr); } Err(_) => { qb.request = Err("Unable to serialize data to JSON".into()); } } } qb } /// Begin building up a POST request with data to GitHub pub fn post<T>(&self, body: T) -> PostQueryBuilder where T: Serialize, { let mut qb: PostQueryBuilder = self.into(); if let Ok(mut qbr) = qb.request { let serialized = serde_json::to_vec(&body); match serialized { Ok(json) => { *qbr.get_mut().body_mut() = json.into(); qb.request = Ok(qbr); } Err(_) => { qb.request = Err("Unable to serialize data to JSON".into()); } } } qb } /// Begin building up a PATCH request with data to GitHub pub fn patch<T>(&self, body: T) -> PatchQueryBuilder where T: Serialize, { let mut qb: PatchQueryBuilder = self.into(); if let Ok(mut qbr) = qb.request { let serialized = serde_json::to_vec(&body); match serialized { Ok(json) => { *qbr.get_mut().body_mut() = json.into(); qb.request = Ok(qbr); } Err(_) => { qb.request = Err("Unable to serialize data to JSON".into()); } } } qb } /// Begin building up a DELETE request with data to GitHub pub fn delete<T>(&self, body: T) -> DeleteQueryBuilder where T: Serialize, { let mut qb: DeleteQueryBuilder = self.into(); if let Ok(mut qbr) = qb.request { let serialized = serde_json::to_vec(&body); match serialized { Ok(json) => { *qbr.get_mut().body_mut() = json.into(); qb.request = Ok(qbr); } Err(_) => { qb.request = Err("Unable to serialize data to JSON".into()); } } } qb } /// Begin building up a DELETE request without data to GitHub pub fn delete_empty(&self) -> DeleteQueryBuilder { self.into() } } impl<'g> GetQueryBuilder<'g> { /// Pass in an endpoint not covered by the API in the form of the following: /// /// ```no_test /// # Don't have the beginning / in it /// repos/mgattozzi/github-rs /// ``` /// /// It can be whatever endpoint or url string that's needed. This will allow /// you to get functionality out of the library as items are still added or /// if you need access to a hidden endpoint. 
func_client!(custom_endpoint, CustomQuery, endpoint_str); /// Query the emojis endpoint func_client!(emojis, misc::get::Emojis<'g>); /// Query the events endpoint func_client!(events, misc::get::Events<'g>); /// Query the feeds endpoint func_client!(feeds, misc::get::Feeds<'g>); /// Query the gitignore endpoint func_client!(gitignore, misc::get::Gitignore<'g>); /// Query the meta endpoint func_client!(meta, misc::get::Meta<'g>); /// Query the rate limit endpoint func_client!(rate_limit, misc::get::RateLimit<'g>); /// Query the user endpoint func_client!(user, users::get::User<'g>); /// Query the users endpoint func_client!(users, users::get::Users<'g>); /// Query the repos endpoint func_client!(repos, repos::get::Repos<'g>); /// Query the gists endpoint func_client!(gists, gists::get::Gists<'g>); /// Query the orgs endpoint func_client!(orgs, orgs::get::Orgs<'g>); /// Query the organizations endpoint func_client!(organizations, misc::get::Organizations<'g>); /// Query the notifications endpoint func_client!(notifications, notifications::get::Notifications<'g>); /// Add an etag to the headers of the request pub fn set_etag(mut self, tag: impl Into<HeaderValue>) -> Self { match self.request { Ok(mut req) => { req.get_mut() .headers_mut() .insert(IF_NONE_MATCH, tag.into()); self.request = Ok(req); self } Err(_) => self, } } } impl<'g> PutQueryBuilder<'g> { /// Pass in an endpoint not covered by the API in the form of the following: /// /// ```no_test /// # Don't have the beginning / in it /// repos/mgattozzi/github-rs /// ``` /// /// It can be whatever endpoint or url string that's needed. This will allow /// you to get functionality out of the library as items are still added or /// if you need access to a hidden endpoint. func_client!(custom_endpoint, CustomQuery, endpoint_str); func_client!(user, users::put::User<'g>); func_client!(gists, gists::put::Gists<'g>); func_client!(notifications, notifications::put::Notifications<'g>); /// Add an etag to the headers of the request pub fn set_etag(mut self, tag: impl Into<HeaderValue>) -> Self { match self.request { Ok(mut req) => { req.get_mut() .headers_mut() .insert(IF_NONE_MATCH, tag.into()); self.request = Ok(req); self } Err(_) => self, } } } impl<'g> DeleteQueryBuilder<'g> { /// Pass in an endpoint not covered by the API in the form of the following: /// /// ```no_test /// # Don't have the beginning / in it /// repos/mgattozzi/github-rs /// ``` /// /// It can be whatever endpoint or url string that's needed. This will allow /// you to get functionality out of the library as items are still added or /// if you need access to a hidden endpoint. func_client!(custom_endpoint, CustomQuery, endpoint_str); func_client!(user, users::delete::User<'g>); func_client!(gists, gists::delete::Gists<'g>); func_client!(notifications, notifications::delete::Notifications<'g>); /// Add an etag to the headers of the request pub fn set_etag(mut self, tag: impl Into<HeaderValue>) -> Self { match self.request { Ok(mut req) => { req.get_mut() .headers_mut() .insert(IF_NONE_MATCH, tag.into()); self.request = Ok(req); self } Err(_) => self, } } } impl<'g> PostQueryBuilder<'g> { /// Pass in an endpoint not covered by the API in the form of the following: /// /// ```no_test /// # Don't have the beginning / in it /// repos/mgattozzi/github-rs /// ``` /// /// It can be whatever endpoint or url string that's needed. This will allow /// you to get functionality out of the library as items are still added or /// if you need access to a hidden endpoint. 
func_client!(custom_endpoint, CustomQuery, endpoint_str); func_client!(user, users::post::User<'g>); func_client!(repos, repos::post::Repos<'g>); func_client!(gists, gists::post::Gists<'g>); /// Add an etag to the headers of the request pub fn set_etag(mut self, tag: impl Into<HeaderValue>) -> Self { match self.request { Ok(mut req) => { req.get_mut() .headers_mut() .insert(IF_NONE_MATCH, tag.into()); self.request = Ok(req); self } Err(_) => self, } } } impl<'g> PatchQueryBuilder<'g> { /// Pass in an endpoint not covered by the API in the form of the following: /// /// ```no_test /// # Don't have the beginning / in it /// repos/mgattozzi/github-rs /// ``` /// /// It can be whatever endpoint or url string that's needed. This will allow /// you to get functionality out of the library as items are still added or /// if you need access to a hidden endpoint. func_client!(custom_endpoint, CustomQuery, endpoint_str); func_client!(user, users::patch::User<'g>); func_client!(gists, gists::patch::Gists<'g>); func_client!(notifications, notifications::patch::Notifications<'g>); /// Add an etag to the headers of the request pub fn set_etag(mut self, tag: impl Into<HeaderValue>) -> Self { match self.request { Ok(mut req) => { req.get_mut() .headers_mut() .insert(IF_NONE_MATCH, tag.into()); self.request = Ok(req); self } Err(_) => self, } } } // From derivations of Github to the given type using a certain // request method from!( @GetQueryBuilder => "GET" @PutQueryBuilder => "PUT" @PostQueryBuilder => "POST" @PatchQueryBuilder => "PATCH" @DeleteQueryBuilder => "DELETE" ); // Custom Url based from impls from!( @GetQueryBuilder => CustomQuery @PutQueryBuilder => CustomQuery @PostQueryBuilder => CustomQuery @PatchQueryBuilder => CustomQuery @DeleteQueryBuilder => CustomQuery ); impl<'a> CustomQuery<'a> { /// Set custom header for request. /// Useful for custom headers (sometimes using in api preview). pub fn set_header( mut self, header_name: impl Into<HeaderName>, accept_header: impl Into<HeaderValue>, ) -> Self { match self.request { Ok(mut req) => { req.get_mut() .headers_mut() .insert(header_name.into(), accept_header.into()); self.request = Ok(req); self } Err(_) => self, } } }
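// --- Editor's sketch (not part of the original source): a small helper written
// --- against the `Executor` trait defined above. `serde_json::Value` keeps the
// --- response schema-free; the error plumbing relies on the crate's `Result<T>`
// --- alias brought in via `crate::errors::*`.
fn run_query<Q: Executor>(query: Q) -> Result<Option<serde_json::Value>> {
    // `execute` yields the response headers, the HTTP status and an optional
    // deserialized body.
    let (_headers, status, body) = query.execute::<serde_json::Value>()?;
    println!("GitHub responded with status {}", status);
    Ok(body)
}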
30.2
81
0.563971
9b8f89bdb39742906378a98414f18260e4ffa329
84,187
use crate::lexer::{ Lexer, PeekableLexer, }; use std::error; use std::fmt; use std::default::{ Default, }; /// Parse a set of objects and material library names from a string. /// /// ## Example /// /// ``` /// # use wavefront_obj::obj; /// # use wavefront_obj::obj::{ /// # Vertex, /// # NormalVertex, /// # Group, /// # SmoothingGroup, /// # Element, /// # ShapeEntry, /// # Geometry, /// # VTNIndex, /// # Object, /// # ObjectSet, /// # }; /// # /// let obj_file = String::from(r" /// mtllib material_library.mtl \ /// o Object001 \ /// v 0.000000 2.000000 0.000000 \ /// v 0.000000 0.000000 0.000000 \ /// v 2.000000 0.000000 0.000000 \ /// v 2.000000 2.000000 0.000000 \ /// v 4.000000 0.000000 -1.255298 \ /// v 4.000000 2.000000 -1.255298 \ /// vn 0.000000 0.000000 1.000000 \ /// vn 0.000000 0.000000 1.000000 \ /// vn 0.276597 0.000000 0.960986 \ /// vn 0.276597 0.000000 0.960986 \ /// vn 0.531611 0.000000 0.846988 \ /// vn 0.531611 0.000000 0.846988 \ /// ## 6 vertices \ /// ## 6 normals \ /// \ /// usemtl material \ /// g all \ /// s 1 \ /// f 1//1 2//2 3//3 4//4 \ /// f 4//4 3//3 5//5 6//6 \ /// ## 2 elements \ /// \ /// #### End Object001 \ /// \ /// "); /// // let expected = ...; /// # let expected = ObjectSet { /// # material_libraries: vec![ /// # String::from("material_library.mtl"), /// # ], /// # objects: vec![ /// # Object { /// # name: String::from("Object001"), /// # vertex_set: vec![ /// # Vertex { x: 0.000000, y: 2.000000, z: 0.000000, w: 1.0 }, /// # Vertex { x: 0.000000, y: 0.000000, z: 0.000000, w: 1.0 }, /// # Vertex { x: 2.000000, y: 0.000000, z: 0.000000, w: 1.0 }, /// # Vertex { x: 2.000000, y: 2.000000, z: 0.000000, w: 1.0 }, /// # Vertex { x: 4.000000, y: 0.000000, z: -1.255298, w: 1.0 }, /// # Vertex { x: 4.000000, y: 2.000000, z: -1.255298, w: 1.0 }, /// # ], /// # texture_vertex_set: vec![], /// # normal_vertex_set: vec![ /// # NormalVertex { x: 0.000000, y: 0.000000, z: 1.000000 }, /// # NormalVertex { x: 0.000000, y: 0.000000, z: 1.000000 }, /// # NormalVertex { x: 0.276597, y: 0.000000, z: 0.960986 }, /// # NormalVertex { x: 0.276597, y: 0.000000, z: 0.960986 }, /// # NormalVertex { x: 0.531611, y: 0.000000, z: 0.846988 }, /// # NormalVertex { x: 0.531611, y: 0.000000, z: 0.846988 }, /// # ], /// # group_set: vec![ /// # Group(String::from("all")), /// # ], /// # smoothing_group_set: vec![ /// # SmoothingGroup(1), /// # ], /// # element_set: vec![ /// # Element::Face(VTNIndex::VN(0, 0), VTNIndex::VN(1, 1), VTNIndex::VN(2, 2)), /// # Element::Face(VTNIndex::VN(0, 0), VTNIndex::VN(2, 2), VTNIndex::VN(3, 3)), /// # Element::Face(VTNIndex::VN(3, 3), VTNIndex::VN(2, 2), VTNIndex::VN(4, 4)), /// # Element::Face(VTNIndex::VN(3, 3), VTNIndex::VN(4, 4), VTNIndex::VN(5, 5)), /// # ], /// # shape_set: vec![ /// # ShapeEntry { element: 0, groups: vec![0], smoothing_group: 0 }, /// # ShapeEntry { element: 1, groups: vec![0], smoothing_group: 0 }, /// # ShapeEntry { element: 2, groups: vec![0], smoothing_group: 0 }, /// # ShapeEntry { element: 3, groups: vec![0], smoothing_group: 0 }, /// # ], /// # geometry_set: vec![ /// # Geometry { material_name: Some(String::from("material")), shapes: vec![0, 1, 2, 3] }, /// # ] /// # } /// # ] /// # }; /// let result = obj::parse(&obj_file); /// assert!(result.is_ok()); /// /// let result = result.unwrap(); /// assert_eq!(result.material_libraries, expected.material_libraries); /// ``` pub fn parse<T: AsRef<str>>(input: T) -> Result<ObjectSet, ParseError> { Parser::new(input.as_ref()).parse_objset() } /// A single three dimensional point 
in an object, or a single /// three-dimensional point of an object in homogeneous coordinates /// when the w-component is one. #[derive(Copy, Clone, Debug, PartialEq)] pub struct Vertex { /// The **x-axis** component of a vertex. pub x: f64, /// The **y-axis** component of a vertex. pub y: f64, /// The **z-axis** component of a vertex. pub z: f64, /// The **w-axis** (homogeneous) component of a vertex. The default value /// of this field is 0 when the w coordinate is not present. pub w: f64, } impl fmt::Display for Vertex { fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(formatter, "v {} {} {} {}", self.x, self.y, self.z, self.w) } } /// A single three-dimensional coordinate in a texture. #[derive(Copy, Clone, Debug, PartialEq)] pub struct TextureVertex { /// The horizontal coordinate of a texture vertex. pub u: f64, /// The vertical coordinate of a texture vertex. pub v: f64, /// The depth coordinate of a texture vertex. pub w: f64, } impl fmt::Display for TextureVertex { fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(formatter, "vt {} {} {}", self.u, self.v, self.w) } } /// A normal vector at a vertex in an object. #[derive(Copy, Clone, Debug, PartialEq)] pub struct NormalVertex { /// The **x-axis** component of a normal vector. pub x: f64, /// The **y-axis** component of a normal vector. pub y: f64, /// The **z-axis** componont of a normal vector. pub z: f64, } impl fmt::Display for NormalVertex { fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(formatter, "vn {} {} {}", self.x, self.y, self.z) } } /// A general vertex/texture/normal index representing the indices /// of a vertex, texture vertex, and normal vector in an element /// of a geometry figure. /// /// A VTN index has the forms of **vertex**, **vertex/texture**, /// **vertex//normal**, or **vertex/texture/normal** indices, /// which indicates which data of vertices, texture vertices, and /// normal vectors are bound to each vertex in a shape element. #[derive(Copy, Clone, Debug, PartialEq)] pub enum VTNIndex { V(VertexIndex), VT(VertexIndex, TextureVertexIndex), VN(VertexIndex, NormalVertexIndex), VTN(VertexIndex, TextureVertexIndex, NormalVertexIndex), } impl VTNIndex { /// Determine whether two VTN indices have the same form. /// /// The function returns true if both VTN indices are of the form /// vertex, vertex/texture/ vertex//normal, or vertex/texture/normal. /// Otherwise, the function returns false. 
/// /// ## Example /// /// ``` /// # use wavefront_obj::obj::{ /// # VTNIndex, /// # }; /// # /// let v_index1 = VTNIndex::V(0); /// let v_index2 = VTNIndex::V(1); /// assert!(v_index1.has_same_type_as(&v_index2)); /// /// let vt_index1 = VTNIndex::VT(2, 3); /// let vt_index2 = VTNIndex::VT(4, 5); /// assert!(vt_index1.has_same_type_as(&vt_index2)); /// /// let vn_index1 = VTNIndex::VN(6, 7); /// let vn_index2 = VTNIndex::VN(8, 9); /// assert!(vn_index1.has_same_type_as(&vn_index2)); /// /// let vtn_index1 = VTNIndex::VTN(10, 11, 12); /// let vtn_index2 = VTNIndex::VTN(13, 14, 15); /// assert!(vtn_index1.has_same_type_as(&vtn_index2)); /// /// assert!(!v_index1.has_same_type_as(&vt_index1)); /// assert!(!v_index1.has_same_type_as(&vn_index1)); /// assert!(!v_index1.has_same_type_as(&vtn_index1)); /// assert!(!vt_index1.has_same_type_as(&vn_index1)); /// assert!(!vt_index1.has_same_type_as(&vtn_index1)); /// assert!(!vn_index1.has_same_type_as(&vtn_index1)); /// ``` pub fn has_same_type_as(&self, other: &VTNIndex) -> bool { matches!( (self, other), (&VTNIndex::V(_), &VTNIndex::V(_)) | (&VTNIndex::VT(_,_), &VTNIndex::VT(_,_)) | (&VTNIndex::VN(_,_), &VTNIndex::VN(_,_)) | (&VTNIndex::VTN(_,_,_), &VTNIndex::VTN(_,_,_)) ) } } impl fmt::Display for VTNIndex { fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { // NOTE: The library represented VTN indices starting form 0, whereas // *.obj files index starting from 1, so we must add one to each index // when displaying the data back in a form that looks like the original // file. match *self { VTNIndex::V(v) => { write!(formatter, "{}", v + 1) } VTNIndex::VT(v, vt) => { write!(formatter, "{}/{}", v + 1 ,vt + 1) } VTNIndex::VN(v, vn) => { write!(formatter, "{}//{}", v + 1, vn + 1) } VTNIndex::VTN(v, vt, vn) => { write!(formatter, "{}/{}/{}", v + 1, vt + 1, vn + 1) } } } } type ElementIndex = usize; type VertexIndex = usize; type TextureVertexIndex = usize; type NormalVertexIndex = usize; type GroupIndex = usize; type SmoothingGroupIndex = usize; type ShapeEntryIndex = usize; /// An element is the smallest component of a more complex geometric figure. /// /// An element can be either a point, line, or a face (triangle). A geometric figures /// is a collection of elements. Typically, a geometric figure consists of elements that /// are all the same type, i.e. a three-dimensional object is composed of all faces, /// or a line is composed of all line elements. #[derive(Copy, Clone, Debug, PartialEq)] pub enum Element { Point(VTNIndex), Line(VTNIndex, VTNIndex), Face(VTNIndex, VTNIndex, VTNIndex), } impl fmt::Display for Element { fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { Element::Point(vtn) => { write!(formatter, "p {}", vtn) }, Element::Line(vtn1, vtn2) => { write!(formatter, "l {} {}", vtn1, vtn2) }, Element::Face(vtn1, vtn2, vtn3) => { write!(formatter, "f {} {} {}", vtn1, vtn2, vtn3) }, } } } /// A group is a label for a collection of elements within an object. /// /// A collection of groups enables one to organize collections of elements /// by group. 
#[derive(Clone, Debug, PartialEq, Eq)] pub struct Group(pub String); impl fmt::Display for Group { fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(formatter, "{}", self.0) } } impl Default for Group { fn default() -> Group { Group(String::from("default")) } } /// A smoothing group is a label providing information on which collections /// of elements should have their normal vectors interpolated over give /// those elements a non-faceted appearance. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct SmoothingGroup(pub usize); impl fmt::Display for SmoothingGroup { fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { if self.0 == 0 { write!(formatter, "off") } else { write!(formatter, "{}", self.0) } } } impl Default for SmoothingGroup { fn default() -> SmoothingGroup { SmoothingGroup(0) } } /// A shape entry is a collection of indices grouping together all the /// organizational information about each element in an object. #[derive(Clone, Debug, PartialEq)] pub struct ShapeEntry { /// The index of the element in the element set that the shape entry describes. pub element: ElementIndex, /// The groups that a particular element belongs to. pub groups: Vec<GroupIndex>, /// The smoothing group that a particular element belongs to. pub smoothing_group: SmoothingGroupIndex, } /// A shape is a collection of data grouping together all the organizational /// information about each element in an object. #[derive(Clone, Debug, PartialEq)] pub struct Shape { /// The element from the element set. pub element: Element, /// The names of the groups that a particular element belongs to. pub groups: Vec<Group>, /// The smoothing group that a particular element belongs to. pub smoothing_groups: Vec<SmoothingGroup>, } /// The geometry inside an object is a collection of elements along with their /// material description data. /// /// The material description data describes which material from a corresponding /// material library contains the data for rendering each primitive in the set of /// shapes. #[derive(Clone, Debug, PartialEq)] pub struct Geometry { /// The material name that contains the material and lighting properties /// of each shape in this collection. pub material_name: Option<String>, /// The shapes associated with a particular material and geometry. pub shapes: Vec<ShapeEntryIndex>, } /// A VTN triple contains the actual data of each element in an object. #[derive(Copy, Clone, Debug, PartialEq)] pub enum VTNTriple<'a> { V(&'a Vertex), VT(&'a Vertex, &'a TextureVertex), VN(&'a Vertex, &'a NormalVertex), VTN(&'a Vertex, &'a TextureVertex, &'a NormalVertex), } /// An object is a collection of vertices, texture vertices, normal vectors, /// and geometric primitives composing a unit of geometry in a scene to /// be rendered. #[derive(Clone, Debug, PartialEq)] pub struct Object { /// The name of the object. pub name: String, /// The set of vertices in an object. pub vertex_set: Vec<Vertex>, /// The set of texture coordinates in an object for mapping materials onto /// an object. pub texture_vertex_set: Vec<TextureVertex>, /// The set of normal vectors defined at each vertex in an object. pub normal_vertex_set: Vec<NormalVertex>, /// The set of names of groups of elements in an object. pub group_set: Vec<Group>, /// The set of names of smoothing groups of elements in an object. pub smoothing_group_set: Vec<SmoothingGroup>, /// The set of primitives (i.e. points, lines, and faces) in an object. 
pub element_set: Vec<Element>, /// The set of grouping data associated with each element in an object. pub shape_set: Vec<ShapeEntry>, /// The set of elements associated with each material used in an object. pub geometry_set: Vec<Geometry>, } impl Object { /// Fetch the vertex/texture/normal of a vertex in an object. /// /// The function returns `None` if any of the VTN indices are not found /// inside the object. /// /// ## Example /// /// ``` /// # use wavefront_obj::obj; /// # use wavefront_obj::obj::{ /// # VTNIndex, /// # VTNTriple, /// # Vertex, /// # TextureVertex, /// # NormalVertex, /// # }; /// # /// let obj_file = String::from(r" /// o quad \ /// v -0.5 -0.5 0.0 \ /// v 0.5 -0.5 0.0 \ /// v 0.5 0.5 0.0 \ /// v -0.5 0.5 0.0 \ /// ## 4 vertices \ /// \ /// vt 0.0 0.0 0.0 \ /// vt 1.0 0.0 0.0 \ /// vt 1.0 1.0 0.0 \ /// vt 0.0 1.0 0.0 \ /// ## 4 texture vertices \ /// \ /// vn 0.0 0.0 1.0 \ /// vn 0.0 0.0 1.0 \ /// vn 0.0 0.0 1.0 \ /// vn 0.0 0.0 1.0 \ /// ## 4 normal vertices \ /// \ /// f 1/1/1 2/2/2 3/3/3 4/4/4 \ /// ## 2 faces \ /// ## end quad \ /// "); /// let obj_set = obj::parse(&obj_file).unwrap(); /// /// // The vertex data of an obj file are stored 1-indexed, but the library stores /// // the vertex data 0-indexed, so one must add one to each index to get the indices /// // as they would appear in a *.obj file. /// let vtn_index0 = VTNIndex::VTN(0, 0, 0); /// let vtn_index1 = VTNIndex::VTN(1, 1, 1); /// let vtn_index2 = VTNIndex::VTN(2, 2, 2); /// let vtn_index3 = VTNIndex::VTN(3, 3, 3); /// /// let object = &obj_set.objects[0]; /// let vtn_triple0 = object.get_vtn_triple(vtn_index0); /// let vtn_triple1 = object.get_vtn_triple(vtn_index1); /// let vtn_triple2 = object.get_vtn_triple(vtn_index2); /// let vtn_triple3 = object.get_vtn_triple(vtn_index3); /// /// // Explicitly construct the vertex data so we can make comparisons. /// # let vertex0 = Vertex { x: -0.5, y: -0.5, z: 0.0, w: 1.0 }; /// # let vertex1 = Vertex { x: 0.5, y: -0.5, z: 0.0, w: 1.0 }; /// # let vertex2 = Vertex { x: 0.5, y: 0.5, z: 0.0, w: 1.0 }; /// # let vertex3 = Vertex { x: -0.5, y: 0.5, z: 0.0, w: 1.0 }; /// # let texture_vertex0 = TextureVertex { u: 0.0, v: 0.0, w: 0.0 }; /// # let texture_vertex1 = TextureVertex { u: 1.0, v: 0.0, w: 0.0 }; /// # let texture_vertex2 = TextureVertex { u: 1.0, v: 1.0, w: 0.0 }; /// # let texture_vertex3 = TextureVertex { u: 0.0, v: 1.0, w: 0.0 }; /// # let normal_vertex0 = NormalVertex { x: 0.0, y: 0.0, z: 1.0 }; /// # let normal_vertex1 = NormalVertex { x: 0.0, y: 0.0, z: 1.0 }; /// # let normal_vertex2 = NormalVertex { x: 0.0, y: 0.0, z: 1.0 }; /// # let normal_vertex3 = NormalVertex { x: 0.0, y: 0.0, z: 1.0 }; /// let expected0 = Some(VTNTriple::VTN(&vertex0, &texture_vertex0, &normal_vertex0)); /// let expected1 = Some(VTNTriple::VTN(&vertex1, &texture_vertex1, &normal_vertex1)); /// let expected2 = Some(VTNTriple::VTN(&vertex2, &texture_vertex2, &normal_vertex2)); /// let expected3 = Some(VTNTriple::VTN(&vertex3, &texture_vertex3, &normal_vertex3)); /// /// assert_eq!(vtn_triple0, expected0); /// assert_eq!(vtn_triple1, expected1); /// assert_eq!(vtn_triple2, expected2); /// assert_eq!(vtn_triple3, expected3); /// /// // VTN indices lying outside the ones stored in the oject should return nothing. 
/// assert!(object.get_vtn_triple(VTNIndex::VTN(4, 4, 4)).is_none()); /// ``` pub fn get_vtn_triple(&self, index: VTNIndex) -> Option<VTNTriple> { match index { VTNIndex::V(v_index) => { let vertex = self.vertex_set.get(v_index)?; Some(VTNTriple::V(vertex)) } VTNIndex::VT(v_index, vt_index) => { let vertex = self.vertex_set.get(v_index)?; let texture_vertex = self.texture_vertex_set.get(vt_index)?; Some(VTNTriple::VT(vertex, texture_vertex)) } VTNIndex::VN(v_index, vn_index) => { let vertex = self.vertex_set.get(v_index)?; let normal_vertex = self.normal_vertex_set.get(vn_index)?; Some(VTNTriple::VN(vertex, normal_vertex)) } VTNIndex::VTN(v_index, vt_index, vn_index) => { let vertex = self.vertex_set.get(v_index)?; let texture_vertex = self.texture_vertex_set.get(vt_index)?; let normal_vertex = self.normal_vertex_set.get(vn_index)?; Some(VTNTriple::VTN(vertex, texture_vertex, normal_vertex)) } } } } struct DisplayObjectCompositor { } impl DisplayObjectCompositor { fn new() -> Self { Self {} } fn compose_set<T: fmt::Display>(&self, set: &[T], name: &str) -> String { let mut string = format!(" {} set:\n", name); if set.is_empty() { string += " data: []\n"; } else { string += &format!(" data: [({}) ... ({})]\n", set[0], set[set.len() - 1]); } string += &format!(" length: {}\n", set.len()); string } fn compose(&self, object: &Object) -> String { let mut string = String::from("Object {\n"); string += &format!(" name: {}\n", object.name); string += &self.compose_set(&object.vertex_set, "vertex"); string += &self.compose_set(&object.texture_vertex_set, "texture vertex"); string += &self.compose_set(&object.normal_vertex_set, "normal vertex"); string += &self.compose_set(&object.group_set, "group"); string += &self.compose_set(&object.smoothing_group_set, "smoothing group"); string += &self.compose_set(&object.element_set, "element"); string += "}}\n"; string } } impl fmt::Display for Object { fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { let string = DisplayObjectCompositor::new().compose(self); write!(formatter, "{}", string) } } impl Default for Object { fn default() -> Object { Object { name: String::from(""), vertex_set: Default::default(), texture_vertex_set: Default::default(), normal_vertex_set: Default::default(), group_set: Default::default(), smoothing_group_set: Default::default(), element_set: Default::default(), shape_set: Default::default(), geometry_set: Default::default(), } } } /// An object set is a collection of objects and material library named obtained /// from parsing an `*.obj` file. An `*.obj` file may contain more that one object. #[derive(Clone, Debug, PartialEq)] pub struct ObjectSet { /// The set of material libraries associated with the object set. pub material_libraries: Vec<String>, /// The set of objects in an object set. pub objects: Vec<Object>, } impl fmt::Display for ObjectSet { fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { let compositor = DisplayObjectCompositor::new(); let mut string = String::from("ObjectSet {\n"); for object in self.objects.iter() { string += &compositor.compose(&object); string += &"\n"; } string += &"}\n"; write!(formatter, "{}", string) } } /// A marker indicating the type of error generated during parsing of a /// Wavefront OBJ file. #[derive(Clone, Debug, PartialEq, Eq)] pub enum ErrorKind { /// The parser reached the end of the input early. EndOfFile, /// The parser expected a tag statement that was not present. 
ExpectedTagStatement, /// The parser expected a floating point number but found something else. ExpectedFloat, /// The parser expected an integer but found something else. ExpectedInteger, /// The parser expected a vertex/texture/normal index but found something else. ExpectedVTNIndex, /// the parser encountered an object element index that is out of range. VTNIndexOutOfRange, /// The parser encountered a face element that did not have enough vertices. EveryFaceElementMustHaveAtLeastThreeVertices, /// An element had VTN indices with different forms. EveryVTNIndexMustHaveTheSameFormForAGivenElement, /// A statement in a wavefront obj file that is either unsupported or does not exist. InvalidObjectStatement, /// The parser encountered an invalid or unsupported element type. ElementMustBeAPointLineOrFace, /// The smoothing group name is something other than an integer or the default /// value `off`. SmoothingGroupNameMustBeOffOrInteger, /// The smoothing group declaration is missing a name. SmoothingGroupDeclarationHasNoName, /// The `usemtl` statement has no corresponding material name. MaterialStatementHasNoName, } /// An error that is returned from parsing an invalid `*.obj` file, or /// another kind of error. #[derive(Clone, Debug, PartialEq, Eq)] pub struct ParseError { /// The line number where the error occurred. pub line_number: usize, /// The kind of error that occurred. pub kind: ErrorKind, /// A message describing why the parse error was generated. pub message: String, } impl ParseError { /// Construct a new parse error. fn new(line_number: usize, kind: ErrorKind, message: String) -> ParseError { ParseError { line_number: line_number, kind: kind, message: message, } } } impl fmt::Display for ParseError { fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!( formatter, "Parse error at line {}: {}", self.line_number, self.message ) } } impl error::Error for ParseError {} /// A Wavefront OBJ file parser extracts three-dimensional geometric data /// from a `*.obj` file. pub struct Parser<'a> { /// The current line position of the parser in the input stream. line_number: usize, /// the underlying lexer that generates tokens. lexer: PeekableLexer<'a>, } /// Triangulate a polygon with a triangle fan. /// /// NOTE: the OBJ specification assumes that polygons are coplanar, and /// consequently the parser does not check this. It is up to the model creator /// to ensure this. #[inline] fn triangulate(elements: &mut Vec<Element>, vtn_indices: &[VTNIndex]) -> usize { let vertex0 = vtn_indices[0]; for i in 0..(vtn_indices.len() - 2) { elements.push(Element::Face(vertex0, vtn_indices[i + 1], vtn_indices[i + 2])); } vtn_indices.len() - 2 } /// Verify that each VTN index has the same type and has a valid form. #[inline] fn verify_vtn_indices(vtn_indices: &[VTNIndex]) -> bool { for i in 1..vtn_indices.len() { if !vtn_indices[i].has_same_type_as(&vtn_indices[0]) { return false; } } true } impl<'a> Parser<'a> { /// Construct a new Wavefront OBJ file parser. pub fn new(input: &'a str) -> Parser<'a> { Parser { line_number: 1, lexer: PeekableLexer::new(Lexer::new(input)), } } /// Construct a new parse error. fn error<T>(&self, kind: ErrorKind, message: String) -> Result<T, ParseError> { Err(ParseError::new(self.line_number, kind, message)) } /// Peek at the currently held token without advancing the token stream. fn peek(&mut self) -> Option<&'a str> { self.lexer.peek() } /// Advance the token stream one step returning the currently held string. 
fn next(&mut self) -> Option<&'a str> { let token = self.lexer.next(); if let Some(val) = token { if val == "\n" { self.line_number += 1; } } token } /// Advance the token stream one step without returning the current token. fn advance(&mut self) { self.next(); } /// Advance the token stream one step, returning the next token in the /// stream. /// /// This function generates an error is it runs out of input. fn next_string(&mut self) -> Result<&'a str, ParseError> { match self.next() { Some(st) => Ok(st), None => self.error( ErrorKind::EndOfFile, "Reached the end of the input in the process of getting the next token.".to_owned() ) } } /// Advance the token stream if the next token in the stream matches the /// input tag. /// /// This functions returns an error if the expected tag is not present. fn expect_tag(&mut self, tag: &str) -> Result<(), ParseError> { match self.next() { None => self.error( ErrorKind::EndOfFile, "Reached the end of the input in the process of getting the next token.".to_owned() ), Some(st) if st != tag => self.error( ErrorKind::ExpectedTagStatement, format!("Expected `{}` but got `{}` instead.", tag, st) ), _ => Ok(()) } } /// Parse a floating point number from the current token in the stream. fn parse_f64(&mut self) -> Result<f64, ParseError> { let st = self.next_string()?; match st.parse::<f64>() { Ok(val) => Ok(val), Err(_) => self.error( ErrorKind::ExpectedFloat, format!("Expected a floating point number but got `{}` instead.", st) ), } } /// Parse an integer from the current token in the stream. fn parse_isize(&mut self) -> Result<isize, ParseError> { let st = self.next_string()?; match st.parse::<isize>() { Ok(val) => Ok(val), Err(_) => self.error( ErrorKind::ExpectedInteger, format!("Expected an integer but got `{}` instead.", st) ), } } /// Apply a parser to the input stream. /// /// If the parser `parser` fails to parse the current token in the stream, /// it returns nothing and the stream state does not change. Otherwise, the /// stream advances and the corresponding result is returned. fn try_once<P, T>(&mut self, parser: P) -> Option<T> where P: FnOnce(&str) -> Option<T> { match self.peek() { Some(st) => parser(&st).map(|got| { self.advance(); got }), None => None, } } /// Parse a vertex from the input. fn parse_vertex(&mut self) -> Result<Vertex, ParseError> { self.expect_tag("v")?; let x = self.parse_f64()?; let y = self.parse_f64()?; let z = self.parse_f64()?; let mw = self.try_once(|st| st.parse::<f64>().ok()); let w = mw.unwrap_or(1_f64); Ok(Vertex { x: x, y: y, z: z, w: w }) } /// Parse a texture vertex from the input. fn parse_texture_vertex(&mut self) -> Result<TextureVertex, ParseError> { self.expect_tag("vt")?; let u = self.parse_f64()?; let mv = self.try_once(|st| st.parse::<f64>().ok()); let v = mv.unwrap_or(0_f64); let mw = self.try_once(|st| st.parse::<f64>().ok()); let w = mw.unwrap_or(0_f64); Ok(TextureVertex { u: u, v: v, w: w }) } /// Parse a normal vector from the input. fn parse_normal_vertex(&mut self) -> Result<NormalVertex, ParseError> { self.expect_tag("vn")?; let x = self.parse_f64()?; let y = self.parse_f64()?; let z = self.parse_f64()?; Ok(NormalVertex { x: x, y: y, z: z }) } /// Skip over any number of newlines in the input stream. fn skip_zero_or_more_newlines(&mut self) { while let Some("\n") = self.peek() { self.advance(); } } /// Skip over at least one newline in the input stream. /// /// The function returns an error if no newline tokens are present. 
fn skip_one_or_more_newlines(&mut self) -> Result<(), ParseError> { self.expect_tag("\n")?; self.skip_zero_or_more_newlines(); Ok(()) } /// Parse the name of an object. fn parse_object_name(&mut self) -> Result<&'a str, ParseError> { match self.peek() { Some("o") => { self.expect_tag("o")?; let object_name = self.next_string(); self.skip_one_or_more_newlines()?; object_name } _ => Ok("") } } #[inline(always)] fn calculate_index( &self, value_range: (usize, usize), parsed_value: isize) -> Result<usize, ParseError> { let (min_value, max_value) = value_range; let actual_value = if parsed_value <= 0 { max_value as isize - parsed_value } else { parsed_value - 1 }; if (actual_value >= min_value as isize) && (actual_value < max_value as isize) { debug_assert!(actual_value >= 0); Ok((actual_value - min_value as isize) as usize) } else { self.error( ErrorKind::VTNIndexOutOfRange, format!( "Expected index in range [{}, {}), but got {}.", min_value, max_value, actual_value ) ) } } /// Parse a vertex/texture/normal index. fn parse_vtn_index( &mut self, vertex_index_range: (usize, usize), texture_index_range: (usize, usize), normal_index_range: (usize, usize)) -> Result<VTNIndex, ParseError> { let st = self.next_string()?; let process_split = |split: &str, value_range: (usize, usize)| -> Result<Option<usize>, ParseError> { if !split.is_empty() { let parsed_value = split.parse::<isize>().or_else(|_| { self.error( ErrorKind::ExpectedInteger, format!("Expected an integer but got `{}` instead.", split) ) })?; let index = self.calculate_index(value_range, parsed_value)?; Ok(Some(index)) } else { Ok(None) } }; let mut splits_iter = st.split('/'); let split1 = splits_iter .next() .and_then(|s| process_split(&s, vertex_index_range).transpose()) .transpose()?; let split2 = splits_iter .next() .and_then(|s| process_split(&s, texture_index_range).transpose()) .transpose()?; let split3 = splits_iter .next() .and_then(|s| process_split(&s, normal_index_range).transpose()) .transpose()?; if split1.is_none() || splits_iter.next().is_some() { return self.error( ErrorKind::ExpectedVTNIndex, format!("Expected a `vertex/texture/normal` index but got `{}` instead.", st) ); } match (split1, split2, split3) { (Some(v), None, None) => Ok(VTNIndex::V(v)), (Some(v), None, Some(vn)) => Ok(VTNIndex::VN(v, vn)), (Some(v), Some(vt), None) => Ok(VTNIndex::VT(v, vt)), (Some(v), Some(vt), Some(vn)) => Ok(VTNIndex::VTN(v, vt, vn)), _ => self.error( ErrorKind::ExpectedVTNIndex, format!("Expected a `vertex/texture/normal` index but got `{}` instead.", st) ), } } /// Parse one more more VTN indices. /// /// Return the number of VTN indices parsed if no errors occurred. fn parse_vtn_indices( &mut self, vtn_indices: &mut Vec<VTNIndex>, vertex_index_range: (usize, usize), texture_index_range: (usize, usize), normal_index_range: (usize, usize)) -> Result<usize, ParseError> { let mut indices_parsed = 0; while let Ok(vtn_index) = self.parse_vtn_index( vertex_index_range, texture_index_range, normal_index_range ) { vtn_indices.push(vtn_index); indices_parsed += 1; } Ok(indices_parsed) } /// Parse one or more point from the current line in the input stream. /// /// There can be more than one point in a single line of input, so /// this parsing rule will attempt to read all of them. 
fn parse_point( &mut self, elements: &mut Vec<Element>, vertex_index_range: (usize, usize)) -> Result<usize, ParseError> { self.expect_tag("p")?; let parsed_value = self.parse_isize()?; let v_index = self.calculate_index(vertex_index_range, parsed_value)?; elements.push(Element::Point(VTNIndex::V(v_index))); let mut elements_parsed = 1; loop { match self.next() { Some(st) if st != "\n" => match st.parse::<isize>() { Ok(val) => { let v_index = self.calculate_index(vertex_index_range, val)?; elements.push(Element::Point(VTNIndex::V(v_index))); elements_parsed += 1; } Err(_) => { return self.error( ErrorKind::ExpectedInteger, format!("Expected an integer but got `{}` instead.", st) ) } } _ => break, } } Ok(elements_parsed) } /// Parse one more more line elements from a line of text input from the input. /// /// If the parser cannot parse each line element from a line of text input, the /// parser returns an error. fn parse_line( &mut self, elements: &mut Vec<Element>, vertex_index_range: (usize, usize), texture_index_range: (usize, usize), normal_index_range: (usize, usize)) -> Result<usize, ParseError> { self.expect_tag("l")?; let mut vtn_indices = vec![]; vtn_indices.push(self.parse_vtn_index( vertex_index_range, texture_index_range, normal_index_range )?); vtn_indices.push(self.parse_vtn_index( vertex_index_range, texture_index_range, normal_index_range )?); self.parse_vtn_indices( &mut vtn_indices, vertex_index_range, texture_index_range, normal_index_range )?; if !verify_vtn_indices(&vtn_indices) { return self.error( ErrorKind::EveryVTNIndexMustHaveTheSameFormForAGivenElement, "Every VTN index for a line must have the same form.".to_owned() ); } // Now that we have verified the indices, build the line elements. for i in 0..(vtn_indices.len() - 1) { elements.push(Element::Line(vtn_indices[i], vtn_indices[i + 1])); } Ok(vtn_indices.len() - 1) } /// Parse one or more faces from a single line of text input. /// /// All face vertices must have the same vertex/texture/normal form on /// a line of input. If they do not, the parser will return an error. Otherwise, /// it succeeds. The face parser unpacks the face elements by treating the line /// of face indices as a triangle fan. /// /// The parser returns the number of triangles generated. fn parse_face( &mut self, elements: &mut Vec<Element>, vertex_index_range: (usize, usize), texture_index_range: (usize, usize), normal_index_range: (usize, usize)) -> Result<usize, ParseError> { self.expect_tag("f")?; let mut vtn_indices = vec![]; self.parse_vtn_indices( &mut vtn_indices, vertex_index_range, texture_index_range, normal_index_range )?; // Check that there are enough vtn indices. if vtn_indices.len() < 3 { return self.error( ErrorKind::EveryFaceElementMustHaveAtLeastThreeVertices, "A face primitive must have at least three vertices.".to_owned() ); } if !verify_vtn_indices(&vtn_indices) { return self.error( ErrorKind::EveryVTNIndexMustHaveTheSameFormForAGivenElement, "Every VTN index for a face must have the same form.".to_owned() ); } let face_count = triangulate(elements, &vtn_indices); Ok(face_count) } /// Parse all the elements of a givne type from a line of text input. 
fn parse_elements( &mut self, elements: &mut Vec<Element>, vertex_index_range: (usize, usize), texture_index_range: (usize, usize), normal_index_range: (usize, usize)) -> Result<usize, ParseError> { match self.peek() { Some("p") => self.parse_point( elements, vertex_index_range ), Some("l") => self.parse_line( elements, vertex_index_range, texture_index_range, normal_index_range ), Some("f") => self.parse_face( elements, vertex_index_range, texture_index_range, normal_index_range ), _ => self.error( ErrorKind::ElementMustBeAPointLineOrFace, "An element must be a point (`p`), line (`l`), or face (`f`).".to_owned() ), } } /// Parse group names from a line of text input. fn parse_groups(&mut self, groups: &mut Vec<Group>) -> Result<usize, ParseError> { self.expect_tag("g")?; let mut groups_parsed = 0; loop { match self.next() { Some(name) if name != "\n" => { groups.push(Group(String::from(name))); groups_parsed += 1; } _ => break, } } Ok(groups_parsed) } /// Parse a smoothing group name from a line of text input. fn parse_smoothing_group( &mut self, smoothing_groups: &mut Vec<SmoothingGroup>) -> Result<usize, ParseError> { self.expect_tag("s")?; if let Some(name) = self.next() { if name == "off" { smoothing_groups.push(SmoothingGroup(0)); } else if let Ok(number) = name.parse::<usize>() { smoothing_groups.push(SmoothingGroup(number)); } else { return self.error( ErrorKind::SmoothingGroupNameMustBeOffOrInteger, format!( "A smoothing group name must either be `off`, which denotes that an \ object has no smoothing groups, or an integer. The parser got `{}` instead.", name ) ); } } else { return self.error( ErrorKind::SmoothingGroupDeclarationHasNoName, "Got a smoothing group declaration without a smoothing group name.".to_owned() ); } Ok(1) } /// Parse a material name from a line of text input. fn parse_material_name( &mut self, material_names: &mut Vec<Option<&'a str>>) -> Result<usize, ParseError> { self.expect_tag("usemtl")?; if let Some(name) = self.next() { material_names.push(Some(name)); } else { return self.error( ErrorKind::MaterialStatementHasNoName, "Got a `usemtl` material declaration without a material name.".to_owned() ) } Ok(1) } /// Construct a set of shape entries for each element in the element set. fn parse_shape_entries( &self, shape_entry_table: &mut Vec<ShapeEntry>, elements: &[Element], group_entry_table: &[((usize, usize), (usize, usize))], smoothing_group_entry_table: &[((usize, usize), usize)]) { for &((min_element_index, max_element_index), (min_group_index, max_group_index)) in group_entry_table { let groups: Vec<usize> = (min_group_index..max_group_index).collect(); for i in min_element_index..max_element_index { shape_entry_table.push(ShapeEntry { element: i, groups: groups.clone(), smoothing_group: 0 }); } } debug_assert!(shape_entry_table.len() == elements.len()); for &((min_element_index, max_element_index), smoothing_group_index) in smoothing_group_entry_table { for i in min_element_index..max_element_index { shape_entry_table[i].smoothing_group = smoothing_group_index; } } debug_assert!(shape_entry_table.len() == elements.len()); } /// Construct a set of geometries for reach material in an object. 
fn parse_geometries( &self, geometries: &mut Vec<Geometry>, material_name_entry_table: &[((usize, usize), usize)], material_names: &[Option<&'a str>]) { for &((min_element_index, max_element_index), material_name_index) in material_name_entry_table { let shapes: Vec<ShapeEntryIndex> = (min_element_index..max_element_index).collect(); let material_name = material_names[material_name_index].map(String::from); let geometry = Geometry { material_name: material_name, shapes: shapes }; geometries.push(geometry); } } /// Parse one object from a Wavefront OBJ file. fn parse_object(&mut self, min_vertex_index: &mut usize, max_vertex_index: &mut usize, min_texture_index: &mut usize, max_texture_index: &mut usize, min_normal_index: &mut usize, max_normal_index: &mut usize) -> Result<Object, ParseError> { let object_name = self.parse_object_name()?; let mut vertices: Vec<Vertex> = vec![]; let mut texture_vertices = vec![]; let mut normal_vertices = vec![]; let mut elements = vec![]; let mut group_entry_table = vec![]; let mut groups = vec![]; let mut min_element_group_index = 0; let mut max_element_group_index = 0; let mut min_group_index = 0; let mut max_group_index = 0; let mut smoothing_group_entry_table = vec![]; let mut smoothing_groups = vec![]; let mut min_element_smoothing_group_index = 0; let mut max_element_smoothing_group_index = 0; let mut smoothing_group_index = 0; let mut material_name_entry_table = vec![]; let mut material_names = vec![]; let mut min_element_material_name_index = 0; let mut max_element_material_name_index = 0; let mut material_name_index = 0; loop { match self.peek() { Some("g") if groups.is_empty() => { let amount_parsed = self.parse_groups(&mut groups)?; max_group_index += amount_parsed; } Some("g") => { // Save the shape entry ranges for the current group. group_entry_table.push(( (min_element_group_index, max_element_group_index), (min_group_index, max_group_index) )); let amount_parsed = self.parse_groups(&mut groups)?; min_group_index = max_group_index; max_group_index += amount_parsed; min_element_group_index = max_element_group_index; } Some("s") if smoothing_groups.is_empty() => { self.parse_smoothing_group(&mut smoothing_groups)?; smoothing_group_index = 0; } Some("s") => { if smoothing_groups.is_empty() {} // Save the shape entry ranges for the current smoothing group. 
smoothing_group_entry_table.push(( (min_element_smoothing_group_index, max_element_smoothing_group_index), smoothing_group_index )); self.parse_smoothing_group(&mut smoothing_groups)?; smoothing_group_index += 1; min_element_smoothing_group_index = max_element_smoothing_group_index; } Some("usemtl") => { if min_element_material_name_index == max_element_material_name_index { if material_names.is_empty() { self.parse_material_name(&mut material_names)?; } else { self.parse_material_name(&mut material_names)?; material_name_index += 1; } } else { material_name_entry_table.push(( (min_element_material_name_index, max_element_material_name_index), material_name_index )); if material_names.is_empty() { self.parse_material_name(&mut material_names)?; } else { self.parse_material_name(&mut material_names)?; material_name_index += 1; } } min_element_material_name_index = max_element_material_name_index; } Some("v") => { let vertex = self.parse_vertex()?; vertices.push(vertex); *max_vertex_index += 1; } Some("vt") => { let texture_vertex = self.parse_texture_vertex()?; texture_vertices.push(texture_vertex); *max_texture_index += 1; } Some("vn") => { let normal_vertex = self.parse_normal_vertex()?; normal_vertices.push(normal_vertex); *max_normal_index += 1; } Some("p") | Some("l") | Some("f") => { if groups.is_empty() { groups.push(Default::default()); min_group_index = 0; max_group_index = 1; } if smoothing_groups.is_empty() { smoothing_groups.push(Default::default()); smoothing_group_index = 0; } if material_names.is_empty() { material_names.push(None); material_name_index = 0; } let elements_parsed = self.parse_elements( &mut elements, (*min_vertex_index, *max_vertex_index), (*min_texture_index, *max_vertex_index), (*min_normal_index, *max_normal_index) )?; max_element_group_index += elements_parsed; max_element_smoothing_group_index += elements_parsed; max_element_material_name_index += elements_parsed; } Some("\n") => { self.skip_one_or_more_newlines()?; } Some("o") | None => { // At the end of file or object, collect any remaining shapes. group_entry_table.push(( (min_element_group_index, max_element_group_index), (min_group_index, max_group_index) )); smoothing_group_entry_table.push(( (min_element_smoothing_group_index, max_element_smoothing_group_index), smoothing_group_index )); material_name_entry_table.push(( (min_element_material_name_index, max_element_material_name_index), material_name_index )); break; } Some(other_st) => { return self.error( ErrorKind::InvalidObjectStatement, format!("Unsupported or invalid object statement `{}`.", other_st) ); } } } let mut shape_entries = vec![]; self.parse_shape_entries( &mut shape_entries, &elements, &group_entry_table, &smoothing_group_entry_table ); let mut geometries = vec![]; self.parse_geometries(&mut geometries, &material_name_entry_table, &material_names); *min_vertex_index += vertices.len(); *min_texture_index += texture_vertices.len(); *min_normal_index += normal_vertices.len(); Ok(Object { name: object_name.into(), vertex_set: vertices, texture_vertex_set: texture_vertices, normal_vertex_set: normal_vertices, group_set: groups, smoothing_group_set: smoothing_groups, element_set: elements, shape_set: shape_entries, geometry_set: geometries, }) } /// Parse a set of objects in a wavefront OBJ file. 
fn parse_objects(&mut self) -> Result<Vec<Object>, ParseError> { let mut result = Vec::new(); let mut min_vertex_index = 0; let mut max_vertex_index = 0; let mut min_tex_index = 0; let mut max_tex_index = 0; let mut min_normal_index = 0; let mut max_normal_index = 0; self.skip_zero_or_more_newlines(); while self.peek().is_some() { result.push(self.parse_object( &mut min_vertex_index, &mut max_vertex_index, &mut min_tex_index, &mut max_tex_index, &mut min_normal_index, &mut max_normal_index )?); self.skip_zero_or_more_newlines(); } Ok(result) } /// Parse a set of material library file names from a line of text input. fn parse_material_library_line(&mut self, material_libraries: &mut Vec<String>) -> Result<usize, ParseError> { self.expect_tag("mtllib")?; let mut number_of_libraries_found = 0; loop { match self.next() { Some(st) if st != "\n" => { material_libraries.push(String::from(st)); number_of_libraries_found += 1; } _ => break, } } Ok(number_of_libraries_found) } /// Parse a set of material library names from a Wavefront OBJ file. fn parse_material_libraries(&mut self) -> Result<Vec<String>, ParseError> { let mut material_libraries = vec![]; self.skip_zero_or_more_newlines(); while let Some("mtllib") = self.peek() { self.parse_material_library_line(&mut material_libraries)?; self.skip_zero_or_more_newlines(); } Ok(material_libraries) } /// Parse the object set in the wavefront obj file. /// /// ## Example /// /// ``` /// # use wavefront_obj::obj; /// # use wavefront_obj::obj::{ /// # Vertex, /// # NormalVertex, /// # Group, /// # SmoothingGroup, /// # Element, /// # ShapeEntry, /// # Geometry, /// # VTNIndex, /// # Object, /// # ObjectSet, /// # Parser, /// # }; /// # /// let obj_file = String::from(r" /// mtllib material_library.mtl \ /// o object1 \ /// v 0.000000 2.000000 0.000000 \ /// v 0.000000 0.000000 0.000000 \ /// v 2.000000 0.000000 0.000000 \ /// v 2.000000 2.000000 0.000000 \ /// v 4.000000 0.000000 -1.255298 \ /// v 4.000000 2.000000 -1.255298 \ /// ## 6 vertices \ /// \ /// g all \ /// s 1 \ /// usemtl material1 \ /// f 1 2 3 4 \ /// f 4 3 5 6 \ /// ## 2 elements \ /// \ /// o object2 \ /// v 0.000000 2.000000 0.000000 \ /// v 0.000000 0.000000 0.000000 \ /// v 2.000000 0.000000 0.000000 \ /// v 2.000000 2.000000 0.000000 \ /// v 4.000000 0.000000 -1.255298 \ /// v 4.000000 2.000000 -1.255298 \ /// ## 6 vertices \ /// \ /// g all \ /// s 1 \ /// usemtl material2 \ /// f 7 8 9 10 \ /// f 10 9 11 12 \ /// ## 2 elements \ /// \ /// "); /// // let expected = ...; /// # let expected = ObjectSet { /// # material_libraries: vec![ /// # String::from("material_library.mtl"), /// # ], /// # objects: vec![ /// # Object { /// # name: String::from("object1"), /// # vertex_set: vec![ /// # Vertex { x: 0.000000, y: 2.000000, z: 0.000000, w: 1.0 }, /// # Vertex { x: 0.000000, y: 0.000000, z: 0.000000, w: 1.0 }, /// # Vertex { x: 2.000000, y: 0.000000, z: 0.000000, w: 1.0 }, /// # Vertex { x: 2.000000, y: 2.000000, z: 0.000000, w: 1.0 }, /// # Vertex { x: 4.000000, y: 0.000000, z: -1.255298, w: 1.0 }, /// # Vertex { x: 4.000000, y: 2.000000, z: -1.255298, w: 1.0 }, /// # ], /// # texture_vertex_set: vec![], /// # normal_vertex_set: vec![], /// # group_set: vec![ /// # Group(String::from("all")), /// # ], /// # smoothing_group_set: vec![ /// # SmoothingGroup(1), /// # ], /// # element_set: vec![ /// # Element::Face(VTNIndex::V(0), VTNIndex::V(1), VTNIndex::V(2)), /// # Element::Face(VTNIndex::V(0), VTNIndex::V(2), VTNIndex::V(3)), /// # Element::Face(VTNIndex::V(3), VTNIndex::V(2), 
VTNIndex::V(4)), /// # Element::Face(VTNIndex::V(3), VTNIndex::V(4), VTNIndex::V(5)), /// # ], /// # shape_set: vec![ /// # ShapeEntry { element: 0, groups: vec![0], smoothing_group: 0 }, /// # ShapeEntry { element: 1, groups: vec![0], smoothing_group: 0 }, /// # ShapeEntry { element: 2, groups: vec![0], smoothing_group: 0 }, /// # ShapeEntry { element: 3, groups: vec![0], smoothing_group: 0 }, /// # ], /// # geometry_set: vec![ /// # Geometry { material_name: Some(String::from("material1")), shapes: vec![0, 1, 2, 3] }, /// # ] /// # }, /// # Object { /// # name: String::from("object2"), /// # vertex_set: vec![ /// # Vertex { x: 0.000000, y: 2.000000, z: 0.000000, w: 1.0 }, /// # Vertex { x: 0.000000, y: 0.000000, z: 0.000000, w: 1.0 }, /// # Vertex { x: 2.000000, y: 0.000000, z: 0.000000, w: 1.0 }, /// # Vertex { x: 2.000000, y: 2.000000, z: 0.000000, w: 1.0 }, /// # Vertex { x: 4.000000, y: 0.000000, z: -1.255298, w: 1.0 }, /// # Vertex { x: 4.000000, y: 2.000000, z: -1.255298, w: 1.0 }, /// # ], /// # texture_vertex_set: vec![], /// # normal_vertex_set: vec![], /// # group_set: vec![ /// # Group(String::from("all")), /// # ], /// # smoothing_group_set: vec![ /// # SmoothingGroup(1), /// # ], /// # element_set: vec![ /// # Element::Face(VTNIndex::V(0), VTNIndex::V(1), VTNIndex::V(2)), /// # Element::Face(VTNIndex::V(0), VTNIndex::V(2), VTNIndex::V(3)), /// # Element::Face(VTNIndex::V(3), VTNIndex::V(2), VTNIndex::V(4)), /// # Element::Face(VTNIndex::V(3), VTNIndex::V(4), VTNIndex::V(5)), /// # ], /// # shape_set: vec![ /// # ShapeEntry { element: 0, groups: vec![0], smoothing_group: 0 }, /// # ShapeEntry { element: 1, groups: vec![0], smoothing_group: 0 }, /// # ShapeEntry { element: 2, groups: vec![0], smoothing_group: 0 }, /// # ShapeEntry { element: 3, groups: vec![0], smoothing_group: 0 }, /// # ], /// # geometry_set: vec![ /// # Geometry { material_name: Some(String::from("material2")), shapes: vec![0, 1, 2, 3] }, /// # ] /// # } /// # ] /// # }; /// let mut parser = Parser::new(&obj_file); /// let result = parser.parse_objset(); /// assert!(result.is_ok()); /// /// let result = result.unwrap(); /// assert_eq!(result, expected) /// ``` pub fn parse_objset(&mut self) -> Result<ObjectSet, ParseError> { let material_libraries = self.parse_material_libraries()?; let objects = self.parse_objects()?; Ok(ObjectSet { material_libraries: material_libraries, objects: objects }) } } #[cfg(test)] mod primitive_tests { use super::{ Parser, }; #[test] fn test_parse_f64() { let mut parser = Parser::new("-1.929448"); assert_eq!(parser.parse_f64(), Ok(-1.929448)); } #[test] fn test_parse_isize() { let mut parser = Parser::new(" 763 "); assert_eq!(parser.parse_isize(), Ok(763)); } } #[cfg(test)] mod vertex_tests { use super::{ Parser, Vertex, }; #[test] fn test_parse_vertex1() { let mut parser = Parser::new("v -1.929448 13.329624 -5.221914\n"); let vertex = Vertex { x: -1.929448, y: 13.329624, z: -5.221914, w: 1.0 }; assert_eq!(parser.parse_vertex(), Ok(vertex)); } #[test] fn test_parse_vertex2() { let mut parser = Parser::new("v -1.929448 13.329624 -5.221914 1.329624\n"); let vertex = Vertex { x: -1.929448, y: 13.329624, z: -5.221914, w: 1.329624 }; assert_eq!(parser.parse_vertex(), Ok(vertex)); } #[test] fn test_parse_vertex3() { let mut parser = Parser::new("v -1.929448 13.329624 \n"); assert!(parser.parse_vertex().is_err()); } #[test] fn test_parse_vertex4() { let mut parser = Parser::new("v -1.929448 13.329624 -5.221914 1.329624\n v"); assert!(parser.parse_vertex().is_ok()); } #[test] fn 
test_parse_vertex5() { let mut parser = Parser::new( "v -6.207583 1.699077 8.466142 v -14.299248 1.700244 8.468981 1.329624" ); assert_eq!( parser.parse_vertex(), Ok(Vertex { x: -6.207583, y: 1.699077, z: 8.466142, w: 1.0 }) ); assert_eq!(parser.next(), Some("\n")); assert_eq!( parser.parse_vertex(), Ok(Vertex { x: -14.299248, y: 1.700244, z: 8.468981, w: 1.329624 }) ); } } #[cfg(test)] mod texture_vertex_tests { use super::{ Parser, TextureVertex, }; #[test] fn test_parse_texture_vertex1() { let mut parser = Parser::new("vt -1.929448"); let vt = TextureVertex { u: -1.929448, v: 0.0, w: 0.0 }; assert_eq!(parser.parse_texture_vertex(), Ok(vt)); } #[test] fn test_parse_texture_vertex2() { let mut parser = Parser::new("vt -1.929448 13.329624 -5.221914"); let vt = TextureVertex { u: -1.929448, v: 13.329624, w: -5.221914 }; assert_eq!(parser.parse_texture_vertex(), Ok(vt)); } #[test] fn test_parse_texture_vertex3() { let mut parser = Parser::new( "vt -1.929448 13.329624 -5.221914 vt -27.6068 31.1438 27.2099" ); assert_eq!( parser.parse_texture_vertex(), Ok(TextureVertex { u: -1.929448, v: 13.329624, w: -5.221914 }) ); assert_eq!(parser.next(), Some("\n")); assert_eq!( parser.parse_texture_vertex(), Ok(TextureVertex { u: -27.6068, v: 31.1438, w: 27.2099 }) ); } } #[cfg(test)] mod normal_vertex_tests { use super::{ Parser, NormalVertex, }; #[test] fn test_parse_normal_vertex1() { let mut parser = Parser::new("vn -0.966742 -0.255752 9.97231e-09"); let vn = NormalVertex { x: -0.966742, y: -0.255752, z: 9.97231e-09 }; assert_eq!(parser.parse_normal_vertex(), Ok(vn)); } #[test] fn test_parse_normal_vertex2() { let mut parser = Parser::new( "vn -1.929448 13.329624 -5.221914 vn -27.6068 31.1438 27.2099" ); assert_eq!( parser.parse_normal_vertex(), Ok(NormalVertex { x: -1.929448, y: 13.329624, z: -5.221914 }) ); assert_eq!(parser.next(), Some("\n")); assert_eq!( parser.parse_normal_vertex(), Ok(NormalVertex { x: -27.6068, y: 31.1438, z: 27.2099 }) ); } } #[cfg(test)] mod object_tests { use super::{ Parser, }; #[test] fn test_parse_object_name1() { let mut parser = Parser::new("o object_name \n\n"); assert_eq!(parser.parse_object_name(), Ok("object_name")); } #[test] fn test_parse_object_name2() { let mut parser = Parser::new("o object_name"); assert!(parser.parse_object_name().is_err()); } } #[cfg(test)] mod vtn_index_tests { use super::{ Parser, VTNIndex, }; #[test] fn test_parse_vtn_index1() { let mut parser = Parser::new("1291"); let expected = VTNIndex::V(1290); let result = parser.parse_vtn_index((0, 1300), (0, 1300), (0, 1300)); assert_eq!(result, Ok(expected)); } #[test] fn test_parse_vtn_index2() { let mut parser = Parser::new("1291/1315"); let expected = VTNIndex::VT(1290, 1314); let result = parser.parse_vtn_index((0, 1316), (0, 1316), (0, 1316)); assert_eq!(result, Ok(expected)); } #[test] fn test_parse_vtn_index3() { let mut parser = Parser::new("1291/1315/1314"); let expected = VTNIndex::VTN(1290, 1314, 1313); let result = parser.parse_vtn_index((0, 1316), (0, 1316), (0, 1316)); assert_eq!(result, Ok(expected)); } #[test] fn test_parse_vtn_index4() { let mut parser = Parser::new("1291//1315"); let expected = VTNIndex::VN(1290, 1314); let result = parser.parse_vtn_index((0, 1316), (0, 1316), (0, 1316)); assert_eq!(result, Ok(expected)); } } #[cfg(test)] mod element_tests { use super::{ Parser, Element, VTNIndex, }; #[test] fn test_parse_point1() { let mut parser = Parser::new("p 1 2 3 4 \n"); let mut result = vec![]; let expected = vec![ Element::Point(VTNIndex::V(0)), 
Element::Point(VTNIndex::V(1)), Element::Point(VTNIndex::V(2)), Element::Point(VTNIndex::V(3)), ]; assert!(parser.parse_elements(&mut result, (0, 5), (0, 5), (0, 5)).is_ok()); assert_eq!(result, expected); } #[test] fn test_parse_point2() { let mut parser = Parser::new("p 1 1/2 3 4/5"); let mut result = vec![]; assert!(parser.parse_elements(&mut result, (0, 6), (0, 6), (0, 6)).is_err()); } #[test] fn test_parse_line1() { let mut parser = Parser::new("l 297 38 118 108 \n"); let mut result = vec![]; let expected = vec![ Element::Line(VTNIndex::V(296), VTNIndex::V(37)), Element::Line(VTNIndex::V(37), VTNIndex::V(117)), Element::Line(VTNIndex::V(117), VTNIndex::V(107)), ]; assert!(parser.parse_elements(&mut result, (0, 300), (0, 300), (0, 300)).is_ok()); assert_eq!(result, expected); } #[test] fn test_parse_line2() { let mut parser = Parser::new("l 297/38 118/108 \n"); let mut result = vec![]; let expected = vec![ Element::Line(VTNIndex::VT(296, 37), VTNIndex::VT(117, 107)), ]; assert!(parser.parse_elements(&mut result, (0, 300), (0, 300), (0, 300)).is_ok()); assert_eq!(result, expected); } #[test] fn test_parse_line3() { let mut parser = Parser::new("l 297/38 118/108 324/398 \n"); let mut result = vec![]; let expected = vec![ Element::Line(VTNIndex::VT(296, 37), VTNIndex::VT(117, 107)), Element::Line(VTNIndex::VT(117, 107), VTNIndex::VT(323, 397)), ]; assert!(parser.parse_elements(&mut result, (0, 400), (0, 400), (0, 400)).is_ok()); assert_eq!(result, expected); } #[test] fn test_parse_line4() { let mut parser = Parser::new("l 297/38 118 324 \n"); let mut result = vec![]; assert!(parser.parse_elements(&mut result, (0, 340), (0, 340), (0, 340)).is_err()); } #[test] fn test_parse_line5() { let mut parser = Parser::new("l 297 118/108 324/398 \n"); let mut result = vec![]; assert!(parser.parse_elements(&mut result, (0, 400), (0, 400), (0, 400)).is_err()); } #[test] fn test_parse_face1() { let mut parser = Parser::new("f 297 118 108\n"); let mut result = vec![]; let expected = vec![ Element::Face(VTNIndex::V(296), VTNIndex::V(117), VTNIndex::V(107)), ]; assert!(parser.parse_elements(&mut result, (0, 340), (0, 340), (0, 340)).is_ok()); assert_eq!(result, expected); } #[test] fn test_parse_face2() { let mut parser = Parser::new("f 297 118 108 324\n"); let mut result = vec![]; let expected = vec![ Element::Face(VTNIndex::V(296), VTNIndex::V(117), VTNIndex::V(107)), Element::Face(VTNIndex::V(296), VTNIndex::V(107), VTNIndex::V(323)), ]; assert!(parser.parse_elements(&mut result, (0, 340), (0, 340), (0, 340)).is_ok()); assert_eq!(result, expected); } #[test] fn test_parse_face3() { let mut parser = Parser::new("f 297 118 108 324 398 \n"); let mut result = vec![]; let expected = vec![ Element::Face(VTNIndex::V(296), VTNIndex::V(117), VTNIndex::V(107)), Element::Face(VTNIndex::V(296), VTNIndex::V(107), VTNIndex::V(323)), Element::Face(VTNIndex::V(296), VTNIndex::V(323), VTNIndex::V(397)), ]; assert!(parser.parse_elements(&mut result, (0, 400), (0, 400), (0, 400)).is_ok()); assert_eq!(result, expected); } #[test] fn test_parse_face4() { let mut parser = Parser::new("f 297 118 \n"); let mut result = vec![]; assert!(parser.parse_face(&mut result, (0, 400), (0, 400), (0, 400)).is_err()); } #[test] fn test_parse_face5() { let min_index = 320; let max_index = 35000; let vertex_index_range = (min_index, max_index); let texture_index_range = (min_index, max_index); let normal_index_range = (min_index, max_index); let mut parser = Parser::new( "f 34184//34184 34088//34088 34079//34079 34084//34084 34091//34091 
34076//34076\n" ); let mut result = vec![]; /* let expected = vec![ Element::Face(VTNIndex::VN(34183, 34183), VTNIndex::VN(34087, 34087), VTNIndex::VN(34078, 34078)), Element::Face(VTNIndex::VN(34183, 34183), VTNIndex::VN(34078, 34078), VTNIndex::VN(34083, 34083)), Element::Face(VTNIndex::VN(34183, 34183), VTNIndex::VN(34083, 34083), VTNIndex::VN(34090, 34090)), Element::Face(VTNIndex::VN(34183, 34183), VTNIndex::VN(34090, 34090), VTNIndex::VN(34075, 34075)), ]; */ let expected = vec![ Element::Face(VTNIndex::VN(33863, 33863), VTNIndex::VN(33767, 33767), VTNIndex::VN(33758, 33758)), Element::Face(VTNIndex::VN(33863, 33863), VTNIndex::VN(33758, 33758), VTNIndex::VN(33763, 33763)), Element::Face(VTNIndex::VN(33863, 33863), VTNIndex::VN(33763, 33763), VTNIndex::VN(33770, 33770)), Element::Face(VTNIndex::VN(33863, 33863), VTNIndex::VN(33770, 33770), VTNIndex::VN(33755, 33755)), ]; parser.parse_elements( &mut result, vertex_index_range, texture_index_range, normal_index_range ).unwrap(); assert_eq!(result, expected); } } #[cfg(test)] mod group_tests { use super::{ Parser, Group, }; #[test] fn parse_group_name1() { let mut parser = Parser::new("g group"); let mut result = vec![]; let expected = vec![Group(String::from("group"))]; let parsed = parser.parse_groups(&mut result); assert!(parsed.is_ok()); assert_eq!(result, expected); } #[test] fn parse_group_name2() { let mut parser = Parser::new("g group1 group2 group3"); let mut result = vec![]; let parsed = parser.parse_groups(&mut result); let expected = vec![ Group(String::from("group1")), Group(String::from("group2")), Group(String::from("group3")) ]; assert!(parsed.is_ok()); assert_eq!(result, expected); } } #[cfg(test)] mod smoothing_group_tests { use super::{ Parser, SmoothingGroup }; #[test] fn test_smoothing_group_name1() { let mut parser = Parser::new("s off"); let mut result = vec![]; let parsed = parser.parse_smoothing_group(&mut result); let expected = vec![SmoothingGroup(0)]; assert!(parsed.is_ok()); assert_eq!(result, expected); } #[test] fn test_smoothing_group_name2() { let mut parser = Parser::new("s 0"); let mut result = vec![]; let parsed = parser.parse_smoothing_group(&mut result); let expected = vec![SmoothingGroup(0)]; assert!(parsed.is_ok()); assert_eq!(result, expected); } #[test] fn test_smoothing_group_name3() { let mut parser = Parser::new("s 3434"); let mut result = vec![]; let parsed = parser.parse_smoothing_group(&mut result); let expected = vec![SmoothingGroup(3434)]; assert!(parsed.is_ok()); assert_eq!(result, expected); } } #[cfg(test)] mod mtllib_tests { use super::{ Parser, }; #[test] fn test_mtllib_empty() { let mut parser = Parser::new("mtllib "); let expected: Vec<String> = vec![]; let expected_count = Ok(0); let mut result = vec![]; let result_count = parser.parse_material_library_line(&mut result); assert_eq!(result, expected); assert_eq!(result_count, expected_count); } #[test] fn test_mtllib1() { let mut parser = Parser::new("mtllib library1.mtl"); let expected: Vec<String> = vec![String::from("library1.mtl")]; let expected_count = Ok(1); let mut result = vec![]; let result_count = parser.parse_material_library_line(&mut result); assert_eq!(result, expected); assert_eq!(result_count, expected_count); } #[test] fn test_mtllib2() { let mut parser = Parser::new("mtllib library1.mtl library2.mtl library3.mtl"); let expected: Vec<String> = vec![ String::from("library1.mtl"), String::from("library2.mtl"), String::from("library3.mtl"), ]; let expected_count = Ok(3); let mut result = vec![]; let result_count = 
parser.parse_material_library_line(&mut result); assert_eq!(result, expected); assert_eq!(result_count, expected_count); } } #[cfg(test)] mod objectset_tests { use super::{ Parser, ParseError, ObjectSet, Object, Vertex, NormalVertex, Element, VTNIndex, Group, SmoothingGroup, ShapeEntry, Geometry, }; fn test_case() -> (Result<ObjectSet, ParseError>, Result<ObjectSet, ParseError>){ let obj_file =r" \ o object1 \ g cube \ v 0.0 0.0 0.0 \ v 0.0 0.0 1.0 \ v 0.0 1.0 0.0 \ v 0.0 1.0 1.0 \ v 1.0 0.0 0.0 \ v 1.0 0.0 1.0 \ v 1.0 1.0 0.0 \ v 1.0 1.0 1.0 \ \ vn 0.0 0.0 1.0 \ vn 0.0 0.0 -1.0 \ vn 0.0 1.0 0.0 \ vn 0.0 -1.0 0.0 \ vn 1.0 0.0 0.0 \ vn -1.0 0.0 0.0 \ \ f 1//2 7//2 5//2 \ f 1//2 3//2 7//2 \ f 1//6 4//6 3//6 \ f 1//6 2//6 4//6 \ f 3//3 8//3 7//3 \ f 3//3 4//3 8//3 \ f 5//5 7//5 8//5 \ f 5//5 8//5 6//5 \ f 1//4 5//4 6//4 \ f 1//4 6//4 2//4 \ f 2//1 6//1 8//1 \ f 2//1 8//1 4//1 \ "; let vertex_set = vec![ Vertex { x: 0.0, y: 0.0, z: 0.0, w: 1.0 }, Vertex { x: 0.0, y: 0.0, z: 1.0, w: 1.0 }, Vertex { x: 0.0, y: 1.0, z: 0.0, w: 1.0 }, Vertex { x: 0.0, y: 1.0, z: 1.0, w: 1.0 }, Vertex { x: 1.0, y: 0.0, z: 0.0, w: 1.0 }, Vertex { x: 1.0, y: 0.0, z: 1.0, w: 1.0 }, Vertex { x: 1.0, y: 1.0, z: 0.0, w: 1.0 }, Vertex { x: 1.0, y: 1.0, z: 1.0, w: 1.0 }, ]; let texture_vertex_set = vec![]; let element_set = vec![ Element::Face(VTNIndex::VN(0, 1), VTNIndex::VN(6, 1), VTNIndex::VN(4, 1)), Element::Face(VTNIndex::VN(0, 1), VTNIndex::VN(2, 1), VTNIndex::VN(6, 1)), Element::Face(VTNIndex::VN(0, 5), VTNIndex::VN(3, 5), VTNIndex::VN(2, 5)), Element::Face(VTNIndex::VN(0, 5), VTNIndex::VN(1, 5), VTNIndex::VN(3, 5)), Element::Face(VTNIndex::VN(2, 2), VTNIndex::VN(7, 2), VTNIndex::VN(6, 2)), Element::Face(VTNIndex::VN(2, 2), VTNIndex::VN(3, 2), VTNIndex::VN(7, 2)), Element::Face(VTNIndex::VN(4, 4), VTNIndex::VN(6, 4), VTNIndex::VN(7, 4)), Element::Face(VTNIndex::VN(4, 4), VTNIndex::VN(7, 4), VTNIndex::VN(5, 4)), Element::Face(VTNIndex::VN(0, 3), VTNIndex::VN(4, 3), VTNIndex::VN(5, 3)), Element::Face(VTNIndex::VN(0, 3), VTNIndex::VN(5, 3), VTNIndex::VN(1, 3)), Element::Face(VTNIndex::VN(1, 0), VTNIndex::VN(5, 0), VTNIndex::VN(7, 0)), Element::Face(VTNIndex::VN(1, 0), VTNIndex::VN(7, 0), VTNIndex::VN(3, 0)), ]; let name = String::from("object1"); let normal_vertex_set = vec![ NormalVertex { x: 0.0, y: 0.0, z: 1.0 }, NormalVertex { x: 0.0, y: 0.0, z: -1.0 }, NormalVertex { x: 0.0, y: 1.0, z: 0.0 }, NormalVertex { x: 0.0, y: -1.0, z: 0.0 }, NormalVertex { x: 1.0, y: 0.0, z: 0.0 }, NormalVertex { x: -1.0, y: 0.0, z: 0.0 }, ]; let group_set = vec![Group(String::from("cube"))]; let smoothing_group_set = vec![SmoothingGroup(0)]; let shape_set = vec![ ShapeEntry { element: 0, groups: vec![0], smoothing_group: 0 }, ShapeEntry { element: 1, groups: vec![0], smoothing_group: 0 }, ShapeEntry { element: 2, groups: vec![0], smoothing_group: 0 }, ShapeEntry { element: 3, groups: vec![0], smoothing_group: 0 }, ShapeEntry { element: 4, groups: vec![0], smoothing_group: 0 }, ShapeEntry { element: 5, groups: vec![0], smoothing_group: 0 }, ShapeEntry { element: 6, groups: vec![0], smoothing_group: 0 }, ShapeEntry { element: 7, groups: vec![0], smoothing_group: 0 }, ShapeEntry { element: 8, groups: vec![0], smoothing_group: 0 }, ShapeEntry { element: 9, groups: vec![0], smoothing_group: 0 }, ShapeEntry { element: 10, groups: vec![0], smoothing_group: 0 }, ShapeEntry { element: 11, groups: vec![0], smoothing_group: 0 }, ]; let geometry_set = vec![ Geometry { material_name: None, shapes: vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] }, ]; let 
object = Object { name: name, vertex_set: vertex_set, texture_vertex_set: texture_vertex_set, normal_vertex_set: normal_vertex_set, group_set: group_set, smoothing_group_set: smoothing_group_set, element_set: element_set, shape_set: shape_set, geometry_set: geometry_set, }; let material_libraries = vec![]; let objects = vec![object]; let expected = ObjectSet { material_libraries: material_libraries, objects: objects }; let mut parser = Parser::new(obj_file); let result = parser.parse_objset(); (result, Ok(expected)) } #[test] fn test_parse_object_set1() { let (result, expected) = test_case(); assert_eq!(result, expected); } #[test] fn test_parse_object_set1_tokenwise() { let (result_set, expected_set) = test_case(); let result_set = result_set.unwrap(); let expected_set = expected_set.unwrap(); for (result, expected) in result_set.objects.iter().zip(expected_set.objects.iter()) { assert_eq!(result.name, expected.name); assert_eq!(result.vertex_set, expected.vertex_set); assert_eq!(result.texture_vertex_set, expected.texture_vertex_set); assert_eq!(result.normal_vertex_set, expected.normal_vertex_set); assert_eq!(result.group_set, expected.group_set); assert_eq!(result.smoothing_group_set, expected.smoothing_group_set); assert_eq!(result.element_set, expected.element_set); assert_eq!(result.shape_set, expected.shape_set); } } }
36.334484
114
0.523418
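The `*.obj` parser above unpacks every face statement into triangles with a triangle fan anchored at the face's first vertex (the `triangulate` helper). A minimal standalone sketch of that idea, using plain `usize` indices in place of the crate's `VTNIndex` type — the `fan_triangulate` function below is illustrative, not part of the crate:

// Triangle-fan triangulation: the first vertex is shared by every output triangle.
fn fan_triangulate(indices: &[usize]) -> Vec<[usize; 3]> {
    let mut triangles = Vec::new();
    // An n-gon with n >= 3 yields n - 2 triangles; fewer indices yield nothing.
    for i in 1..indices.len().saturating_sub(1) {
        triangles.push([indices[0], indices[i], indices[i + 1]]);
    }
    triangles
}

fn main() {
    // A quad 0-1-2-3 fans into (0,1,2) and (0,2,3), matching the
    // `f 1 2 3 4` example in the parser's doc tests (0-indexed here).
    let expected: Vec<[usize; 3]> = vec![[0, 1, 2], [0, 2, 3]];
    assert_eq!(fan_triangulate(&[0, 1, 2, 3]), expected);
    println!("{:?}", fan_triangulate(&[0, 1, 2, 3, 4]));
}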
dd67665a92b83d5a56d9813f9f9d5ed614cedd1e
10,005
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use alloc::sync::Arc; use spin::Mutex; use core::ops::Deref; use std::io::{IoSliceMut, IoSlice}; //use std::os::unix::io::AsRawFd; use std::os::unix::io::RawFd; use iou::*; use std::slice; use std::collections::HashMap; use lazy_static::lazy_static; use core::vec::Vec; use super::qlib::common::*; use super::qlib::range::*; use super::qlib::qmsg::*; use super::qlib::linux_def::*; use super::qlib::ShareSpace; use super::util::*; use super::vmspace::hostfdnotifier::*; use super::vmspace::HostFileMap::fdinfo::*; /*lazy_static! { pub static ref URING : Uring<'static> = Uring::New().unwrap(); }*/ pub const IOVS_COUNT : usize = 1024; pub const URING_REQ_COUNT : usize = 1024; #[derive(Clone)] pub struct Uring <'a> (Arc<Mutex<UringIntern<'a>>>); impl <'a> Deref for Uring <'a> { type Target = Arc<Mutex<UringIntern<'a>>>; fn deref(&self) -> &Arc<Mutex<UringIntern<'a>>> { &self.0 } } impl <'a> Uring <'a> { pub fn New() -> Result<Self> { return Ok(Self(Arc::new(Mutex::new(UringIntern::New()?)))) } pub fn Eventfd(&self) -> i32 { return self.lock().eventfd; } } impl <'a> HostFdHandler for Uring <'a> { fn Process(&self, shareSpace: &'static ShareSpace, _event: EventMask) { self.lock().Trigger(shareSpace).ok(); } } pub struct UringIntern<'a> { pub ring: IoUring, pub eventfd: RawFd, pub freeReqCnt: usize, // an array of pre-allocated iovs, workaround for read/write operation. // when the Uring::Read/Uring::Write is ready, it could be deprecated. 
pub iovsMgrMut: GapMgr, pub iovsMut: Vec<IoSliceMut<'a>>, pub iovsMgr: GapMgr, pub iovs: Vec<IoSlice<'a>>, // to (callback, iovsIdx, Mutable) pub reqs: HashMap<u64, (Arc<UringCallback>, usize, bool)>, pub lastReqId: u64, } impl <'a> UringIntern<'a> { pub fn New() -> Result<Self> { let ret = unsafe { libc::eventfd(0, libc::EFD_CLOEXEC | libc::EFD_NONBLOCK) }; let efd = GetRet(ret)?; // let ring = IoUring::new(URING_REQ_COUNT as u32).map_err(|e| Error::FromIOErr(e))?; let ring = IoUring::new_with_flags(32, SetupFlags::SQPOLL, SetupFeatures::empty()) .map_err(|e| Error::FromIOErr(e))?; let registrar: Registrar = ring.registrar(); registrar.register_eventfd(efd).map_err(|e| Error::FromIOErr(e))?; let mut iovs = Vec::with_capacity(IOVS_COUNT); for _ in 0..IOVS_COUNT { iovs.push(Self::DummyIoSlice()) } let mut iovsMut = Vec::with_capacity(IOVS_COUNT); for _ in 0..IOVS_COUNT { iovsMut.push(Self::DummyIoSliceMut()) } return Ok(Self { ring: ring, eventfd: efd, freeReqCnt: URING_REQ_COUNT, iovsMgrMut: GapMgr::New(0, IOVS_COUNT as u64), iovsMut: iovsMut, iovsMgr: GapMgr::New(0, IOVS_COUNT as u64), iovs: iovs, reqs: HashMap::new(), lastReqId: 0, }) } pub fn DummyIoSliceMut() -> IoSliceMut <'a> { let mut x : u8 = 0; let ptr = &mut x as * mut _; let slice = unsafe { slice::from_raw_parts_mut(ptr, 1) }; return IoSliceMut::new(slice); } pub fn DummyIoSlice() -> IoSlice <'a> { let mut x : u8 = 0; let ptr = &mut x as * mut _; let slice = unsafe { slice::from_raw_parts(ptr, 1) }; return IoSlice::new(slice); } pub fn GetIovs(&mut self) -> Result<usize> { let ret = self.iovsMgr.Alloc(0, 0)?; return Ok(ret as usize) } pub fn FreeIovs(&mut self, idx: usize) { self.iovsMgr.Free(idx as u64, 1); } pub fn GetIovsMut(&mut self) -> Result<usize> { let ret = self.iovsMgrMut.Alloc(0, 0)?; return Ok(ret as usize) } pub fn FreeIovsMut(&mut self, idx: usize) { self.iovsMgrMut.Free(idx as u64, 1); } #[inline] pub fn AllocReqId(&mut self) -> Result<u64> { if self.freeReqCnt == 0 { return Err(Error::NoUringReq) } self.lastReqId += 1; return Ok(self.lastReqId); } pub fn AddReq(&mut self, reqIdx: u64, callback: Arc<UringCallback>, iovIdx: usize, mutable: bool) { self.reqs.insert(reqIdx, (callback, iovIdx, mutable)); } pub fn GetReq(&mut self, reqIdx: u64) -> (Arc<UringCallback>, usize, bool) { return self.reqs.remove(&reqIdx).unwrap(); } pub fn Trigger(&mut self, sp: &'static ShareSpace) -> Result<()> { loop { let mut v : u64 = 0; let ret = unsafe { libc::read(self.eventfd, &mut v as * mut _ as *mut libc::c_void, 8) }; GetRet(ret as i32)?; loop { let mut cq = self.ring.cq(); let cqe = match cq.peek_for_cqe() { None => break, Some(e) => e, }; self.Process(sp, &cqe)?; } } } pub fn Process(&mut self, sp: &'static ShareSpace, cqe: &CQE) -> Result<()> { let reqId = cqe.user_data(); let (req, iovIdx, mutable) = self.GetReq(reqId); if mutable { self.FreeIovsMut(iovIdx); } else { self.FreeIovs(iovIdx) } self.freeReqCnt += 1; return req.Callback(sp, cqe); } pub fn Read(&mut self, fd: RawFd, buf: &'a mut [u8], offset: u64) -> Result<()> { let reqId = self.AllocReqId()?; let iovsIdx = self.GetIovsMut()?; self.freeReqCnt -= 1; unsafe { let mut sq = self.ring.sq(); let mut sqe = sq.prepare_sqe().unwrap(); self.iovsMut[iovsIdx] = IoSliceMut::new(buf); sqe.prep_read_vectored(fd, &mut self.iovsMut[iovsIdx..iovsIdx+1], offset); sqe.set_user_data(reqId); sq.submit().map_err(|e| Error::FromIOErr(e))?; } return Ok(()) } pub fn BufWrite(&mut self, msg: UringBufWrite) -> Result<()> { let reqId = self.AllocReqId()?; let iovsIdx = self.GetIovs()?; 
self.freeReqCnt -= 1; let fd = msg.fdInfo.lock().osfd; msg.fdInfo.lock().pendingWriteCnt += 1; let ptr = msg.addr as * mut u8; let buf = unsafe { slice::from_raw_parts(ptr, msg.len) }; let offset = msg.offset; unsafe { let mut sq = self.ring.sq(); let mut sqe = sq.prepare_sqe().unwrap(); self.iovs[iovsIdx] = IoSlice::new(buf); sqe.prep_write_vectored(fd, &self.iovs[iovsIdx..iovsIdx+1], offset); sqe.set_user_data(reqId); sq.submit().map_err(|e| Error::FromIOErr(e))?; } self.AddReq(reqId, Arc::new(msg), iovsIdx, true); return Ok(()) } } pub trait UringCallback : Send + Sync { fn Callback(&self, sp: &'static ShareSpace, cqe: &CQE) -> Result<()> ; } pub struct UringReadIntern { pub fdInfo: FdInfo, pub addr: u64, pub len: usize, pub offset: u64, } pub struct UringRead(Arc<UringReadIntern>); impl Deref for UringRead { type Target = Arc<UringReadIntern>; fn deref(&self) -> &Arc<UringReadIntern> { &self.0 } } pub struct UringBufWriteIntern { pub fdInfo: FdInfo, pub addr: u64, pub len: usize, pub offset: u64, } pub struct UringBufWrite(Arc<UringBufWriteIntern>); impl Deref for UringBufWrite { type Target = Arc<UringBufWriteIntern>; fn deref(&self) -> &Arc<UringBufWriteIntern> { &self.0 } } impl UringBufWrite { pub fn New(fdInfo: FdInfo, addr: u64, len: usize, offset: isize) -> Self { return Self(Arc::new(UringBufWriteIntern { fdInfo: fdInfo, addr: addr, len: len, offset: offset as u64, })) } } impl UringCallback for UringBufWrite { fn Callback(&self, shareSpace: &'static ShareSpace, cqe: &CQE) -> Result<()> { self.fdInfo.lock().pendingWriteCnt -= 1; let fd = self.fdInfo.lock().osfd; match cqe.result() { Ok(size) => { // assert!(size as usize == self.len, format!("size is {}, self.len is {}", size, self.len)); if size as usize == self.len { shareSpace.AQHostInputCall(HostInputMsg::IOBufWriteResp(IOBufWriteResp{ fd: fd, addr: self.addr, len: self.len, ret: 0, })); } else { let msg = UringBufWrite::New(self.fdInfo.clone(), self.addr + size as u64, self.len - size as usize, (self.offset + size as u64) as isize); //todo: add back this //URING.lock().BufWrite(msg)?; } } Err(e) => { shareSpace.AQHostInputCall(HostInputMsg::IOBufWriteResp(IOBufWriteResp{ fd: fd, addr: self.addr, len: self.len, ret: e.raw_os_error().unwrap() as i64, })); } } return Ok(()) } }
28.832853
109
0.547326
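The io_uring wrapper above tags each SQE with a monotonically increasing `user_data` id, stashes the callback and the borrowed iovec slot in a `HashMap`, and reclaims both when the matching CQE arrives. A std-only sketch of that bookkeeping pattern — the `Inflight` type and the `Fn(i32)` callback signature are assumptions for illustration, not the crate's `UringCallback` API:

use std::collections::HashMap;

// Each in-flight request remembers its completion callback and the iovec slot it borrowed.
struct Inflight {
    next_id: u64,
    pending: HashMap<u64, (Box<dyn Fn(i32)>, usize)>,
}

impl Inflight {
    fn new() -> Self {
        Self { next_id: 0, pending: HashMap::new() }
    }

    // Hand out a fresh id and record the callback; the id would become the SQE's user_data.
    fn submit(&mut self, callback: Box<dyn Fn(i32)>, iov_slot: usize) -> u64 {
        self.next_id += 1;
        self.pending.insert(self.next_id, (callback, iov_slot));
        self.next_id
    }

    // On completion, run the callback with the CQE result and hand the slot back to the caller.
    fn complete(&mut self, user_data: u64, result: i32) -> Option<usize> {
        let (callback, iov_slot) = self.pending.remove(&user_data)?;
        callback(result);
        Some(iov_slot)
    }
}

fn main() {
    let mut inflight = Inflight::new();
    let id = inflight.submit(Box::new(|res| println!("write completed: {} bytes", res)), 3);
    assert_eq!(inflight.complete(id, 4096), Some(3));
}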
4897a11cfb7e5bda084f72a0a17b1977090543a2
2,842
use std::sync::Arc; use crate::{ error::PricerError, pricingctx::{InstrumentRef, Measure, PricingCtx, VectorizedPricingCtx}, stackedvectorctx::StackedVectorizedPricingCtx, transform::{InstrumentFilter, RiskFactor, TransformDefinition}, }; fn price_equity_delta( ctx: &Arc<PricingCtx>, instrument: &InstrumentRef, ) -> Result<f64, PricerError> { let transform_def = TransformDefinition::new_vector_measure_transform( Measure::Price, InstrumentFilter::RiskFactorFilter(RiskFactor::Equity), vec![0.0, 0.0, 0.0], vec![-0.01, 0.0, 0.01], )?; let vcxt = StackedVectorizedPricingCtx::shift_base_ctx(ctx, transform_def); let vres = vcxt.price(Measure::Price, instrument)?; if let Some([vdown, v, vup]) = vres.resultview_1d()?.as_slice() { if *v == 0.0 { if f64::abs(*vup - *vdown) < f64::EPSILON { return Ok(0.0); } return Err(PricerError::ShiftExecutionError { ins: instrument.into(), message: "zero price in relshift".to_string(), }); } let h = v * 0.01; Ok((vup - vdown) / (2.0 * h)) } else { Err(PricerError::ShiftExecutionError { ins: instrument.into(), message: "unknown scenario dimension".to_string(), }) } } fn price_equity_gamma( ctx: &Arc<PricingCtx>, instrument: &InstrumentRef, ) -> Result<f64, PricerError> { let transform_def = TransformDefinition::new_vector_measure_transform( Measure::Price, InstrumentFilter::RiskFactorFilter(RiskFactor::Equity), vec![0.0, 0.0, 0.0], vec![-0.01, 0.0, 0.01], )?; let vcxt = StackedVectorizedPricingCtx::shift_base_ctx(ctx, transform_def); let vres = vcxt.price(Measure::Price, instrument)?; if let Some([vdown, v, vup]) = vres.resultview_1d()?.as_slice() { if *v == 0.0 { if f64::abs(*vup - *vdown) < f64::EPSILON { return Ok(0.0); } return Err(PricerError::ShiftExecutionError { ins: instrument.into(), message: "zero price in relshift".to_string(), }); } let h = v * 0.01; Ok((vup - 2.0 * v + vdown) / f64::powf(h, 2.0)) } else { Err(PricerError::ShiftExecutionError { ins: instrument.into(), message: "unknown scenario dimension".to_string(), }) } } pub fn price_generic( ctx: &Arc<PricingCtx>, measure: Measure, instrument: &InstrumentRef, ) -> Result<f64, PricerError> { match measure { Measure::Delta => price_equity_delta(ctx, instrument), Measure::Gamma => price_equity_gamma(ctx, instrument), _ => Err(PricerError::MissingCalculatorError(measure)), } }
33.435294
79
0.585855
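Both bump-and-revalue measures above use the same three-point central-difference stencil: delta ≈ (vup − vdown) / (2h) and gamma ≈ (vup − 2v + vdown) / h², where the crate takes h = 0.01·v from a ±1% relative shift. A self-contained numeric check of those two formulas against a toy quadratic price function — the toy function and the 1% bump on a spot of 100 are assumptions for illustration, not one of the crate's pricers:

// Toy price as a function of the equity spot: v(s) = 2s^2 + 3s + 1,
// so dv/ds = 4s + 3 and d2v/ds2 = 4.
fn price(spot: f64) -> f64 {
    2.0 * spot * spot + 3.0 * spot + 1.0
}

fn main() {
    let spot = 100.0;
    let h = spot * 0.01; // 1% relative bump on the spot

    let v = price(spot);
    let vup = price(spot + h);
    let vdown = price(spot - h);

    // Central differences, mirroring price_equity_delta / price_equity_gamma.
    let delta = (vup - vdown) / (2.0 * h);
    let gamma = (vup - 2.0 * v + vdown) / (h * h);

    println!("delta = {:.6} (analytic {})", delta, 4.0 * spot + 3.0);
    println!("gamma = {:.6} (analytic 4)", gamma);
    assert!((delta - 403.0).abs() < 1e-6);
    assert!((gamma - 4.0).abs() < 1e-6);
}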
b91e84bda9a67d7db87851c2f4fd9feb20987599
3,317
extern crate chrono;
extern crate clap;
extern crate super_units;

use std::io::{self, Read, Write, stderr};
use std::fs::OpenOptions;

use clap::{Arg, App};
use chrono::prelude::*;
use super_units::{Unit, Amount};

const CAPACITY: usize = 4096;

fn print_rate(bytes: usize, unit: &SelectedUnit, stream: &mut dyn Write, timestamp: bool) {
    let bytes = bytes as u64 as f64;
    let data = match unit {
        SelectedUnit::Unit(unit) => Amount::new(bytes, *unit),
        SelectedUnit::Auto => Amount::auto_detect(bytes)
    };
    if timestamp {
        write!(stream, "[{}] ", Local::now().to_rfc3339()).expect("Couldn't write")
    }
    writeln!(stream, "{}/s", data).expect("Couldn't write");
}

enum SelectedUnit {
    Unit(Unit),
    Auto
}

fn main() {
    let unit_values = ["b", "k", "m", "g", "t"];
    let matches = App::new("measure")
        .version("1.1")
        .author("Mota")
        .about("Measures data transfer given in standard input")
        .arg(Arg::with_name("unit")
            .short("u")
            .long("unit")
            .value_name("UNIT")
            .required(false)
            .help("Display the result in a different unit format (otherwise, auto detect)")
            .takes_value(true)
            .possible_values(&unit_values)
        )
        .arg(Arg::with_name("file")
            .short("f")
            .long("file")
            .value_name("FILE")
            .required(false)
            .help("File to output the transfer rate to instead of stderr")
            .takes_value(true)
        )
        .arg(Arg::with_name("timestamp")
            .short("t")
            .long("timestamp")
            .help("Add timestamp per transfer rate line")
        )
        .get_matches();

    let handle = stderr();
    let mut stream: Box<dyn Write> = match matches.value_of("file") {
        Some(path) => {
            let file = OpenOptions::new().write(true).create(true).open(path).unwrap();
            Box::new(file)
        },
        None => Box::new(handle.lock())
    };
    let unit = match matches.value_of("unit") {
        Some(value) => {
            match value {
                "b" => SelectedUnit::Unit(Unit::Byte),
                "k" => SelectedUnit::Unit(Unit::Kilo),
                "m" => SelectedUnit::Unit(Unit::Mega),
                "g" => SelectedUnit::Unit(Unit::Giga),
                "t" => SelectedUnit::Unit(Unit::Tera),
                _ => SelectedUnit::Auto
            }
        },
        None => SelectedUnit::Auto
    };
    let mut buffer: [u8; CAPACITY] = [0; CAPACITY];
    let mut bytes_read = 0;
    let mut dt = Local::now();
    loop {
        // Read one chunk; stop cleanly once stdin reaches end of file.
        let count = match io::stdin().read(&mut buffer) {
            Ok(0) => break,
            Ok(count) => count,
            Err(e) => {
                panic!("An error occurred: {:?}", e);
            }
        };
        bytes_read += count;
        // Forward only the bytes actually read, not the whole buffer.
        match io::stdout().write_all(&buffer[..count]) {
            Ok(_) => (),
            Err(e) => {
                panic!("An error occurred: {:?}", e);
            }
        }
        let now = Local::now();
        let diff = (now - dt).to_std().unwrap().as_secs();
        if diff >= 1 {
            dt = now;
            print_rate(bytes_read, &unit, &mut stream, matches.is_present("timestamp"));
            bytes_read = 0;
        }
    }
}
27.641667
92
0.497136
d780a374952bfeef202c9ba3a27d5894da1cc44a
3,673
use std::convert::TryFrom;

use async_std::io::prelude::*;

use crate::types::field::Field;
use crate::types::{PacketError, PacketResult, VarIntField};
use async_trait::async_trait;
use std::fmt::{Debug, Display, Formatter};

#[derive(Debug)]
pub struct StringField {
    value: String,

    /// String length
    length: VarIntField,
}

impl StringField {
    pub fn new(value: String) -> Self {
        assert!(i32::try_from(value.len()).is_ok());

        let len = value.len();
        Self {
            value,
            length: VarIntField::new(len as i32),
        }
    }

    pub fn new_chat(value: impl Display) -> Self {
        Self::new(format!(r#"{{"text": "{}"}}"#, value))
    }

    pub fn take(self) -> String {
        self.value
    }
}

#[async_trait]
impl Field for StringField {
    type Displayable = String;

    fn value(&self) -> &Self::Displayable {
        &self.value
    }

    fn size(&self) -> usize {
        self.length.size() + self.length.value() as usize
    }

    async fn read_field<R: Read + Unpin + Send>(r: &mut R) -> PacketResult<Self> {
        let length = VarIntField::read_field(r).await?.value() as usize;

        let value = {
            let mut vec = vec![0u8; length];
            r.read_exact(&mut vec).await.map_err(PacketError::Io)?;
            String::from_utf8(vec)?
        };

        Ok(Self::new(value))
    }

    async fn write_field<W: Write + Unpin + Send>(&self, w: &mut W) -> PacketResult<()> {
        self.length.write_field(w).await?;
        w.write_all(self.value.as_bytes())
            .await
            .map_err(PacketError::Io)
    }
}

pub struct IdentifierField {
    string: StringField,
    colon: Option<usize>,
}

impl IdentifierField {
    pub fn new(s: String) -> Self {
        let colon = s.find(':');
        Self {
            string: StringField::new(s),
            colon,
        }
    }

    pub fn namespace(&self) -> &str {
        match self.colon {
            Some(idx) => &self.string.value[..idx],
            None => "minecraft",
        }
    }

    pub fn location(&self) -> &str {
        match self.colon {
            Some(idx) => &self.string.value[idx + 1..],
            None => &self.string.value,
        }
    }
}

impl From<StringField> for IdentifierField {
    fn from(s: StringField) -> Self {
        Self::new(s.take())
    }
}

impl Debug for IdentifierField {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}:{}", self.namespace(), self.location())
    }
}

#[async_trait]
impl Field for IdentifierField {
    type Displayable = String;

    fn value(&self) -> &Self::Displayable {
        self.string.value()
    }

    fn size(&self) -> usize {
        self.string.size()
    }

    async fn read_field<R: Read + Unpin + Send>(r: &mut R) -> PacketResult<Self> {
        StringField::read_field(r)
            .await
            .map(|s| Self::new(s.take()))
    }

    async fn write_field<W: Write + Unpin + Send>(&self, w: &mut W) -> PacketResult<()> {
        self.string.write_field(w).await
    }
}

#[cfg(test)]
mod test {
    use crate::types::IdentifierField;

    #[test]
    fn identifier() {
        let default = IdentifierField::new("bonbon".to_owned());
        let custom = IdentifierField::new("colon:sunglass".to_lowercase());
        let bad = IdentifierField::new("ohno:".to_lowercase());

        assert_eq!(default.namespace(), "minecraft");
        assert_eq!(default.location(), "bonbon");

        assert_eq!(custom.namespace(), "colon");
        assert_eq!(custom.location(), "sunglass");

        assert_eq!(bad.namespace(), "ohno");
        assert_eq!(bad.location(), "");
    }
}
23.696774
89
0.554588
1d504af4e6c176c25cfc39604df9099f5f52116c
2,590
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::mpsc;
use std::thread;

pub fn string_trim_end(mut s: &str) -> &str {
    const TRAILER: &'static str = "\0";
    while s.ends_with(TRAILER) {
        let new_len = s.len().saturating_sub(TRAILER.len());
        s = &s[..new_len];
    }
    s
}

enum Message {
    NewJob(Job),
    Terminate,
}

pub struct ThreadPool {
    workers: Vec<Worker>,
    sender: mpsc::Sender<Message>,
}

trait FnBox {
    fn call_box(self: Box<Self>);
}

impl<F: FnOnce()> FnBox for F {
    fn call_box(self: Box<F>) {
        (*self)()
    }
}

type Job = Box<FnBox + Send + 'static>;

impl ThreadPool {
    /// Create a new ThreadPool.
    ///
    /// The size is the number of threads in the pool.
    ///
    /// # Panics
    ///
    /// The `new` function will panic if the size is zero.
    pub fn new(size: usize) -> ThreadPool {
        assert!(size > 0);

        let (sender, receiver) = mpsc::channel();

        let receiver = Arc::new(Mutex::new(receiver));

        let mut workers = Vec::with_capacity(size);

        for id in 0..size {
            workers.push(Worker::new(id, Arc::clone(&receiver)));
        }

        ThreadPool { workers, sender }
    }

    pub fn execute<F>(&self, f: F)
    where
        F: FnOnce() + Send + 'static,
    {
        let job = Box::new(f);

        self.sender.send(Message::NewJob(job)).unwrap();
    }
}

impl Drop for ThreadPool {
    fn drop(&mut self) {
        println!("Sending terminate message to all workers.");

        for _ in &mut self.workers {
            self.sender.send(Message::Terminate).unwrap();
        }

        println!("Shutting down all workers.");

        for worker in &mut self.workers {
            println!("Shutting down worker {}", worker.id);

            if let Some(thread) = worker.thread.take() {
                thread.join().unwrap();
            }
        }
    }
}

struct Worker {
    id: usize,
    thread: Option<thread::JoinHandle<()>>,
}

impl Worker {
    fn new(id: usize, receiver: Arc<Mutex<mpsc::Receiver<Message>>>) -> Worker {
        let thread = thread::spawn(move || loop {
            let message = receiver.lock().unwrap().recv().unwrap();

            match message {
                Message::NewJob(job) => {
                    job.call_box();
                }
                Message::Terminate => {
                    println!("Worker {} was told to terminate.", id);

                    break;
                }
            }
        });

        Worker {
            id,
            thread: Some(thread),
        }
    }
}
21.583333
80
0.513127
483b920e428524f65c449221a20a158b470f85f0
9,410
mod shells;

// Std
use std::io::Write;

// Internal
use clap::{find_subcmd, flags, match_alias, subcommands, App, AppSettings, Arg};

pub use shells::*;

/// Generator trait which can be used to write generators
pub trait Generator {
    /// Returns the file name that is created when this generator is called during compile time.
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::io::Write;
    /// # use clap::App;
    /// use clap_generate::Generator;
    ///
    /// pub struct Fish;
    ///
    /// impl Generator for Fish {
    /// #   fn generate(app: &App, buf: &mut dyn Write) {}
    ///     fn file_name(name: &str) -> String {
    ///         format!("{}.fish", name)
    ///     }
    /// }
    /// ```
    fn file_name(name: &str) -> String;

    /// Generates output out of [`clap::App`](../clap/struct.App.html).
    ///
    /// # Examples
    ///
    /// The following example generator displays the [`clap::App`](../clap/struct.App.html)
    /// as if it is printed using [`std::println`](https://doc.rust-lang.org/std/macro.println.html).
    ///
    /// ```
    /// use std::{io::Write, fmt::write};
    /// use clap::App;
    /// use clap_generate::Generator;
    ///
    /// pub struct ClapDebug;
    ///
    /// impl Generator for ClapDebug {
    ///     fn generate(app: &App, buf: &mut dyn Write) {
    ///         write!(buf, "{}", app).unwrap();
    ///     }
    /// #   fn file_name(name: &str) -> String {
    /// #       name.into()
    /// #   }
    /// }
    /// ```
    fn generate(app: &App, buf: &mut dyn Write);

    /// Gets all subcommands including child subcommands in the form of `("name", "bin_name")`.
    ///
    /// Subcommand `rustup toolchain install` would be converted to
    /// `("install", "rustup toolchain install")`.
    fn all_subcommands(app: &App) -> Vec<(String, String)> {
        let mut subcmds: Vec<_> = Self::subcommands(app);

        for sc_v in subcommands!(app).map(|s| Self::all_subcommands(&s)) {
            subcmds.extend(sc_v);
        }

        subcmds
    }

    /// Finds the subcommand [`clap::App`][clap] from the given [`clap::App`][clap] with the given path.
    ///
    /// **NOTE:** `path` should not contain the root `bin_name`.
    ///
    /// [clap]: ../clap/struct.App.html
    fn find_subcommand_with_path<'b>(p: &'b App<'b>, path: Vec<&str>) -> &'b App<'b> {
        let mut app = p;

        for sc in path {
            app = find_subcmd!(app, sc).unwrap();
        }

        app
    }

    /// Gets subcommands of [`clap::App`](../clap/struct.App.html) in the form of `("name", "bin_name")`.
    ///
    /// Subcommand `rustup toolchain install` would be converted to
    /// `("install", "rustup toolchain install")`.
    fn subcommands(p: &App) -> Vec<(String, String)> {
        debugln!("subcommands: name={}", p.name);
        debugln!("subcommands: Has subcommands...{:?}", p.has_subcommands());

        let mut subcmds = vec![];

        if !p.has_subcommands() {
            return subcmds;
        }

        for sc in &p.subcommands {
            let sc_bin_name = sc.get_bin_name().unwrap();

            debugln!(
                "subcommands:iter: name={}, bin_name={}",
                sc.name,
                sc_bin_name
            );

            subcmds.push((sc.name.clone(), sc_bin_name.to_string()));
        }

        subcmds
    }

    /// Gets all the short options and flags of a [`clap::App`](../clap/struct.App.html).
    /// Includes `h` and `V` depending on the [`clap::AppSettings`](../clap/enum.AppSettings.html).
    fn shorts<'b>(p: &'b App<'b>) -> Vec<char> {
        debugln!("shorts: name={}", p.name);

        let mut shorts: Vec<char> = p
            .args
            .args
            .iter()
            .filter_map(|a| {
                if a.index.is_none() && a.short.is_some() {
                    Some(a.short.unwrap())
                } else {
                    None
                }
            })
            .collect();

        if shorts.iter().find(|x| **x == 'h').is_none() {
            shorts.push('h');
        }

        if !p.is_set(AppSettings::DisableVersion) && shorts.iter().find(|x| **x == 'V').is_none() {
            shorts.push('V');
        }

        shorts
    }

    /// Gets all the long options and flags of a [`clap::App`](../clap/struct.App.html).
    /// Includes `help` and `version` depending on the [`clap::AppSettings`](../clap/enum.AppSettings.html).
    fn longs<'b>(p: &'b App<'b>) -> Vec<String> {
        debugln!("longs: name={}", p.name);

        let mut longs: Vec<String> = p
            .args
            .args
            .iter()
            .filter_map(|a| {
                if a.index.is_none() && a.long.is_some() {
                    Some(a.long.unwrap().to_string())
                } else {
                    None
                }
            })
            .collect();

        if longs.iter().find(|x| **x == "help").is_none() {
            longs.push(String::from("help"));
        }

        if !p.is_set(AppSettings::DisableVersion) && longs.iter().find(|x| **x == "version").is_none() {
            longs.push(String::from("version"));
        }

        longs
    }

    /// Gets all the flags of a [`clap::App`](../clap/struct.App.html).
    /// Includes `help` and `version` depending on the [`clap::AppSettings`](../clap/enum.AppSettings.html).
    fn flags<'b>(p: &'b App<'b>) -> Vec<Arg> {
        debugln!("flags: name={}", p.name);

        let mut flags: Vec<_> = flags!(p).cloned().collect();

        if flags.iter().find(|x| x.name == "help").is_none() {
            flags.push(
                Arg::with_name("help")
                    .short('h')
                    .long("help")
                    .help("Prints help information"),
            );
        }

        if !p.is_set(AppSettings::DisableVersion) && flags.iter().find(|x| x.name == "version").is_none() {
            flags.push(
                Arg::with_name("version")
                    .short('V')
                    .long("version")
                    .help("Prints version information"),
            );
        }

        flags
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    struct Foo;

    impl Generator for Foo {
        fn generate(_: &App, _: &mut dyn Write) {}

        fn file_name(name: &str) -> String {
            name.to_string()
        }
    }

    fn common() -> App<'static> {
        let mut app = App::new("myapp")
            .subcommand(
                App::new("test")
                    .subcommand(App::new("config"))
                    .arg(Arg::with_name("file").short('f').long("file")),
            )
            .subcommand(App::new("hello"))
            .bin_name("my-app");

        app._build();
        app._build_bin_names();

        app
    }

    #[test]
    fn test_subcommands() {
        let app = common();

        assert_eq!(
            Foo::subcommands(&app),
            vec![
                ("test".to_string(), "my-app test".to_string()),
                ("hello".to_string(), "my-app hello".to_string()),
                ("help".to_string(), "my-app help".to_string()),
            ]
        );
    }

    #[test]
    fn test_all_subcommands() {
        let app = common();

        assert_eq!(
            Foo::all_subcommands(&app),
            vec![
                ("test".to_string(), "my-app test".to_string()),
                ("hello".to_string(), "my-app hello".to_string()),
                ("help".to_string(), "my-app help".to_string()),
                ("config".to_string(), "my-app test config".to_string()),
            ]
        );
    }

    #[test]
    fn test_find_subcommand_with_path() {
        let app = common();
        let sc_app = Foo::find_subcommand_with_path(&app, "test config".split(' ').collect());

        assert_eq!(sc_app.name, "config");
    }

    #[test]
    fn test_flags() {
        let app = common();
        let flags = Foo::flags(&app);

        assert_eq!(flags.len(), 2);
        assert_eq!(flags[0].long, Some("help"));
        assert_eq!(flags[1].long, Some("version"));

        let sc_flags = Foo::flags(Foo::find_subcommand_with_path(&app, vec!["test"]));

        assert_eq!(sc_flags.len(), 3);
        assert_eq!(sc_flags[0].long, Some("file"));
        assert_eq!(sc_flags[1].long, Some("help"));
        assert_eq!(sc_flags[2].long, Some("version"));
    }

    #[test]
    fn test_shorts() {
        let app = common();
        let shorts = Foo::shorts(&app);

        assert_eq!(shorts.len(), 2);
        assert_eq!(shorts[0], 'h');
        assert_eq!(shorts[1], 'V');

        let sc_shorts = Foo::shorts(Foo::find_subcommand_with_path(&app, vec!["test"]));

        assert_eq!(sc_shorts.len(), 3);
        assert_eq!(sc_shorts[0], 'f');
        assert_eq!(sc_shorts[1], 'h');
        assert_eq!(sc_shorts[2], 'V');
    }

    #[test]
    fn test_longs() {
        let app = common();
        let longs = Foo::longs(&app);

        assert_eq!(longs.len(), 2);
        assert_eq!(longs[0], "help");
        assert_eq!(longs[1], "version");

        let sc_longs =
            Foo::longs(Foo::find_subcommand_with_path(&app, vec!["test"]));

        assert_eq!(sc_longs.len(), 3);
        assert_eq!(sc_longs[0], "file");
        assert_eq!(sc_longs[1], "help");
        assert_eq!(sc_longs[2], "version");
    }
}
29.04321
108
0.491817
ebafb3b64c915eaefc4fb3061f53f4e73dd4f372
1,275
// This file is part of mlnx-ofed. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/mlnx-ofed/master/COPYRIGHT. No part of mlnx-ofed, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2016 The developers of mlnx-ofed. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/mlnx-ofed/master/COPYRIGHT.


#[repr(C)]
pub struct general_data_hot {
    pub wqe_head: *mut c_uint,
    pub post_send_one: Option<unsafe extern "C" fn(wr: *mut ibv_exp_send_wr, qp: *mut mlx5_qp, exp_send_flags: u64, seg: *mut c_void, total_size: *mut c_int) -> c_int>,
    pub sqstart: *mut c_void,
    pub sqend: *mut c_void,
    pub db: *mut u32,
    pub bf: *mut mlx5_bf,
    pub scur_post: u32,
    pub last_post: u32,
    pub create_flags: u16,
    pub fm_cache: u8,
    pub model_flags: u8,
}

impl Default for general_data_hot {
    #[inline(always)]
    fn default() -> Self {
        unsafe { zeroed() }
    }
}

impl Debug for general_data_hot {
    #[inline(always)]
    fn fmt(&self, f: &mut Formatter) -> Result {
        write!(f, "general_data_hot {{ }}")
    }
}
33.552632
382
0.732549