hexsha | size | content | avg_line_length | max_line_length | alphanum_fraction
---|---|---|---|---|---
bfa1e79a0a8816245ce1604a0cd64d78f9d55fde | 11,435 |
/*
This tool is part of the WhiteboxTools geospatial analysis library.
Authors: Dr. John Lindsay
Created: 17/10/2018
Last Modified: 08/04/2019
License: MIT
*/
extern crate kdtree;
use whitebox_common::algorithms::{
find_split_points_at_line_intersections, interior_point, is_clockwise_order,
};
use whitebox_common::structures::{BoundingBox, Polyline};
use crate::tools::*;
use whitebox_vector::*;
use kdtree::distance::squared_euclidean;
use kdtree::KdTree;
use std::cmp::Ordering;
use std::collections::{BinaryHeap, HashSet};
use std::env;
use std::f64::EPSILON;
use std::io::{Error, ErrorKind};
use std::path;
pub struct SnapEndnodes {
name: String,
description: String,
toolbox: String,
parameters: Vec<ToolParameter>,
example_usage: String,
}
impl SnapEndnodes {
pub fn new() -> SnapEndnodes {
// public constructor
let name = "SnapEndnodes".to_string();
let toolbox = "GIS Analysis/Overlay Tools".to_string();
let description =
"Snaps end-nodes in a vector line coverage."
.to_string();
let mut parameters = vec![];
parameters.push(ToolParameter {
name: "Input Vector Lines File".to_owned(),
flags: vec!["-i".to_owned(), "--input".to_owned()],
description: "Input vector line file.".to_owned(),
parameter_type: ParameterType::ExistingFile(ParameterFileType::Vector(
VectorGeometryType::Lines,
)),
default_value: None,
optional: false,
});
parameters.push(ToolParameter {
name: "Output Vector File".to_owned(),
flags: vec!["-o".to_owned(), "--output".to_owned()],
description: "Output vector file.".to_owned(),
parameter_type: ParameterType::NewFile(ParameterFileType::Vector(
VectorGeometryType::Lines,
)),
default_value: None,
optional: false,
});
parameters.push(ToolParameter {
name: "Snap Tolerance".to_owned(),
flags: vec!["--snap".to_owned()],
description: "Snap tolerance.".to_owned(),
parameter_type: ParameterType::Float,
default_value: Some("0.0".to_owned()),
optional: true,
});
let sep: String = path::MAIN_SEPARATOR.to_string();
let e = format!("{}", env::current_exe().unwrap().display());
let mut parent = env::current_exe().unwrap();
parent.pop();
let p = format!("{}", parent.display());
let mut short_exe = e
.replace(&p, "")
.replace(".exe", "")
.replace(".", "")
.replace(&sep, "");
if e.contains(".exe") {
short_exe += ".exe";
}
let usage = format!(
">>.*{0} -r={1} -v --wd=\"*path*to*data*\" -input=layer1.shp -o=out_file.shp --snap=0.0000001",
short_exe, name
).replace("*", &sep);
SnapEndnodes {
name: name,
description: description,
toolbox: toolbox,
parameters: parameters,
example_usage: usage,
}
}
}
impl WhiteboxTool for SnapEndnodes {
fn get_source_file(&self) -> String {
String::from(file!())
}
fn get_tool_name(&self) -> String {
self.name.clone()
}
fn get_tool_description(&self) -> String {
self.description.clone()
}
fn get_tool_parameters(&self) -> String {
let mut s = String::from("{\"parameters\": [");
for i in 0..self.parameters.len() {
if i < self.parameters.len() - 1 {
s.push_str(&(self.parameters[i].to_string()));
s.push_str(",");
} else {
s.push_str(&(self.parameters[i].to_string()));
}
}
s.push_str("]}");
s
}
fn get_example_usage(&self) -> String {
self.example_usage.clone()
}
fn get_toolbox(&self) -> String {
self.toolbox.clone()
}
fn run<'a>(
&self,
args: Vec<String>,
working_directory: &'a str,
verbose: bool,
) -> Result<(), Error> {
let mut input_file: String = "".to_string();
let mut output_file: String = "".to_string();
let mut precision = std::f64::EPSILON;
// read the arguments
if args.len() == 0 {
return Err(Error::new(
ErrorKind::InvalidInput,
"Tool run with no parameters.",
));
}
for i in 0..args.len() {
let mut arg = args[i].replace("\"", "");
arg = arg.replace("\'", "");
let cmd = arg.split("="); // in case an equals sign was used
let vec = cmd.collect::<Vec<&str>>();
let mut keyval = false;
if vec.len() > 1 {
keyval = true;
}
let flag_val = vec[0].to_lowercase().replace("--", "-");
if flag_val == "-i" || flag_val == "-input" {
input_file = if keyval {
vec[1].to_string()
} else {
args[i + 1].to_string()
};
} else if flag_val == "-o" || flag_val == "-output" {
output_file = if keyval {
vec[1].to_string()
} else {
args[i + 1].to_string()
};
} else if flag_val == "-snap" {
precision = if keyval {
vec[1].to_string().parse::<f64>().expect(&format!("Error parsing {}", flag_val))
} else {
args[i + 1].to_string().parse::<f64>().expect(&format!("Error parsing {}", flag_val))
};
if precision == 0f64 {
precision = std::f64::EPSILON;
}
}
}
let sep: String = path::MAIN_SEPARATOR.to_string();
let mut progress: usize;
let mut old_progress: usize = 1;
let start = Instant::now();
if verbose {
let tool_name = self.get_tool_name();
let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28);
// 28 = length of the 'Powered by' statement.
println!("{}", "*".repeat(welcome_len));
println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len()));
println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28));
println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23));
println!("{}", "*".repeat(welcome_len));
}
if !input_file.contains(path::MAIN_SEPARATOR) && !input_file.contains("/") {
input_file = format!("{}{}", working_directory, input_file);
}
if !output_file.contains(&sep) && !output_file.contains("/") {
output_file = format!("{}{}", working_directory, output_file);
}
let input = Shapefile::read(&input_file)?;
// create output file
let mut output = Shapefile::initialize_using_file(&output_file, &input, input.header.shape_type, true)?;
// make sure the input vector file is of polyline type
if input.header.shape_type.base_shape_type() != ShapeType::PolyLine {
return Err(Error::new(
ErrorKind::InvalidInput,
"The input vector data must be of POLYLINE base shape type.",
));
}
let mut first_point_in_part: usize;
let mut last_point_in_part: usize;
let mut polylines: Vec<Polyline> = vec![];
for record_num in 0..input.num_records {
let record = input.get_record(record_num);
if record.shape_type != ShapeType::Null {
for part in 0..record.num_parts as usize {
first_point_in_part = record.parts[part] as usize;
last_point_in_part = if part < record.num_parts as usize - 1 {
record.parts[part + 1] as usize - 1
} else {
record.num_points as usize - 1
};
// Create a polyline from the part
let mut pl = Polyline::new(
&(record.points[first_point_in_part..=last_point_in_part]),
record_num,
);
pl.source_file = 1;
polylines.push(pl);
}
}
}
let num_endnodes = polylines.len() * 2;
/*
The structure of endnodes is as such:
1. the starting node for polyline 'a' is a * 2.
2. the ending node for polyline 'a' is a * 2 + 1.
3. endnode to polyline = e / 2
4. is an endnode a starting point? e % 2 == 0
*/
let mut endnodes: Vec<Vec<usize>> = vec![vec![]; num_endnodes];
// now add the endpoints of each polyline into a kd tree
let dimensions = 2;
let capacity_per_node = 64;
let mut kdtree = KdTree::new_with_capacity(dimensions, capacity_per_node);
let mut p1: Point2D;
let mut p2: Point2D;
let mut p3: Point2D;
let mut p4: Point2D;
for i in 0..polylines.len() {
p1 = polylines[i].first_vertex();
kdtree.add([p1.x, p1.y], first_node_id(i)).unwrap();
p2 = polylines[i].last_vertex();
kdtree.add([p2.x, p2.y], last_node_id(i)).unwrap();
}
// Find the neighbours of each endnode.
for i in 0..polylines.len() {
p1 = polylines[i].first_vertex();
p2 = polylines[i].last_vertex();
// check the first vertex
let ret = kdtree.within(&[p1.x, p1.y], precision, &squared_euclidean).unwrap();
if ret.len() > 1 {
for a in 0..ret.len() {
}
}
}
let elapsed_time = get_formatted_elapsed_time(start);
if verbose {
println!("{}", &format!("Elapsed Time: {}", elapsed_time));
}
Ok(())
}
}
#[derive(Debug)]
struct Link {
id: usize,
priority: f64,
}
impl PartialEq for Link {
fn eq(&self, other: &Self) -> bool {
(self.priority - other.priority).abs() < EPSILON && self.id == other.id
}
}
impl Eq for Link {}
impl Ord for Link {
fn cmp(&self, other: &Link) -> Ordering {
// this sorts priorities from low to high
// and when priorities are equal, id's from
// high to low.
let mut ord = other.priority.partial_cmp(&self.priority).unwrap();
if ord == Ordering::Equal {
ord = self.id.cmp(&other.id);
}
ord
}
}
impl PartialOrd for Link {
fn partial_cmp(&self, other: &Link) -> Option<Ordering> {
Some(self.cmp(other))
}
}
fn get_other_endnode(index: usize) -> usize {
if index % 2 == 0 {
// it's a starting node and we need the end
return index + 1;
}
// it's an end node and we need the starting node
index - 1
}
fn is_first_node(index: usize) -> bool {
index % 2 == 0
}
fn first_node_id(polyline: usize) -> usize {
polyline * 2
}
fn last_node_id(polyline: usize) -> usize {
polyline * 2 + 1
}
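// A minimal illustrative test module (not part of the original WhiteboxTools source):
// it exercises the endnode-id helpers and the `Link` ordering defined above.
#[cfg(test)]
mod tests {
    use super::{first_node_id, get_other_endnode, is_first_node, last_node_id, Link};
    use std::collections::BinaryHeap;

    #[test]
    fn endnode_id_arithmetic() {
        // Polyline 3 owns endnodes 6 (start) and 7 (end); node / 2 recovers the polyline.
        assert_eq!(first_node_id(3), 6);
        assert_eq!(last_node_id(3), 7);
        assert!(is_first_node(6));
        assert!(!is_first_node(7));
        assert_eq!(get_other_endnode(6), 7);
        assert_eq!(get_other_endnode(7), 6);
    }

    #[test]
    fn link_heap_pops_lowest_priority_first() {
        // The reversed comparison in `Ord` makes the max-heap behave as a min-heap on priority.
        let mut heap = BinaryHeap::new();
        heap.push(Link { id: 0, priority: 2.0 });
        heap.push(Link { id: 1, priority: 0.5 });
        heap.push(Link { id: 2, priority: 1.0 });
        assert_eq!(heap.pop().unwrap().id, 1);
        assert_eq!(heap.pop().unwrap().id, 2);
        assert_eq!(heap.pop().unwrap().id, 0);
    }
}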
| 32.30226 | 113 | 0.51937 |
d9ccbae75a332e793e95a1754e4a845ca6634d3a | 667 |
#[cfg(not(any(
feature = "u32_backend",
feature = "u64_backend",
feature = "fiat_u32_backend",
feature = "fiat_u64_backend"
)))]
compile_error!(
"no curve25519 backend cargo feature enabled! \
please enable one of: u32_backend, u64_backend, fiat_u32_backend, fiat_u64_backend"
);
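// For example (an illustrative invocation, not mandated by this file), a consumer
// would typically build with exactly one backend feature enabled:
//
//     cargo build --features u64_backend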
#[cfg(feature = "u32_backend")]
pub mod u32;
#[cfg(feature = "u64_backend")]
pub mod u64;
#[cfg(feature = "fiat_u32_backend")]
pub mod fiat_u32;
#[cfg(feature = "fiat_u64_backend")]
pub mod fiat_u64;
pub mod curve_models;
#[cfg(not(all(
feature = "simd_backend",
any(target_feature = "avx2", target_feature = "avx512ifma")
)))]
pub mod scalar_mul;
| 21.516129 | 88 | 0.689655 |
cce53597044248169e782d24c2dbe23474e1b28f | 1,112 |
#[doc = "Reader of register AUTOFLUSHEDPKTS"]
pub type R = crate::R<u32, super::AUTOFLUSHEDPKTS>;
#[doc = "Writer for register AUTOFLUSHEDPKTS"]
pub type W = crate::W<u32, super::AUTOFLUSHEDPKTS>;
#[doc = "Register AUTOFLUSHEDPKTS `reset()`'s with value 0"]
impl crate::ResetValue for super::AUTOFLUSHEDPKTS {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type { 0 }
}
#[doc = "Reader of field `COUNT`"]
pub type COUNT_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `COUNT`"]
pub struct COUNT_W<'a> {
w: &'a mut W,
}
impl<'a> COUNT_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff);
self.w
}
}
impl R {
#[doc = "Bits 0:15 - Flushed RX pkts counter"]
#[inline(always)]
pub fn count(&self) -> COUNT_R { COUNT_R::new((self.bits & 0xffff) as u16) }
}
impl W {
#[doc = "Bits 0:15 - Flushed RX pkts counter"]
#[inline(always)]
pub fn count(&mut self) -> COUNT_W { COUNT_W { w: self } }
}
| 31.771429 | 80 | 0.608813 |
7677d67c0ee5d6b0f7fdb173e12bfbc9e92d5b10 | 105 |
// src/arch/x64/reg/mod.rs
//
// This is the mod.rs file for the arch::x64::reg module.
pub mod rflags;
| 17.5 | 57 | 0.666667 |
f873832e94f091c94036a6ed20dc5c6e71f41ab8 | 4,370 |
#![deny(missing_docs)]
//! This module provides helper functions for generating rvalue and lvalues
//! corresponding to a single Rust expression.
use super::*;
impl<'c> Translation<'c> {
/// Get back a Rust lvalue corresponding to the expression passed in.
///
/// Do not use the output lvalue expression more than once.
pub fn name_reference_write(
&self,
ctx: ExprContext,
reference: CExprId,
) -> Result<WithStmts<P<Expr>>, TranslationError> {
self.name_reference(ctx, reference, false)
.map(|ws| ws.map(|(lvalue, _)| lvalue))
}
/// Get back a Rust (lvalue, rvalue) pair corresponding to the expression passed in.
///
/// You may reuse either of these expressions.
pub fn name_reference_write_read(
&self,
ctx: ExprContext,
reference: CExprId,
) -> Result<WithStmts<(P<Expr>, P<Expr>)>, TranslationError> {
let msg: &str = "When called with `uses_read = true`, `name_reference` should always \
return an rvalue (something from which to read the memory location)";
self.name_reference(ctx, reference, true)
.map(|ws| ws.map(|(lvalue, rvalue)| (lvalue, rvalue.expect(msg))))
}
/// This function transforms an expression that should refer to a memory location (a C lvalue)
/// into a Rust lvalue for writing to that location.
///
/// When called with `uses_read`, this function returns an rvalue too. The rvalue can be used to
/// read multiple times without duplicating side-effects.
///
/// NOTE: Use `name_reference_write` or `name_reference_write_read` instead of calling this
/// directly.
fn name_reference(
&self,
ctx: ExprContext,
reference: CExprId,
uses_read: bool,
) -> Result<WithStmts<(P<Expr>, Option<P<Expr>>)>, TranslationError> {
let reference_ty = self
.ast_context
.index(reference)
.kind
.get_qual_type()
.ok_or_else(|| format_err!("bad reference type"))?;
let WithStmts {
val: reference,
mut stmts,
} = self.convert_expr(ctx.used(), reference)?;
/// Check if something is a valid Rust lvalue. Inspired by `librustc::ty::expr_is_lval`.
fn is_lvalue(e: &Expr) -> bool {
match e.node {
ExprKind::Path(..)
| ExprKind::Unary(ast::UnOp::Deref, _)
| ExprKind::Field(..)
| ExprKind::Index(..) => true,
_ => false,
}
}
// Check if something is a side-effect free Rust lvalue.
fn is_simple_lvalue(e: &Expr) -> bool {
match e.node {
ExprKind::Path(..) => true,
ExprKind::Unary(ast::UnOp::Deref, ref e)
| ExprKind::Field(ref e, _)
| ExprKind::Index(ref e, _) => is_simple_lvalue(e),
_ => false,
}
}
// Given the LHS access to a variable, produce the RHS one
let read = |write: P<Expr>| -> Result<P<Expr>, TranslationError> {
if reference_ty.qualifiers.is_volatile {
self.volatile_read(&write, reference_ty)
} else {
Ok(write)
}
};
if !uses_read && is_lvalue(&*reference) {
Ok(WithStmts {
stmts,
val: (reference, None),
})
} else if is_simple_lvalue(&*reference) {
Ok(WithStmts {
stmts,
val: (reference.clone(), Some(read(reference)?)),
})
} else {
// This is the case where we explicitly need to factor out possible side-effects.
let ptr_name = self.renamer.borrow_mut().fresh();
// let ref mut p = lhs;
let compute_ref = mk().local_stmt(P(mk().local(
mk().mutbl().ident_ref_pat(&ptr_name),
None as Option<P<Ty>>,
Some(reference),
)));
stmts.push(compute_ref);
let write = mk().unary_expr(ast::UnOp::Deref, mk().ident_expr(&ptr_name));
Ok(WithStmts {
stmts,
val: (write.clone(), Some(read(write)?)),
})
}
}
}
| 35.528455 | 100 | 0.540046 |
1412bc7607e312178e4900322cfc442050835ee2 | 407 |
#[derive(Clone, Copy)]
/// Enumeration of multicodecs used in the Holium framework.
/// Reference: https://github.com/multiformats/multicodec
pub enum BlockMulticodec {
Raw,
DagCbor,
}
impl From<&BlockMulticodec> for u64 {
fn from(codec: &BlockMulticodec) -> Self {
match codec {
BlockMulticodec::Raw => 0x55,
BlockMulticodec::DagCbor => 0x71,
}
}
}
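// Illustrative check (not from the original crate): the conversions above match the
// multicodec table entries referenced in the enum's doc comment.
#[cfg(test)]
mod tests {
    use super::BlockMulticodec;

    #[test]
    fn codes_match_multicodec_table() {
        assert_eq!(u64::from(&BlockMulticodec::Raw), 0x55);
        assert_eq!(u64::from(&BlockMulticodec::DagCbor), 0x71);
    }
}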
| 23.941176 | 60 | 0.63145 |
911af5b40ea10f0fa72c790ec65530d272c5ec7b | 15,619 |
#![warn(clippy::all)]
#![warn(clippy::pedantic)]
#![warn(clippy::cargo)]
#![warn(clippy::needless_borrow)]
#![allow(clippy::let_underscore_drop)]
// https://github.com/rust-lang/rust-clippy/pull/5998#issuecomment-731855891
#![allow(clippy::map_err_ignore)]
#![allow(clippy::option_if_let_else)]
#![allow(unknown_lints)]
#![warn(missing_docs)]
#![warn(missing_debug_implementations)]
#![warn(missing_copy_implementations)]
#![warn(rust_2018_idioms)]
#![warn(trivial_casts, trivial_numeric_casts)]
#![warn(unused_qualifications)]
#![warn(variant_size_differences)]
#![forbid(unsafe_code)]
// Enable feature callouts in generated documentation:
// https://doc.rust-lang.org/beta/unstable-book/language-features/doc-cfg.html
//
// This approach is borrowed from tokio.
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_alias))]
//! An implementation of [Ruby's pseudo-random number generator][ruby-random],
//! or PRNG.
//!
//! The PRNG produces a deterministic sequence of bits which approximate true
//! randomness. The sequence may be represented by integers, floats, or binary
//! strings.
//!
//! The generator may be initialized with either a system-generated or
//! user-supplied seed value.
//!
//! PRNGs are currently implemented as a modified Mersenne Twister with a period
//! of 2**19937-1.
//!
//! # Implementation notes
//!
//! This RNG reproduces the same random bytes and floats as MRI. It may differ
//! when returning elements confined to a distribution.
//!
//! # Examples
//!
//! Generate integers:
//!
//! ```
//! # use spinoso_random::Random;
//! let seed = [627457_u32, 697550, 16438, 41926];
//! let mut random = Random::with_array_seed(seed);
//! let rand = random.next_int32();
//! ```
//!
//! Generate random numbers in a range:
//!
//! ```
//! # #[cfg(feature = "random-rand")]
//! # use spinoso_random::{rand, Error, Max, Rand, Random};
//! # #[cfg(feature = "random-rand")]
//! # fn example() -> Result<(), Error> {
//! let mut random = Random::new()?;
//! let max = Max::Integer(10);
//! let mut rand = rand(&mut random, max)?;
//! assert!(matches!(rand, Rand::Integer(x) if x < 10));
//! # Ok(())
//! # }
//! # #[cfg(feature = "random-rand")]
//! # example().unwrap();
//! ```
//!
//! # `no_std`
//!
//! This crate is `no_std` compatible when built without the `std` feature. This
//! crate does not depend on [`alloc`].
//!
//! # Crate features
//!
//! All features are enabled by default.
//!
//! - **random-rand** - Enables range sampling methods for the [`rand()`]
//! function. Activating this feature also activates the **rand-traits**
//! feature. Dropping this feature removes the [`rand`] dependency.
//! - **rand-traits** - Enables implementations of [`RngCore`] on [`Random`] and
//! [`Mt`] types. Dropping this feature removes the [`rand_core`] dependency.
//! - **std** - Enables a dependency on the Rust Standard Library. Activating
//! this feature enables [`std::error::Error`] impls on error types in this
//! crate.
//!
//! [ruby-random]: https://ruby-doc.org/core-2.6.3/Random.html
//! [`alloc`]: https://doc.rust-lang.org/alloc/
//! [`rand`]: ::rand
//! [`RngCore`]: rand_core::RngCore
#![no_std]
#[cfg(any(feature = "std", test, doctest))]
extern crate std;
use core::fmt;
#[cfg(feature = "std")]
use std::error;
#[cfg(feature = "random-rand")]
mod rand;
mod random;
mod urandom;
#[cfg(feature = "random-rand")]
pub use self::rand::{rand, Max, Rand};
pub use random::ruby::Mt;
pub use random::{new_seed, seed_to_key, Random};
pub use urandom::urandom;
// Ensure code blocks in README.md compile
#[cfg(doctest)]
macro_rules! readme {
($x:expr) => {
#[doc = $x]
mod readme {}
};
() => {
readme!(include_str!("../README.md"));
};
}
#[cfg(all(feature = "rand", doctest))]
readme!();
/// Sum type of all errors possibly returned from `Random` functions.
///
/// Random functions in `spinoso-random` return errors in the following
/// conditions:
///
/// - The platform source of cryptographic randomness is unavailable.
/// - The platform source of cryptographic randomness does not have sufficient
/// entropy to return the requested bytes.
/// - Constraints for bounding random numbers are invalid.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
pub enum Error {
/// Error that indicates [`rand()`] was passed an invalid constraint.
///
/// See [`ArgumentError`].
Argument(ArgumentError),
/// Error that indicates that [`Random::new`] failed to generate a random
/// seed.
///
/// See [`InitializeError`].
Initialize(InitializeError),
/// Error that indicates that [`new_seed`] failed to generate a random seed.
///
/// See [`NewSeedError`].
NewSeed(NewSeedError),
/// Error that indicates that [`urandom()`] failed to generate the requested
/// random bytes from the platform source of randomness.
///
/// See [`UrandomError`].
Urandom(UrandomError),
}
impl From<ArgumentError> for Error {
#[inline]
fn from(err: ArgumentError) -> Self {
Self::Argument(err)
}
}
impl From<InitializeError> for Error {
#[inline]
fn from(err: InitializeError) -> Self {
Self::Initialize(err)
}
}
impl From<NewSeedError> for Error {
#[inline]
fn from(err: NewSeedError) -> Self {
Self::NewSeed(err)
}
}
impl From<UrandomError> for Error {
#[inline]
fn from(err: UrandomError) -> Self {
Self::Urandom(err)
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("Random error")
}
}
#[cfg(feature = "std")]
impl error::Error for Error {
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
match self {
Self::Argument(ref err) => Some(err),
Self::Initialize(ref err) => Some(err),
Self::NewSeed(ref err) => Some(err),
Self::Urandom(ref err) => Some(err),
}
}
}
/// Error that indicates a `Random` random number generator failed to
/// initialize.
///
/// When initializing an [`Mt`] with a random seed, gathering entropy from the
/// host system can fail.
///
/// This error corresponds to the [Ruby `RuntimeError` Exception class].
///
/// # Examples
///
/// ```
/// # use spinoso_random::InitializeError;
/// let err = InitializeError::new();
/// assert_eq!(err.message(), "failed to get urandom");
/// ```
///
/// [Ruby `RuntimeError` Exception class]: https://ruby-doc.org/core-2.6.3/RuntimeError.html
#[derive(Default, Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct InitializeError {
_private: (),
}
impl InitializeError {
/// Construct a new, default initialize error.
///
/// # Examples
///
/// ```
/// # use spinoso_random::InitializeError;
/// const ERR: InitializeError = InitializeError::new();
/// assert_eq!(ERR.message(), "failed to get urandom");
/// ```
#[inline]
#[must_use]
pub const fn new() -> Self {
Self { _private: () }
}
/// Retrieve the exception message associated with this initialization
/// error.
///
/// # Examples
///
/// ```
/// # use spinoso_random::InitializeError;
/// let err = InitializeError::new();
/// assert_eq!(err.message(), "failed to get urandom");
/// ```
#[inline]
#[must_use]
#[allow(clippy::unused_self)]
pub const fn message(self) -> &'static str {
"failed to get urandom"
}
}
impl fmt::Display for InitializeError {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(self.message())
}
}
#[cfg(feature = "std")]
impl error::Error for InitializeError {}
/// Error that indicates the system source of cryptographically secure
/// randomness failed to read the requested bytes.
///
/// This can occur if the source is unknown or lacks sufficient entropy.
///
/// This error is returned by [`urandom()`]. See its documentation for more
/// details.
///
/// This error corresponds to the [Ruby `RuntimeError` Exception class].
///
/// # Examples
///
/// ```
/// # use spinoso_random::UrandomError;
/// let err = UrandomError::new();
/// assert_eq!(err.message(), "failed to get urandom");
/// ```
///
/// [Ruby `RuntimeError` Exception class]: https://ruby-doc.org/core-2.6.3/RuntimeError.html
#[derive(Default, Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct UrandomError {
_private: (),
}
impl UrandomError {
/// Construct a new, default urandom error.
///
/// # Examples
///
/// ```
/// # use spinoso_random::UrandomError;
/// const ERR: UrandomError = UrandomError::new();
/// assert_eq!(ERR.message(), "failed to get urandom");
/// ```
#[inline]
#[must_use]
pub const fn new() -> Self {
Self { _private: () }
}
/// Retrieve the exception message associated with this urandom error.
///
/// # Examples
///
/// ```
/// # use spinoso_random::UrandomError;
/// let err = UrandomError::new();
/// assert_eq!(err.message(), "failed to get urandom");
/// ```
#[inline]
#[must_use]
#[allow(clippy::unused_self)]
pub const fn message(self) -> &'static str {
"failed to get urandom"
}
}
impl fmt::Display for UrandomError {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(self.message())
}
}
#[cfg(feature = "std")]
impl error::Error for UrandomError {}
/// Error that indicates the system source of cryptographically secure
/// randomness failed to read sufficient bytes to create a new seed.
///
/// This can occur if the source is unknown or lacks sufficient entropy.
///
/// This error is returned by [`new_seed`]. See its documentation for more
/// details.
///
/// This error corresponds to the [Ruby `RuntimeError` Exception class].
///
/// # Examples
///
/// ```
/// # use spinoso_random::NewSeedError;
/// let err = NewSeedError::new();
/// assert_eq!(err.message(), "failed to get urandom");
/// ```
///
/// [Ruby `RuntimeError` Exception class]: https://ruby-doc.org/core-2.6.3/RuntimeError.html
#[derive(Default, Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct NewSeedError {
_private: (),
}
impl NewSeedError {
/// Construct a new, default new seed error.
///
/// # Examples
///
/// ```
/// # use spinoso_random::NewSeedError;
/// const ERR: NewSeedError = NewSeedError::new();
/// assert_eq!(ERR.message(), "failed to get urandom");
/// ```
#[inline]
#[must_use]
pub const fn new() -> Self {
Self { _private: () }
}
/// Retrieve the exception message associated with this new seed error.
///
/// # Examples
///
/// ```
/// # use spinoso_random::NewSeedError;
/// let err = NewSeedError::new();
/// assert_eq!(err.message(), "failed to get urandom");
/// ```
#[inline]
#[must_use]
#[allow(clippy::unused_self)]
pub const fn message(self) -> &'static str {
"failed to get urandom"
}
}
impl fmt::Display for NewSeedError {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(self.message())
}
}
#[cfg(feature = "std")]
impl error::Error for NewSeedError {}
/// Error that indicates a random number could not be generated with the given
/// bounds.
///
/// This error is returned by [`rand()`]. See its documentation for more
/// details.
///
/// This error corresponds to the [Ruby `ArgumentError` Exception class].
///
/// # Examples
///
/// ```
/// # use spinoso_random::ArgumentError;
/// let err = ArgumentError::new();
/// assert_eq!(err.message(), "ArgumentError");
/// ```
///
/// [Ruby `ArgumentError` Exception class]: https://ruby-doc.org/core-2.6.3/ArgumentError.html
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
pub struct ArgumentError(ArgumentErrorInner);
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
enum ArgumentErrorInner {
Default,
DomainError,
#[cfg(feature = "random-rand")]
#[cfg_attr(docsrs, doc(cfg(feature = "random-rand")))]
Rand(Max),
}
impl ArgumentError {
/// Construct a new, default argument error.
///
/// # Examples
///
/// ```
/// # use spinoso_random::ArgumentError;
/// const ERR: ArgumentError = ArgumentError::new();
/// assert_eq!(ERR.message(), "ArgumentError");
/// ```
#[inline]
#[must_use]
pub const fn new() -> Self {
Self(ArgumentErrorInner::Default)
}
/// Construct a new domain error.
///
/// # Examples
///
/// ```
/// # use spinoso_random::ArgumentError;
/// const ERR: ArgumentError = ArgumentError::domain_error();
/// assert_eq!(ERR.message(), "Numerical argument out of domain");
/// ```
#[inline]
#[must_use]
pub const fn domain_error() -> Self {
Self(ArgumentErrorInner::DomainError)
}
/// Construct a new argument error from an invalid [`Max`] constraint.
///
/// # Examples
///
/// ```
/// # use spinoso_random::{ArgumentError, Max};
/// const ERR: ArgumentError = ArgumentError::with_rand_max(Max::Integer(-1));
/// assert_eq!(ERR.message(), "invalid argument");
/// ```
#[inline]
#[must_use]
#[cfg(feature = "random-rand")]
#[cfg_attr(docsrs, doc(cfg(feature = "random-rand")))]
pub const fn with_rand_max(max: Max) -> Self {
Self(ArgumentErrorInner::Rand(max))
}
/// Retrieve the exception message associated with this new seed error.
///
/// # Implementation notes
///
/// Argument errors constructed with [`ArgumentError::with_rand_max`] return
/// an incomplete error message. Prefer to use the [`Display`] impl to
/// retrieve error messages from [`ArgumentError`].
///
/// # Examples
///
/// ```
/// # use spinoso_random::ArgumentError;
/// let err = ArgumentError::new();
/// assert_eq!(err.message(), "ArgumentError");
/// let err = ArgumentError::domain_error();
/// assert_eq!(err.message(), "Numerical argument out of domain");
/// ```
///
/// [`Display`]: fmt::Display
#[inline]
#[must_use]
pub const fn message(self) -> &'static str {
match self.0 {
ArgumentErrorInner::Default => "ArgumentError",
ArgumentErrorInner::DomainError => "Numerical argument out of domain",
#[cfg(feature = "random-rand")]
ArgumentErrorInner::Rand(_) => "invalid argument",
}
}
/// Return whether this argument error is a domain error.
///
/// Domain errors are typically reported as `Errno::EDOM` in MRI.
///
/// # Examples
///
/// ```
/// # use spinoso_random::ArgumentError;
/// let err = ArgumentError::domain_error();
/// assert!(err.is_domain_error());
/// let err = ArgumentError::new();
/// assert!(!err.is_domain_error());
/// ```
#[inline]
#[must_use]
pub const fn is_domain_error(self) -> bool {
matches!(self.0, ArgumentErrorInner::DomainError)
}
}
impl fmt::Display for ArgumentError {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.0 {
ArgumentErrorInner::Default | ArgumentErrorInner::DomainError => f.write_str(self.message()),
#[cfg(feature = "random-rand")]
ArgumentErrorInner::Rand(max) => write!(f, "invalid argument - {}", max),
}
}
}
#[cfg(feature = "std")]
impl error::Error for ArgumentError {}
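// Minimal illustrative tests (not part of the upstream crate) for the error
// conversions and messages defined above.
#[cfg(test)]
mod error_tests {
    use super::{ArgumentError, Error, InitializeError};

    #[test]
    fn conversions_into_error() {
        let err = Error::from(InitializeError::new());
        assert!(matches!(err, Error::Initialize(_)));
        let err = Error::from(ArgumentError::domain_error());
        assert!(matches!(err, Error::Argument(_)));
    }

    #[test]
    fn argument_error_messages() {
        assert_eq!(ArgumentError::new().message(), "ArgumentError");
        assert!(ArgumentError::domain_error().is_domain_error());
    }
}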
| 28.658716 | 105 | 0.61566 |
deac07d4e66e7f1f40fd711958a772f98ac4ce96 | 1,891 |
use super::super::sorts::comparative_sort::insertion_sort_try;
use super::super::sorts::utils::Params;
#[test]
fn test_comparative_sort_insertion_sort_try() {
let p = Params::new(0, 8, 0, 1); // level, radix, offset, max_level
let mut arr: Vec<u16> = vec![
512,
1024,
1024,
1024,
1024,
1024 + 74,
1024 + 73,
1024 + 72,
1024 + 71,
1024 + 70,
1024 + 69,
1024 + 68,
1024 + 67,
1024 + 66,
1024 + 65,
1024 + 64,
1024 + 62,
1024 + 61,
1024 + 60,
1024 + 59,
1024 + 58,
1024 + 57,
1024 + 56,
1024 + 55,
1024 + 54,
1024 + 53,
1024 + 52,
1024 + 51,
1024 + 50,
1024 + 49,
1024 + 48,
1024 + 47,
1024 + 46,
1024 + 45,
1024 + 44,
1024 + 43,
1024 + 42,
1024 + 41,
1024 + 40,
1024 + 39,
1024 + 38,
1024 + 37,
1024 + 36,
1024 + 35,
1024 + 34,
1024 + 33,
1024 + 32,
1024 + 31,
1024 + 30,
1024 + 29,
1024 + 28,
1024 + 27,
1024 + 26,
1024 + 25,
1024 + 24,
1024 + 23,
1024 + 22,
1024 + 21,
1024 + 20,
1024 + 19,
1024 + 18,
1024 + 17,
1024 + 16,
1024 + 15,
1024 + 14,
1024 + 13,
1024 + 12,
1024 + 11,
1024 + 10,
1024 + 9,
1024 + 8,
1024 + 7,
1024 + 6,
1024 + 5,
1024 + 4,
1024 + 3,
1024 + 2,
1024 + 1,
1024,
2048,
];
let unsorted_parts = insertion_sort_try(&mut arr, &p);
assert_eq!(unsorted_parts[0].0, 1);
assert_eq!(unsorted_parts[0].1, 79);
}
| 20.117021 | 71 | 0.39027 |
75e306b23402fc5f9e6237a0cfa9b168ef201fa5 | 3,594 |
//! This module contains data types for interacting with `Scope`s.
//!
//! ## Relevant examples
//! - [Counter](https://github.com/yewstack/yew/tree/master/examples/counter)
//! - [Timer](https://github.com/yewstack/yew/tree/master/examples/timer)
use std::cell::RefCell;
use std::fmt;
use std::rc::Rc;
/// Universal callback wrapper.
/// <aside class="warning">
/// Use callbacks carefully, because if you call one from the `update` loop
/// of a `Component` (even from JS) it will delay a message until the next update.
/// Callbacks should be used from JS callbacks or `setTimeout` calls.
/// </aside>
/// An `Rc` wrapper is used to make it cloneable.
pub enum Callback<IN> {
/// A callback which can be called multiple times
Callback(Rc<dyn Fn(IN)>),
/// A callback which can only be called once. The callback will panic if it is
/// called more than once.
CallbackOnce(Rc<CallbackOnce<IN>>),
}
type CallbackOnce<IN> = RefCell<Option<Box<dyn FnOnce(IN)>>>;
impl<IN, F: Fn(IN) + 'static> From<F> for Callback<IN> {
fn from(func: F) -> Self {
Callback::Callback(Rc::new(func))
}
}
impl<IN> Clone for Callback<IN> {
fn clone(&self) -> Self {
match self {
Callback::Callback(cb) => Callback::Callback(cb.clone()),
Callback::CallbackOnce(cb) => Callback::CallbackOnce(cb.clone()),
}
}
}
#[allow(clippy::vtable_address_comparisons)]
impl<IN> PartialEq for Callback<IN> {
fn eq(&self, other: &Callback<IN>) -> bool {
match (&self, &other) {
(Callback::Callback(cb), Callback::Callback(other_cb)) => Rc::ptr_eq(cb, other_cb),
(Callback::CallbackOnce(cb), Callback::CallbackOnce(other_cb)) => {
Rc::ptr_eq(cb, other_cb)
}
_ => false,
}
}
}
impl<IN> fmt::Debug for Callback<IN> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let data = match self {
Callback::Callback(_) => "Callback<_>",
Callback::CallbackOnce(_) => "CallbackOnce<_>",
};
f.write_str(data)
}
}
impl<IN> Callback<IN> {
/// This method calls the callback's function.
pub fn emit(&self, value: IN) {
match self {
Callback::Callback(cb) => cb(value),
Callback::CallbackOnce(rc) => {
let cb = rc.replace(None);
let f = cb.expect("callback in CallbackOnce has already been used");
f(value)
}
};
}
/// Creates a callback from an `FnOnce`. The programmer is responsible for ensuring
/// that the callback is only called once. If it is called more than once, the callback
/// will panic.
pub fn once<F>(func: F) -> Self
where
F: FnOnce(IN) + 'static,
{
Callback::CallbackOnce(Rc::new(RefCell::new(Some(Box::new(func)))))
}
/// Creates a "no-op" callback which can be used when it is not suitable to use an
/// `Option<Callback>`.
pub fn noop() -> Self {
Self::from(|_| {})
}
}
impl<IN> Default for Callback<IN> {
fn default() -> Self {
Self::noop()
}
}
impl<IN: 'static> Callback<IN> {
/// Changes the input type of the callback to another.
/// Works like the `map` method but in the opposite direction.
pub fn reform<F, T>(&self, func: F) -> Callback<T>
where
F: Fn(T) -> IN + 'static,
{
let this = self.clone();
let func = move |input| {
let output = func(input);
this.emit(output);
};
Callback::from(func)
}
}
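// Brief usage sketch (not from the upstream crate): shows `emit` and `reform`
// on a `Callback<u32>` backed by a shared counter.
#[cfg(test)]
mod usage_tests {
    use super::Callback;
    use std::cell::Cell;
    use std::rc::Rc;

    #[test]
    fn emit_and_reform() {
        let acc = Rc::new(Cell::new(0u32));
        let acc2 = acc.clone();
        let cb: Callback<u32> = Callback::from(move |n: u32| acc2.set(acc2.get() + n));
        cb.emit(2);
        // `reform` maps the new input type before delegating to the original callback.
        let doubled: Callback<u32> = cb.reform(|n: u32| n * 2);
        doubled.emit(3);
        assert_eq!(acc.get(), 8);
    }
}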
| 30.457627 | 95 | 0.581803 |
22e5fe738ee1cf04174afaef8b154d1b68b95c09 | 1,388 |
use custom_mutator::{export_mutator, CustomMutator};
use lain::{
mutator::Mutator,
prelude::*,
rand::{rngs::StdRng, SeedableRng},
};
#[derive(Debug, Mutatable, NewFuzzed, BinarySerialize)]
struct MyStruct {
field_1: u8,
#[lain(bits = 3)]
field_2: u8,
#[lain(bits = 5)]
field_3: u8,
#[lain(min = 5, max = 10000)]
field_4: u32,
#[lain(ignore)]
ignored_field: u64,
}
struct LainMutator {
mutator: Mutator<StdRng>,
buffer: Vec<u8>,
}
impl CustomMutator for LainMutator {
type Error = ();
fn init(seed: u32) -> Result<Self, ()> {
Ok(Self {
mutator: Mutator::new(StdRng::seed_from_u64(seed as u64)),
buffer: Vec::new(),
})
}
fn fuzz<'b, 's: 'b>(
&'s mut self,
_buffer: &'b mut [u8],
_add_buff: Option<&[u8]>,
max_size: usize,
) -> Result<Option<&'b [u8]>, ()> {
// we just sample an instance of MyStruct, ignoring the current input
let instance = MyStruct::new_fuzzed(&mut self.mutator, None);
let size = instance.serialized_size();
if size > max_size {
return Err(());
}
self.buffer.clear();
self.buffer.reserve(size);
instance.binary_serialize::<_, BigEndian>(&mut self.buffer);
Ok(Some(self.buffer.as_slice()))
}
}
export_mutator!(LainMutator);
| 23.133333 | 77 | 0.567723 |
d6fc5a5d20bc3939a95132b9e613fee5425fcd48 | 2,686 |
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, Attribute, Data, DataStruct, DeriveInput, Error, Ident, Lit, Meta};
#[proc_macro_derive(SettingGroup, attributes(setting_prefix))]
pub fn setting_group(item: TokenStream) -> TokenStream {
let input = parse_macro_input!(item as DeriveInput);
let prefix = setting_prefix(input.attrs.as_ref())
.map(|p| format!("{}_", p))
.unwrap_or_else(|| "".to_string());
stream(input, prefix)
}
fn stream(input: DeriveInput, prefix: String) -> TokenStream {
const ERR_MSG: &str = "Derive macro expects a struct";
match input.data {
Data::Struct(ref data) => struct_stream(input.ident, prefix, data),
Data::Enum(data) => Error::new_spanned(data.enum_token, ERR_MSG)
.to_compile_error()
.into(),
Data::Union(data) => Error::new_spanned(data.union_token, ERR_MSG)
.to_compile_error()
.into(),
}
}
fn struct_stream(name: Ident, prefix: String, data: &DataStruct) -> TokenStream {
let fragments = data.fields.iter().map(|field| match field.ident {
Some(ref ident) => {
let vim_setting_name = format!("{}{}", prefix, ident);
quote! {{
fn update_func(value: rmpv::Value) {
let mut s = crate::settings::SETTINGS.get::<#name>();
s.#ident.parse_from_value(value);
crate::settings::SETTINGS.set(&s);
}
fn reader_func() -> rmpv::Value {
let s = crate::settings::SETTINGS.get::<#name>();
s.#ident.into()
}
crate::settings::SETTINGS.set_setting_handlers(
#vim_setting_name,
update_func,
reader_func
);
}}
}
None => {
Error::new_spanned(field.colon_token, "Expected named struct fields").to_compile_error()
}
});
let expanded = quote! {
impl #name {
pub fn register() {
let s: Self = Default::default();
crate::settings::SETTINGS.set(&s);
#(#fragments)*
}
}
};
TokenStream::from(expanded)
}
fn setting_prefix(attrs: &[Attribute]) -> Option<String> {
for attr in attrs.iter() {
if let Ok(Meta::NameValue(name_value)) = attr.parse_meta() {
if name_value.path.is_ident("setting_prefix") {
if let Lit::Str(literal) = name_value.lit {
return Some(literal.value());
}
}
}
}
None
}
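// Hypothetical usage sketch (the names below are illustrative, not taken from this file):
// deriving `SettingGroup` registers one update/reader handler per field, with the vim
// setting name prefixed by the optional `setting_prefix` attribute, e.g.
//
//     #[derive(SettingGroup, Clone, Default)]
//     #[setting_prefix = "window"]
//     struct WindowSettings {
//         width: u64, // registered under the setting name "window_width"
//     }
//
//     // Somewhere during start-up:
//     WindowSettings::register();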
| 34.435897 | 100 | 0.537602 |
f73e08ad4866bd273c510add77df9054ecfce419 | 2,676 |
//! [POST /_matrix/client/r0/user/{userId}/filter](https://matrix.org/docs/spec/client_server/r0.6.0#post-matrix-client-r0-user-userid-filter)
use ruma_api::ruma_api;
use ruma_identifiers::UserId;
use super::{FilterDefinition, IncomingFilterDefinition};
ruma_api! {
metadata: {
description: "Create a new filter for event retrieval.",
method: POST,
name: "create_filter",
path: "/_matrix/client/r0/user/:user_id/filter",
rate_limited: false,
authentication: AccessToken,
}
request: {
/// The ID of the user uploading the filter.
///
/// The access token must be authorized to make requests for this user ID.
#[ruma_api(path)]
pub user_id: &'a UserId,
/// The filter definition.
#[ruma_api(body)]
pub filter: FilterDefinition<'a>,
}
response: {
/// The ID of the filter that was created.
pub filter_id: String,
}
error: crate::Error
}
impl<'a> Request<'a> {
/// Creates a new `Request` with the given user ID and filter definition.
pub fn new(user_id: &'a UserId, filter: FilterDefinition<'a>) -> Self {
Self { user_id, filter }
}
}
impl Response {
/// Creates a new `Response` with the given filter ID.
pub fn new(filter_id: String) -> Self {
Self { filter_id }
}
}
#[cfg(all(test, any(feature = "client", feature = "server")))]
mod tests {
use matches::assert_matches;
#[cfg(feature = "server")]
#[test]
fn deserialize_request() {
use ruma_api::IncomingRequest as _;
use super::IncomingRequest;
assert_matches!(
IncomingRequest::try_from_http_request(
http::Request::builder()
.method(http::Method::POST)
.uri("https://matrix.org/_matrix/client/r0/user/@foo:bar.com/filter")
.body(b"{}" as &[u8])
.unwrap(),
),
Ok(IncomingRequest { user_id, filter })
if user_id == "@foo:bar.com" && filter.is_empty()
);
}
#[cfg(feature = "client")]
#[test]
fn serialize_request() {
use ruma_api::{OutgoingRequest, SendAccessToken};
use ruma_identifiers::user_id;
use crate::r0::filter::FilterDefinition;
assert_matches!(
super::Request::new(&user_id!("@foo:bar.com"), FilterDefinition::default())
.try_into_http_request::<Vec<u8>>(
"https://matrix.org",
SendAccessToken::IfRequired("tok"),
),
Ok(res) if res.body() == b"{}"
);
}
}
| 28.468085 | 142 | 0.565022 |
3962a20b3781d862f8df21a51bdbfa7cbfef0b77 | 2,694 |
use super::apply::Apply;
pub trait Applicative: Apply {
fn pure(value: Self::Inner) -> Self::Outter<Self::Inner>;
}
impl<A> Applicative for Option<A> {
fn pure(value: Self::Inner) -> Self::Outter<Self::Inner> {
Some(value)
}
}
impl<A, E> Applicative for Result<A, E> {
fn pure(value: Self::Inner) -> Self::Outter<Self::Inner> {
Ok(value)
}
}
impl<A: Clone> Applicative for Vec<A> {
fn pure(value: Self::Inner) -> Self::Outter<Self::Inner> {
vec![value]
}
}
pub trait ApplicativeError: Applicative {
type ErrorT;
fn handle_error_with<F>(self, f: F) -> Self::Outter<Self::Inner>
where
F: FnMut(Self::ErrorT) -> Self::Outter<Self::Inner>;
fn raise_error(error: Self::ErrorT) -> Self::Outter<Self::Inner>;
}
impl<A, E> ApplicativeError for Result<A, E> {
type ErrorT = E;
fn handle_error_with<F>(self, mut f: F) -> Self::Outter<Self::Inner>
where
F: FnMut(Self::ErrorT) -> Self::Outter<Self::Inner>,
{
match self {
Err(e) => f(e),
_ => self,
}
}
fn raise_error(error: Self::ErrorT) -> Self::Outter<Self::Inner> {
Err(error)
}
}
impl<A> ApplicativeError for Option<A> {
type ErrorT = ();
fn handle_error_with<F>(self, mut f: F) -> Self::Outter<Self::Inner>
where
F: FnMut(Self::ErrorT) -> Self::Outter<Self::Inner>,
{
match self {
None => f(()),
_ => self,
}
}
fn raise_error(_error: Self::ErrorT) -> Self::Outter<Self::Inner> {
None
}
}
#[cfg(test)]
mod tests {
use super::{Applicative, ApplicativeError};
#[test]
fn option() {
let value = 3;
assert_eq!(Option::pure(value), Some(value));
}
#[test]
fn result() {
let value = 3;
assert_eq!(Result::<i32, ()>::pure(value), Ok(value));
}
#[test]
fn vec() {
let value = 3;
assert_eq!(Vec::pure(value), vec![value]);
}
#[test]
fn handle_error_with_for_result() {
let value = Err(());
let handler = |_err| Ok(3);
assert_eq!(value.handle_error_with(handler), Ok(3));
}
#[test]
fn raise_error_for_result() {
let err = Result::<u64, String>::raise_error("ERROR!".to_string());
assert_eq!(err, Err("ERROR!".to_string()));
}
#[test]
fn handle_error_with_for_option() {
let value = None;
let handler = |_| Some(3);
assert_eq!(value.handle_error_with(handler), Some(3));
}
#[test]
fn raise_error_for_option() {
let err = Option::<u64>::raise_error(());
assert_eq!(err, None);
}
}
| 22.638655 | 75 | 0.547513 |
619615eab28c9ef4bbafdfcb4df46007cac6ce86 | 1,815 |
// c:f
use super::super::super::Address;
use quick_xml::events::{BytesStart, Event};
use quick_xml::Reader;
use quick_xml::Writer;
use std::io::Cursor;
use writer::driver::*;
#[derive(Clone, Default, Debug)]
pub struct Formula {
address: Address,
}
impl Formula {
pub fn get_address(&self) -> &Address {
&self.address
}
pub fn get_address_mut(&mut self) -> &mut Address {
&mut self.address
}
pub fn get_address_str(&self) -> String {
self.address.get_address()
}
pub fn set_address(&mut self, value: Address) -> &mut Formula {
self.address = value;
self
}
pub fn set_address_str<S: Into<String>>(&mut self, value: S) -> &mut Formula {
self.address.set_address(value);
self
}
pub(crate) fn set_attributes<R: std::io::BufRead>(
&mut self,
reader: &mut Reader<R>,
_e: &BytesStart,
) {
let mut buf = Vec::new();
loop {
match reader.read_event(&mut buf) {
Ok(Event::Text(e)) => {
self.set_address_str(e.unescape_and_decode(&reader).unwrap());
}
Ok(Event::End(ref e)) => match e.name() {
b"c:f" => return,
_ => (),
},
Ok(Event::Eof) => panic!("Error: could not find {} end element", "c:f"),
Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e),
_ => (),
}
buf.clear();
}
}
pub(crate) fn write_to(&self, writer: &mut Writer<Cursor<Vec<u8>>>) {
// c:f
write_start_tag(writer, "c:f", vec![], false);
write_text_node_no_escape(writer, self.get_address_str());
write_end_tag(writer, "c:f");
}
}
| 27.5 | 92 | 0.518457 |
4b1560e33c8f37eb4634314ae9b1b0f5cf76e090 | 6,072 |
use std::collections::HashMap;
use std::net::{IpAddr, Ipv4Addr};
use anyhow::{anyhow, Result};
use byteorder::{BigEndian, ByteOrder};
use log::*;
use trust_dns_proto::op::{
header::MessageType, op_code::OpCode, response_code::ResponseCode, Message,
};
use trust_dns_proto::rr::{
dns_class::DNSClass, record_data::RData, record_type::RecordType, resource::Record,
};
pub enum FakeDnsMode {
Include,
Exclude,
}
pub struct FakeDns {
ip_to_domain: HashMap<u32, String>,
domain_to_ip: HashMap<String, u32>,
cursor: u32,
min_cursor: u32,
max_cursor: u32,
ttl: u32,
filters: Vec<String>,
mode: FakeDnsMode,
}
impl FakeDns {
pub fn new(mode: FakeDnsMode) -> Self {
let min_cursor = Self::ip_to_u32(&Ipv4Addr::new(240, 255, 0, 0));
let max_cursor = Self::ip_to_u32(&Ipv4Addr::new(240, 255, 4, 255));
FakeDns {
ip_to_domain: HashMap::new(),
domain_to_ip: HashMap::new(),
cursor: min_cursor,
min_cursor,
max_cursor,
ttl: 1,
filters: Vec::new(),
mode,
}
}
pub fn add_filter(&mut self, filter: String) {
self.filters.push(filter);
}
fn allocate_ip(&mut self, domain: &str) -> Ipv4Addr {
self.ip_to_domain.insert(self.cursor, domain.to_owned());
self.domain_to_ip.insert(domain.to_owned(), self.cursor);
let ip = Self::u32_to_ip(self.cursor);
self.cursor += 1;
if self.cursor > self.max_cursor {
self.cursor = self.min_cursor;
}
ip
}
pub fn query_domain(&mut self, ip: &IpAddr) -> Option<String> {
let ip = match ip {
IpAddr::V4(ip) => ip,
_ => return None,
};
match self.ip_to_domain.get(&Self::ip_to_u32(ip)) {
Some(v) => Some(v.clone()),
None => None,
}
}
pub fn query_fake_ip(&mut self, domain: &str) -> Option<IpAddr> {
match self.domain_to_ip.get(domain) {
Some(v) => Some(IpAddr::V4(Self::u32_to_ip(v.to_owned()))),
None => None,
}
}
fn accept(&self, domain: &str) -> bool {
match self.mode {
FakeDnsMode::Exclude => {
for d in &self.filters {
if domain.contains(d) || d == "*" {
return false;
}
}
true
}
FakeDnsMode::Include => {
for d in &self.filters {
if domain.contains(d) || d == "*" {
return true;
}
}
false
}
}
}
pub fn generate_fake_response(&mut self, request: &[u8]) -> Result<Vec<u8>> {
let req = Message::from_vec(request)?;
if req.queries().is_empty() {
return Err(anyhow!("no queries in this DNS request"));
}
let query = &req.queries()[0];
if query.query_class() != DNSClass::IN {
return Err(anyhow!("unsupported query class {}", query.query_class()));
}
let t = query.query_type();
if t != RecordType::A && t != RecordType::AAAA && t != RecordType::Unknown(65) {
return Err(anyhow!(
"unsupported query record type {:?}",
query.query_type()
));
}
let raw_name = query.name();
// TODO check if a valid domain
let domain = if raw_name.is_fqdn() {
let fqdn = raw_name.to_ascii();
fqdn[..fqdn.len() - 1].to_string()
} else {
raw_name.to_ascii()
};
if !self.accept(&domain) {
return Err(anyhow!("domain {} not accepted", domain));
}
let ip = if let Some(ip) = self.query_fake_ip(&domain) {
match ip {
IpAddr::V4(a) => a,
_ => return Err(anyhow!("unexpected Ipv6 fake IP")),
}
} else {
let ip = self.allocate_ip(&domain);
debug!("allocate {} for {}", &ip, &domain);
ip
};
let mut resp = Message::new();
// sets the response according to request
// https://github.com/miekg/dns/blob/f515aa579d28efa1af67d9a62cc57f2dfe59da76/defaults.go#L15
resp.set_id(req.id())
.set_message_type(MessageType::Response)
.set_op_code(req.op_code());
if resp.op_code() == OpCode::Query {
resp.set_recursion_desired(req.recursion_desired())
.set_checking_disabled(req.checking_disabled());
}
resp.set_response_code(ResponseCode::NoError);
if !req.queries().is_empty() {
resp.add_query(query.clone());
}
if query.query_type() == RecordType::A {
let mut ans = Record::new();
ans.set_name(raw_name.clone())
.set_rr_type(RecordType::A)
.set_ttl(self.ttl)
.set_dns_class(DNSClass::IN)
.set_rdata(RData::A(ip));
resp.add_answer(ans);
}
Ok(resp.to_vec()?)
}
pub fn is_fake_ip(&self, ip: &IpAddr) -> bool {
let ip = match ip {
IpAddr::V4(ip) => ip,
_ => return false,
};
let ip = Self::ip_to_u32(ip);
ip >= self.min_cursor && ip <= self.max_cursor
}
fn u32_to_ip(ip: u32) -> Ipv4Addr {
Ipv4Addr::from(ip)
}
fn ip_to_u32(ip: &Ipv4Addr) -> u32 {
BigEndian::read_u32(&ip.octets())
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::net::Ipv4Addr;
#[test]
fn test_u32_to_ip() {
let ip1 = Ipv4Addr::new(127, 0, 0, 1);
let ip2 = FakeDns::u32_to_ip(2130706433u32);
assert_eq!(ip1, ip2);
}
#[test]
fn test_ip_to_u32() {
let ip = Ipv4Addr::new(127, 0, 0, 1);
let ip1 = FakeDns::ip_to_u32(&ip);
let ip2 = 2130706433u32;
assert_eq!(ip1, ip2);
}
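    // Illustrative addition (not in the upstream source): an allocation round-trip
    // through the fake-IP pool bounded by `min_cursor`/`max_cursor`.
    #[test]
    fn test_allocate_and_query_roundtrip() {
        let mut dns = FakeDns::new(FakeDnsMode::Exclude);
        let ip = dns.allocate_ip("example.com");
        let ip = std::net::IpAddr::V4(ip);
        assert!(dns.is_fake_ip(&ip));
        assert_eq!(dns.query_domain(&ip), Some("example.com".to_string()));
        assert_eq!(dns.query_fake_ip("example.com"), Some(ip));
    }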
}
| 28.373832 | 101 | 0.512681 |
28576e5bcfaab2a7790e95402576dc06a9d31989 | 1,063 |
#[doc = "Reader of register COMP[%s]"]
pub type R = crate::R<u16, super::COMP>;
#[doc = "Writer for register COMP[%s]"]
pub type W = crate::W<u16, super::COMP>;
#[doc = "Register COMP[%s] `reset()`'s with value 0"]
impl crate::ResetValue for super::COMP {
type Type = u16;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `COMP`"]
pub type COMP_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `COMP`"]
pub struct COMP_W<'a> {
w: &'a mut W,
}
impl<'a> COMP_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !0xffff) | ((value as u16) & 0xffff);
self.w
}
}
impl R {
#[doc = "Bits 0:15 - Compare Value"]
#[inline(always)]
pub fn comp(&self) -> COMP_R {
COMP_R::new((self.bits & 0xffff) as u16)
}
}
impl W {
#[doc = "Bits 0:15 - Compare Value"]
#[inline(always)]
pub fn comp(&mut self) -> COMP_W {
COMP_W { w: self }
}
}
| 25.926829 | 74 | 0.552211 |
7576c1326b8e494c8902d21888a85976639d9d82 | 841 |
// ignore-test (fails spuriously, see issue #89228)
// FIXME: If two macros in the same module have the same name
// (yes, that's a thing), rustdoc lists both of them on the index page,
// but only documents the first one on the page for the macro.
// Fortunately, this can only happen in document private items mode,
// but it still isn't ideal behavior.
//
// See https://github.com/rust-lang/rust/pull/88019#discussion_r693920453
//
// compile-flags: --document-private-items
// @has macro_document_private_duplicate/index.html 'Doc 1.'
// @has macro_document_private_duplicate/macro.a_macro.html 'Doc 1.'
/// Doc 1.
macro_rules! a_macro {
() => ()
}
// @has macro_document_private_duplicate/index.html 'Doc 2.'
// @!has macro_document_private_duplicate/macro.a_macro.html 'Doc 2.'
/// Doc 2.
macro_rules! a_macro {
() => ()
}
| 32.346154 | 73 | 0.717004 |
d649c6491314c2e5b4a0ad7123aef8bf396b48a9 | 25,164 |
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type decoding
// tjc note: Would be great to have a `match check` macro equivalent
// for some of these
#![allow(non_camel_case_types)]
pub use self::DefIdSource::*;
use middle::region;
use middle::subst;
use middle::subst::VecPerParamSpace;
use middle::ty::{mod, Ty};
use std::rc::Rc;
use std::str;
use std::string::String;
use syntax::abi;
use syntax::ast;
use syntax::parse::token;
// Compact string representation for Ty values. API ty_str &
// parse_from_str. Extra parameters are for converting to/from def_ids in the
// data buffer. Whatever format you choose should not contain pipe characters.
// Def id conversion: when we encounter def-ids, they have to be translated.
// For example, the crate number must be converted from the crate number used
// in the library we are reading from into the local crate numbers in use
// here. To perform this translation, the type decoder is supplied with a
// conversion function of type `conv_did`.
//
// Sometimes, particularly when inlining, the correct translation of the
// def-id will depend on where it originated from. Therefore, the conversion
// function is given an indicator of the source of the def-id. See
// astencode.rs for more information.
#[deriving(Show)]
pub enum DefIdSource {
// Identifies a struct, trait, enum, etc.
NominalType,
// Identifies a type alias (`type X = ...`).
TypeWithId,
// Identifies a type parameter (`fn foo<X>() { ... }`).
TypeParameter,
// Identifies a region parameter (`fn foo<'X>() { ... }`).
RegionParameter,
// Identifies an unboxed closure
UnboxedClosureSource
}
impl Copy for DefIdSource {}
pub type conv_did<'a> =
|source: DefIdSource, ast::DefId|: 'a -> ast::DefId;
pub struct PState<'a, 'tcx: 'a> {
data: &'a [u8],
krate: ast::CrateNum,
pos: uint,
tcx: &'a ty::ctxt<'tcx>
}
fn peek(st: &PState) -> char {
st.data[st.pos] as char
}
fn next(st: &mut PState) -> char {
let ch = st.data[st.pos] as char;
st.pos = st.pos + 1u;
return ch;
}
fn next_byte(st: &mut PState) -> u8 {
let b = st.data[st.pos];
st.pos = st.pos + 1u;
return b;
}
fn scan<R, F, G>(st: &mut PState, mut is_last: F, op: G) -> R where
F: FnMut(char) -> bool,
G: FnOnce(&[u8]) -> R,
{
let start_pos = st.pos;
debug!("scan: '{}' (start)", st.data[st.pos] as char);
while !is_last(st.data[st.pos] as char) {
st.pos += 1;
debug!("scan: '{}'", st.data[st.pos] as char);
}
let end_pos = st.pos;
st.pos += 1;
return op(st.data[start_pos..end_pos]);
}
pub fn parse_ident(st: &mut PState, last: char) -> ast::Ident {
ast::Ident::new(parse_name(st, last))
}
pub fn parse_name(st: &mut PState, last: char) -> ast::Name {
fn is_last(b: char, c: char) -> bool { return c == b; }
parse_name_(st, |a| is_last(last, a) )
}
fn parse_name_<F>(st: &mut PState, is_last: F) -> ast::Name where
F: FnMut(char) -> bool,
{
scan(st, is_last, |bytes| {
token::intern(str::from_utf8(bytes).unwrap())
})
}
pub fn parse_state_from_data<'a, 'tcx>(data: &'a [u8], crate_num: ast::CrateNum,
pos: uint, tcx: &'a ty::ctxt<'tcx>)
-> PState<'a, 'tcx> {
PState {
data: data,
krate: crate_num,
pos: pos,
tcx: tcx
}
}
fn data_log_string(data: &[u8], pos: uint) -> String {
let mut buf = String::new();
buf.push_str("<<");
for i in range(pos, data.len()) {
let c = data[i];
if c > 0x20 && c <= 0x7F {
buf.push(c as char);
} else {
buf.push('.');
}
}
buf.push_str(">>");
buf
}
pub fn parse_ty_closure_data<'tcx>(data: &[u8],
crate_num: ast::CrateNum,
pos: uint,
tcx: &ty::ctxt<'tcx>,
conv: conv_did)
-> ty::ClosureTy<'tcx> {
let mut st = parse_state_from_data(data, crate_num, pos, tcx);
parse_closure_ty(&mut st, conv)
}
pub fn parse_ty_data<'tcx>(data: &[u8], crate_num: ast::CrateNum, pos: uint,
tcx: &ty::ctxt<'tcx>, conv: conv_did) -> Ty<'tcx> {
debug!("parse_ty_data {}", data_log_string(data, pos));
let mut st = parse_state_from_data(data, crate_num, pos, tcx);
parse_ty(&mut st, conv)
}
pub fn parse_region_data(data: &[u8], crate_num: ast::CrateNum, pos: uint, tcx: &ty::ctxt,
conv: conv_did) -> ty::Region {
debug!("parse_region_data {}", data_log_string(data, pos));
let mut st = parse_state_from_data(data, crate_num, pos, tcx);
parse_region(&mut st, conv)
}
pub fn parse_bare_fn_ty_data<'tcx>(data: &[u8], crate_num: ast::CrateNum, pos: uint,
tcx: &ty::ctxt<'tcx>, conv: conv_did)
-> ty::BareFnTy<'tcx> {
debug!("parse_bare_fn_ty_data {}", data_log_string(data, pos));
let mut st = parse_state_from_data(data, crate_num, pos, tcx);
parse_bare_fn_ty(&mut st, conv)
}
pub fn parse_trait_ref_data<'tcx>(data: &[u8], crate_num: ast::CrateNum, pos: uint,
tcx: &ty::ctxt<'tcx>, conv: conv_did)
-> ty::TraitRef<'tcx> {
debug!("parse_trait_ref_data {}", data_log_string(data, pos));
let mut st = parse_state_from_data(data, crate_num, pos, tcx);
parse_trait_ref(&mut st, conv)
}
pub fn parse_substs_data<'tcx>(data: &[u8], crate_num: ast::CrateNum, pos: uint,
tcx: &ty::ctxt<'tcx>, conv: conv_did) -> subst::Substs<'tcx> {
debug!("parse_substs_data {}", data_log_string(data, pos));
let mut st = parse_state_from_data(data, crate_num, pos, tcx);
parse_substs(&mut st, conv)
}
pub fn parse_bounds_data<'tcx>(data: &[u8], crate_num: ast::CrateNum,
pos: uint, tcx: &ty::ctxt<'tcx>, conv: conv_did)
-> ty::ParamBounds<'tcx> {
let mut st = parse_state_from_data(data, crate_num, pos, tcx);
parse_bounds(&mut st, conv)
}
pub fn parse_existential_bounds_data(data: &[u8], crate_num: ast::CrateNum,
pos: uint, tcx: &ty::ctxt, conv: conv_did)
-> ty::ExistentialBounds {
let mut st = parse_state_from_data(data, crate_num, pos, tcx);
parse_existential_bounds(&mut st, conv)
}
pub fn parse_builtin_bounds_data(data: &[u8], crate_num: ast::CrateNum,
pos: uint, tcx: &ty::ctxt, conv: conv_did)
-> ty::BuiltinBounds {
let mut st = parse_state_from_data(data, crate_num, pos, tcx);
parse_builtin_bounds(&mut st, conv)
}
fn parse_size(st: &mut PState) -> Option<uint> {
assert_eq!(next(st), '/');
if peek(st) == '|' {
assert_eq!(next(st), '|');
None
} else {
let n = parse_uint(st);
assert_eq!(next(st), '|');
Some(n)
}
}
fn parse_trait_store(st: &mut PState, conv: conv_did) -> ty::TraitStore {
match next(st) {
'~' => ty::UniqTraitStore,
'&' => ty::RegionTraitStore(parse_region(st, conv), parse_mutability(st)),
c => {
st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'",
c).as_slice())
}
}
}
fn parse_vec_per_param_space<'a, 'tcx, T, F>(st: &mut PState<'a, 'tcx>,
mut f: F)
-> VecPerParamSpace<T> where
F: FnMut(&mut PState<'a, 'tcx>) -> T,
{
let mut r = VecPerParamSpace::empty();
for &space in subst::ParamSpace::all().iter() {
assert_eq!(next(st), '[');
while peek(st) != ']' {
r.push(space, f(st));
}
assert_eq!(next(st), ']');
}
r
}
fn parse_substs<'a, 'tcx>(st: &mut PState<'a, 'tcx>,
conv: conv_did) -> subst::Substs<'tcx> {
let regions =
parse_region_substs(st, |x,y| conv(x,y));
let types =
parse_vec_per_param_space(st, |st| parse_ty(st, |x,y| conv(x,y)));
return subst::Substs { types: types,
regions: regions };
}
fn parse_region_substs(st: &mut PState, conv: conv_did) -> subst::RegionSubsts {
match next(st) {
'e' => subst::ErasedRegions,
'n' => {
subst::NonerasedRegions(
parse_vec_per_param_space(
st, |st| parse_region(st, |x,y| conv(x,y))))
}
      _ => panic!("parse_region_substs: bad input")
}
}
fn parse_bound_region(st: &mut PState, conv: conv_did) -> ty::BoundRegion {
match next(st) {
'a' => {
let id = parse_uint(st);
assert_eq!(next(st), '|');
ty::BrAnon(id)
}
'[' => {
let def = parse_def(st, RegionParameter, |x,y| conv(x,y));
let ident = token::str_to_ident(parse_str(st, ']').as_slice());
ty::BrNamed(def, ident.name)
}
'f' => {
let id = parse_uint(st);
assert_eq!(next(st), '|');
ty::BrFresh(id)
}
'e' => ty::BrEnv,
_ => panic!("parse_bound_region: bad input")
}
}
fn parse_region(st: &mut PState, conv: conv_did) -> ty::Region {
match next(st) {
'b' => {
assert_eq!(next(st), '[');
let id = ty::DebruijnIndex::new(parse_uint(st));
assert_eq!(next(st), '|');
let br = parse_bound_region(st, |x,y| conv(x,y));
assert_eq!(next(st), ']');
ty::ReLateBound(id, br)
}
'B' => {
assert_eq!(next(st), '[');
let node_id = parse_uint(st) as ast::NodeId;
assert_eq!(next(st), '|');
let space = parse_param_space(st);
assert_eq!(next(st), '|');
let index = parse_uint(st);
assert_eq!(next(st), '|');
let nm = token::str_to_ident(parse_str(st, ']').as_slice());
ty::ReEarlyBound(node_id, space, index, nm.name)
}
'f' => {
assert_eq!(next(st), '[');
let scope = parse_scope(st);
assert_eq!(next(st), '|');
let br = parse_bound_region(st, |x,y| conv(x,y));
assert_eq!(next(st), ']');
ty::ReFree(ty::FreeRegion { scope: scope,
bound_region: br})
}
's' => {
let scope = parse_scope(st);
assert_eq!(next(st), '|');
ty::ReScope(scope)
}
't' => {
ty::ReStatic
}
'e' => {
ty::ReStatic
}
_ => panic!("parse_region: bad input")
}
}
fn parse_scope(st: &mut PState) -> region::CodeExtent {
match next(st) {
'M' => {
let node_id = parse_uint(st) as ast::NodeId;
region::CodeExtent::Misc(node_id)
}
_ => panic!("parse_scope: bad input")
}
}
fn parse_opt<'a, 'tcx, T, F>(st: &mut PState<'a, 'tcx>, f: F) -> Option<T> where
F: FnOnce(&mut PState<'a, 'tcx>) -> T,
{
match next(st) {
'n' => None,
's' => Some(f(st)),
_ => panic!("parse_opt: bad input")
}
}
fn parse_str(st: &mut PState, term: char) -> String {
let mut result = String::new();
while peek(st) != term {
unsafe {
result.as_mut_vec().push_all(&[next_byte(st)])
}
}
next(st);
result
}
fn parse_trait_ref<'a, 'tcx>(st: &mut PState<'a, 'tcx>, conv: conv_did)
-> ty::TraitRef<'tcx> {
let def = parse_def(st, NominalType, |x,y| conv(x,y));
let substs = parse_substs(st, |x,y| conv(x,y));
ty::TraitRef {def_id: def, substs: substs}
}
fn parse_ty<'a, 'tcx>(st: &mut PState<'a, 'tcx>, conv: conv_did) -> Ty<'tcx> {
match next(st) {
'b' => return ty::mk_bool(),
'i' => return ty::mk_int(),
'u' => return ty::mk_uint(),
'M' => {
match next(st) {
'b' => return ty::mk_mach_uint(ast::TyU8),
'w' => return ty::mk_mach_uint(ast::TyU16),
'l' => return ty::mk_mach_uint(ast::TyU32),
'd' => return ty::mk_mach_uint(ast::TyU64),
'B' => return ty::mk_mach_int(ast::TyI8),
'W' => return ty::mk_mach_int(ast::TyI16),
'L' => return ty::mk_mach_int(ast::TyI32),
'D' => return ty::mk_mach_int(ast::TyI64),
'f' => return ty::mk_mach_float(ast::TyF32),
'F' => return ty::mk_mach_float(ast::TyF64),
_ => panic!("parse_ty: bad numeric type")
}
}
'c' => return ty::mk_char(),
't' => {
assert_eq!(next(st), '[');
let def = parse_def(st, NominalType, |x,y| conv(x,y));
let substs = parse_substs(st, |x,y| conv(x,y));
assert_eq!(next(st), ']');
return ty::mk_enum(st.tcx, def, substs);
}
'x' => {
assert_eq!(next(st), '[');
let trait_ref = parse_trait_ref(st, |x,y| conv(x,y));
let bounds = parse_existential_bounds(st, |x,y| conv(x,y));
assert_eq!(next(st), ']');
return ty::mk_trait(st.tcx, trait_ref, bounds);
}
'p' => {
let did = parse_def(st, TypeParameter, |x,y| conv(x,y));
debug!("parsed ty_param: did={}", did);
let index = parse_uint(st);
assert_eq!(next(st), '|');
let space = parse_param_space(st);
assert_eq!(next(st), '|');
return ty::mk_param(st.tcx, space, index, did);
}
'~' => return ty::mk_uniq(st.tcx, parse_ty(st, |x,y| conv(x,y))),
'*' => return ty::mk_ptr(st.tcx, parse_mt(st, |x,y| conv(x,y))),
'&' => {
let r = parse_region(st, |x,y| conv(x,y));
let mt = parse_mt(st, |x,y| conv(x,y));
return ty::mk_rptr(st.tcx, r, mt);
}
'V' => {
let t = parse_ty(st, |x,y| conv(x,y));
let sz = parse_size(st);
return ty::mk_vec(st.tcx, t, sz);
}
'v' => {
return ty::mk_str(st.tcx);
}
'T' => {
assert_eq!(next(st), '[');
let mut params = Vec::new();
while peek(st) != ']' { params.push(parse_ty(st, |x,y| conv(x,y))); }
st.pos = st.pos + 1u;
return ty::mk_tup(st.tcx, params);
}
'f' => {
return ty::mk_closure(st.tcx, parse_closure_ty(st, |x,y| conv(x,y)));
}
'F' => {
return ty::mk_bare_fn(st.tcx, parse_bare_fn_ty(st, |x,y| conv(x,y)));
}
'#' => {
let pos = parse_hex(st);
assert_eq!(next(st), ':');
let len = parse_hex(st);
assert_eq!(next(st), '#');
let key = ty::creader_cache_key {cnum: st.krate,
pos: pos,
len: len };
match st.tcx.rcache.borrow().get(&key).cloned() {
Some(tt) => return tt,
None => {}
}
let mut ps = PState {
pos: pos,
.. *st
};
let tt = parse_ty(&mut ps, |x,y| conv(x,y));
st.tcx.rcache.borrow_mut().insert(key, tt);
return tt;
}
'\"' => {
let _ = parse_def(st, TypeWithId, |x,y| conv(x,y));
let inner = parse_ty(st, |x,y| conv(x,y));
inner
}
'a' => {
assert_eq!(next(st), '[');
let did = parse_def(st, NominalType, |x,y| conv(x,y));
let substs = parse_substs(st, |x,y| conv(x,y));
assert_eq!(next(st), ']');
return ty::mk_struct(st.tcx, did, substs);
}
'k' => {
assert_eq!(next(st), '[');
let did = parse_def(st, UnboxedClosureSource, |x,y| conv(x,y));
let region = parse_region(st, |x,y| conv(x,y));
let substs = parse_substs(st, |x,y| conv(x,y));
assert_eq!(next(st), ']');
return ty::mk_unboxed_closure(st.tcx, did, region, substs);
}
'e' => {
return ty::mk_err();
}
c => { panic!("unexpected char in type string: {}", c);}
}
}
fn parse_mutability(st: &mut PState) -> ast::Mutability {
match peek(st) {
'm' => { next(st); ast::MutMutable }
_ => { ast::MutImmutable }
}
}
fn parse_mt<'a, 'tcx>(st: &mut PState<'a, 'tcx>, conv: conv_did) -> ty::mt<'tcx> {
let m = parse_mutability(st);
ty::mt { ty: parse_ty(st, |x,y| conv(x,y)), mutbl: m }
}
fn parse_def(st: &mut PState, source: DefIdSource,
conv: conv_did) -> ast::DefId {
return conv(source, scan(st, |c| { c == '|' }, parse_def_id));
}
fn parse_uint(st: &mut PState) -> uint {
let mut n = 0;
loop {
let cur = peek(st);
if cur < '0' || cur > '9' { return n; }
st.pos = st.pos + 1u;
n *= 10;
n += (cur as uint) - ('0' as uint);
};
}
fn parse_param_space(st: &mut PState) -> subst::ParamSpace {
subst::ParamSpace::from_uint(parse_uint(st))
}
fn parse_hex(st: &mut PState) -> uint {
let mut n = 0u;
loop {
let cur = peek(st);
if (cur < '0' || cur > '9') && (cur < 'a' || cur > 'f') { return n; }
st.pos = st.pos + 1u;
n *= 16u;
if '0' <= cur && cur <= '9' {
n += (cur as uint) - ('0' as uint);
} else { n += 10u + (cur as uint) - ('a' as uint); }
};
}
fn parse_unsafety(c: char) -> ast::Unsafety {
match c {
'u' => ast::Unsafety::Unsafe,
'n' => ast::Unsafety::Normal,
_ => panic!("parse_unsafety: bad unsafety {}", c)
}
}
fn parse_abi_set(st: &mut PState) -> abi::Abi {
assert_eq!(next(st), '[');
scan(st, |c| c == ']', |bytes| {
let abi_str = str::from_utf8(bytes).unwrap();
abi::lookup(abi_str.as_slice()).expect(abi_str)
})
}
fn parse_onceness(c: char) -> ast::Onceness {
match c {
'o' => ast::Once,
'm' => ast::Many,
_ => panic!("parse_onceness: bad onceness")
}
}
fn parse_closure_ty<'a, 'tcx>(st: &mut PState<'a, 'tcx>,
conv: conv_did) -> ty::ClosureTy<'tcx> {
let unsafety = parse_unsafety(next(st));
let onceness = parse_onceness(next(st));
let store = parse_trait_store(st, |x,y| conv(x,y));
let bounds = parse_existential_bounds(st, |x,y| conv(x,y));
let sig = parse_sig(st, |x,y| conv(x,y));
let abi = parse_abi_set(st);
ty::ClosureTy {
unsafety: unsafety,
onceness: onceness,
store: store,
bounds: bounds,
sig: sig,
abi: abi,
}
}
fn parse_bare_fn_ty<'a, 'tcx>(st: &mut PState<'a, 'tcx>,
conv: conv_did) -> ty::BareFnTy<'tcx> {
let unsafety = parse_unsafety(next(st));
let abi = parse_abi_set(st);
let sig = parse_sig(st, |x,y| conv(x,y));
ty::BareFnTy {
unsafety: unsafety,
abi: abi,
sig: sig
}
}
fn parse_sig<'a, 'tcx>(st: &mut PState<'a, 'tcx>, conv: conv_did) -> ty::FnSig<'tcx> {
assert_eq!(next(st), '[');
let mut inputs = Vec::new();
while peek(st) != ']' {
inputs.push(parse_ty(st, |x,y| conv(x,y)));
}
st.pos += 1u; // eat the ']'
let variadic = match next(st) {
'V' => true,
'N' => false,
r => panic!(format!("bad variadic: {}", r)),
};
let output = match peek(st) {
'z' => {
st.pos += 1u;
ty::FnDiverging
}
_ => ty::FnConverging(parse_ty(st, |x,y| conv(x,y)))
};
ty::FnSig {inputs: inputs,
output: output,
variadic: variadic}
}
// Rust metadata parsing
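// A def id is serialized as the ASCII bytes `<crate num>:<node id>`; for example
// (illustrative values only) the bytes `3:17` decode to a DefId with krate 3 and
// node 17.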
pub fn parse_def_id(buf: &[u8]) -> ast::DefId {
let mut colon_idx = 0u;
let len = buf.len();
while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1u; }
if colon_idx == len {
error!("didn't find ':' when parsing def id");
panic!();
}
let crate_part = buf[0u..colon_idx];
let def_part = buf[colon_idx + 1u..len];
let crate_num = match str::from_utf8(crate_part).and_then(from_str::<uint>) {
Some(cn) => cn as ast::CrateNum,
None => panic!("internal error: parse_def_id: crate number expected, found {}",
crate_part)
};
let def_num = match str::from_utf8(def_part).and_then(from_str::<uint>) {
Some(dn) => dn as ast::NodeId,
None => panic!("internal error: parse_def_id: id expected, found {}",
def_part)
};
ast::DefId { krate: crate_num, node: def_num }
}
pub fn parse_predicate_data<'tcx>(data: &[u8],
start: uint,
crate_num: ast::CrateNum,
tcx: &ty::ctxt<'tcx>,
conv: conv_did)
-> ty::Predicate<'tcx>
{
let mut st = parse_state_from_data(data, crate_num, start, tcx);
parse_predicate(&mut st, conv)
}
pub fn parse_predicate<'a,'tcx>(st: &mut PState<'a, 'tcx>,
conv: conv_did)
-> ty::Predicate<'tcx>
{
match next(st) {
't' => ty::Predicate::Trait(Rc::new(parse_trait_ref(st, conv))),
'e' => ty::Predicate::Equate(parse_ty(st, |x,y| conv(x,y)),
parse_ty(st, |x,y| conv(x,y))),
'r' => ty::Predicate::RegionOutlives(parse_region(st, |x,y| conv(x,y)),
parse_region(st, |x,y| conv(x,y))),
'o' => ty::Predicate::TypeOutlives(parse_ty(st, |x,y| conv(x,y)),
parse_region(st, |x,y| conv(x,y))),
c => panic!("Encountered invalid character in metadata: {}", c)
}
}
pub fn parse_type_param_def_data<'tcx>(data: &[u8], start: uint,
crate_num: ast::CrateNum, tcx: &ty::ctxt<'tcx>,
conv: conv_did) -> ty::TypeParameterDef<'tcx>
{
let mut st = parse_state_from_data(data, crate_num, start, tcx);
parse_type_param_def(&mut st, conv)
}
fn parse_type_param_def<'a, 'tcx>(st: &mut PState<'a, 'tcx>, conv: conv_did)
-> ty::TypeParameterDef<'tcx> {
let name = parse_name(st, ':');
let def_id = parse_def(st, NominalType, |x,y| conv(x,y));
let space = parse_param_space(st);
assert_eq!(next(st), '|');
let index = parse_uint(st);
assert_eq!(next(st), '|');
let associated_with = parse_opt(st, |st| {
parse_def(st, NominalType, |x,y| conv(x,y))
});
assert_eq!(next(st), '|');
let bounds = parse_bounds(st, |x,y| conv(x,y));
let default = parse_opt(st, |st| parse_ty(st, |x,y| conv(x,y)));
ty::TypeParameterDef {
name: name,
def_id: def_id,
space: space,
index: index,
associated_with: associated_with,
bounds: bounds,
default: default
}
}
fn parse_existential_bounds(st: &mut PState, conv: conv_did) -> ty::ExistentialBounds {
let r = parse_region(st, |x,y| conv(x,y));
let bb = parse_builtin_bounds(st, conv);
return ty::ExistentialBounds { region_bound: r, builtin_bounds: bb };
}
fn parse_builtin_bounds(st: &mut PState, _conv: conv_did) -> ty::BuiltinBounds {
let mut builtin_bounds = ty::empty_builtin_bounds();
loop {
match next(st) {
'S' => {
builtin_bounds.insert(ty::BoundSend);
}
'Z' => {
builtin_bounds.insert(ty::BoundSized);
}
'P' => {
builtin_bounds.insert(ty::BoundCopy);
}
'T' => {
builtin_bounds.insert(ty::BoundSync);
}
'.' => {
return builtin_bounds;
}
c => {
panic!("parse_bounds: bad builtin bounds ('{}')", c)
}
}
}
}
fn parse_bounds<'a, 'tcx>(st: &mut PState<'a, 'tcx>, conv: conv_did)
-> ty::ParamBounds<'tcx> {
let builtin_bounds = parse_builtin_bounds(st, |x,y| conv(x,y));
let mut param_bounds = ty::ParamBounds {
region_bounds: Vec::new(),
builtin_bounds: builtin_bounds,
trait_bounds: Vec::new()
};
loop {
match next(st) {
'R' => {
param_bounds.region_bounds.push(parse_region(st, |x, y| conv (x, y)));
}
'I' => {
param_bounds.trait_bounds.push(Rc::new(parse_trait_ref(st, |x,y| conv(x,y))));
}
'.' => {
return param_bounds;
}
c => {
panic!("parse_bounds: bad bounds ('{}')", c)
}
}
}
}
| 32.427835 | 94 | 0.518081 |
76b7e91b442aaa97d16f71ec48773af23b4640fc
| 3,307 |
//! Main module for Constellation, used for setting up a Constellation instance
//! and specifying properties and configurations. See SingleThreadedConstellation
//! and MultiThreadedConstellation for examples.
use crate::error::ConstellationError;
use crate::implementation::constellation_identifier::ConstellationIdentifier;
use crate::{ActivityIdentifier, ActivityTrait, Context, Event};
use std::sync::{Arc, Mutex};
/// Has to implement Sync and Send to be able to be shared in Arc<Mutex<..>>
/// between threads. mopa::Any enables downcasting on the trait object.
pub trait ConstellationTrait: Sync + Send + mopa::Any {
/// Activate Constellation instance.
///
/// When created, the Constellation instance is inactive in order for the
/// user to be able to change configuration and properties before
/// activation.
///
/// # Returns
    /// * `Result<bool, ConstellationError>` - Result struct which contains a
/// boolean to indicate whether activation was successful or not. Upon
/// error it will return ConstellationError
fn activate(&mut self) -> Result<bool, ConstellationError>;
/// Submit an activity to Constellation. Make sure to handle the
/// ActivityIdentifier properly so that it is unique for the entire
/// constellation instance.
///
/// # Arguments
/// * `activity` - A reference to an activity implementing the ActivityTrait.
    /// The activity must be wrapped in an Arc<Mutex<..>> so that it can be
    /// shared safely between threads.
/// * `context` - A reference to the context created for this activity
/// * `may_be_stolen` - A boolean indicating whether this activity can be
/// stolen or not.
/// * `expects_events` - A boolean indicating whether this activity expects
/// events or not.
///
/// # Returns
/// * `ActivityIdentifier` - The generated Activity Identifier for
/// this Activity.
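    ///
    /// # Example
    ///
    /// An illustrative sketch only; `constellation`, `my_activity` and `ctx`
    /// are hypothetical values, not items defined in this crate:
    ///
    /// ```ignore
    /// let aid = constellation.submit(
    ///     Arc::new(Mutex::new(my_activity)),
    ///     &ctx,
    ///     true,   // may be stolen by another executor
    ///     false,  // does not expect events
    /// );
    /// ```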
fn submit(
&mut self,
activity: Arc<Mutex<dyn ActivityTrait>>,
context: &Context,
may_be_stolen: bool,
expects_events: bool,
) -> ActivityIdentifier;
/// Send an event
///
/// # Arguments
/// * `e` - The event to send, an event may contain a user-defined Payload
/// struct, containing data.
fn send(&mut self, e: Box<Event>);
/// Terminate Constellation instance.
///
/// # Returns
    /// * `Result<bool, ConstellationError>` - Result which contains a boolean
/// indicating whether Constellation successfully shutdown, upon error
/// a ConstellationError will be returned.
fn done(&mut self) -> Result<bool, ConstellationError>;
/// Return the identifier for this Constellation instance
///
/// # Returns
/// * `ConstellationIdentifier` - An identifier for this specific
/// Constellation instance.
fn identifier(&mut self) -> ConstellationIdentifier;
/// Check if the calling node is master.
///
/// # Returns
    /// * `Result<bool, ConstellationError>` - Result<..>, which upon a
/// successful call contains *true* if node is master and *false* if not.
fn is_master(&self) -> Result<bool, ConstellationError>;
/// Return the number of nodes in this constellation instance.
fn nodes(&mut self) -> i32;
}
mopafy!(ConstellationTrait);
| 39.369048 | 81 | 0.674025 |
d7825df967f937da35ab26f4ac47e9f92cf33d1f
| 5,099 |
///////////////////////////////////////////////////////////////////////////////
//
// Copyright 2018-2019 Airalab <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
///////////////////////////////////////////////////////////////////////////////
use std::sync::Arc;
use client::{self, Client};
use codec::{Encode, Decode};
use primitives::{Bytes, Blake2Hasher, H256};
use runtime_primitives::{generic, traits};
use transaction_pool::{
txpool::{
ChainApi as PoolChainApi,
ExHash as ExHashT,
Pool,
},
};
use msgs::substrate_ros_msgs::{
ExHash, RawExtrinsic,
SubmitExtrinsic, SubmitExtrinsicRes,
PendingExtrinsics, PendingExtrinsicsRes,
RemoveExtrinsic, RemoveExtrinsicRes,
};
use rosrust::api::error::Error;
use crate::traits::RosRpc;
const SUBMIT_SRV_NAME: &str = "/author/submit_extrinsic";
const REMOVE_SRV_NAME: &str = "/author/remove_extrinsic";
const PENDING_SRV_NAME: &str = "/author/pending_extrinsics";
/// Authoring API
pub struct Author<B, E, P, RA> where P: PoolChainApi + Sync + Send + 'static {
/// Substrate client
client: Arc<Client<B, E, <P as PoolChainApi>::Block, RA>>,
/// Transactions pool
pool: Arc<Pool<P>>,
}
impl<B, E, P, RA> Author<B, E, P, RA> where
B: client::backend::Backend<<P as PoolChainApi>::Block, Blake2Hasher> + Send + Sync + 'static,
E: client::CallExecutor<<P as PoolChainApi>::Block, Blake2Hasher> + Send + Sync + 'static,
P: PoolChainApi<Hash=H256> + Sync + Send + 'static,
P::Block: traits::Block<Hash=H256>,
P::Error: 'static,
RA: Send + Sync + 'static
{
/// Create new instance of Authoring API.
pub fn new(
client: Arc<Client<B, E, <P as PoolChainApi>::Block, RA>>,
pool: Arc<Pool<P>>,
) -> Self {
Author {
client,
pool,
}
}
fn submit_extrinsic(&self, ext: Bytes) -> Result<ExHashT<P>, &str> {
let xt = Decode::decode(&mut &ext[..]).map_err(|_| "Bad extrinsic format")?;
let best_block_hash = self.client.info().chain.best_hash;
self.pool
.submit_one(&generic::BlockId::hash(best_block_hash), xt)
.map_err(|_| "Extrinsic pool error")
}
fn pending_extrinsics(&self) -> Vec<Bytes> {
self.pool.ready().map(|tx| tx.data.encode().into()).collect()
}
fn remove_extrinsic(&self, hashes: Vec<ExHashT<P>>) -> Vec<ExHashT<P>> {
self.pool.remove_invalid(&hashes)
.into_iter()
.map(|tx| tx.hash.clone())
.collect()
}
}
impl<B, E, P, RA> RosRpc for Author<B, E, P, RA> where
B: client::backend::Backend<<P as PoolChainApi>::Block, Blake2Hasher> + Send + Sync + 'static,
E: client::CallExecutor<<P as PoolChainApi>::Block, Blake2Hasher> + Send + Sync + 'static,
P: PoolChainApi<Hash=H256> + Sync + Send + 'static,
P::Block: traits::Block<Hash=H256>,
P::Error: 'static,
RA: Send + Sync + 'static
{
fn start(api: Arc<Self>) -> Result<Vec<rosrust::Service>, Error> {
let mut services = vec![];
let api1 = api.clone();
services.push(
rosrust::service::<SubmitExtrinsic, _>(SUBMIT_SRV_NAME, move |req| {
let mut res = SubmitExtrinsicRes::default();
match api1.submit_extrinsic(req.extrinsic.data.into()) {
Ok(hash) => {
res.hash = ExHash::default();
res.hash.data = hash.into();
},
Err(err) => res.error = err.to_string()
}
Ok(res)
})?
);
let api2 = api.clone();
services.push(
rosrust::service::<PendingExtrinsics, _>(PENDING_SRV_NAME, move |_req| {
let mut res = PendingExtrinsicsRes::default();
for xt in api2.pending_extrinsics() {
let mut xt_msg = RawExtrinsic::default();
for b in xt.iter() { xt_msg.data.push(*b); }
res.extrinsics.push(xt_msg);
}
Ok(res)
})?
);
let api3 = api.clone();
services.push(
rosrust::service::<RemoveExtrinsic, _>(REMOVE_SRV_NAME, move |req| {
let mut res = RemoveExtrinsicRes::default();
let hashes = req.extrinsics.iter().map(|h| h.data.into()).collect();
for xt in api3.remove_extrinsic(hashes) {
let mut hash_msg = ExHash::default();
hash_msg.data = xt.into();
res.extrinsics.push(hash_msg);
}
Ok(res)
})?
);
Ok(services)
}
}
| 34.687075 | 95 | 0.575603 |
db8d3b7905340c4e259385701813d20705ae7e09
| 6,969 |
//! Ratio (dimensionless quantity).
#[cfg(feature = "std")]
use super::angle::{Angle, radian};
quantity! {
/// Ratio (dimensionless quantity).
quantity: Ratio; "ratio";
/// Dimension of ratio, 1 (dimensionless).
dimension: ISQ<
Z0, // length
Z0, // mass
Z0, // time
Z0, // electric current
Z0, // thermodynamic temperature
Z0, // amount of substance
Z0>; // luminous intensity
units {
@ratio: 1.0; "", "", "";
@part_per_hundred: 1.0_E-2; "parts per hundred", "part per hundred", "parts per hundred";
@percent: 1.0_E-2; "%", "percent", "percent";
@part_per_thousand: 1.0_E-3; "parts per thousand", "part per thousand",
"parts per thousand";
@per_mille: 1.0_E-3; "‰", "per mille", "per mille";
        @part_per_ten_thousand: 1.0_E-4; "parts per ten thousand", "part per ten thousand",
"parts per ten thousand"; // ‱, doesn't display properly.
@basis_point: 1.0_E-4; "bp", "basis point", "basis points";
@part_per_million: 1.0_E-6; "ppm", "part per million", "parts per million";
@part_per_billion: 1.0_E-9; "ppb", "part per billion", "parts per billion";
@part_per_trillion: 1.0_E-12; "ppt", "part per trillion", "parts per trillion";
@part_per_quadrillion: 1.0_E-15; "ppq", "part per quadrillion", "parts per quadrillion";
@decibel: 1.0, 10.0, 20.0; "dB", "decibel", "decibels";
}
}
/// Implementation of various stdlib inverse trigonometric functions
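///
/// # Example
///
/// A minimal sketch, assuming the `f64` storage type and the usual `uom::si`
/// module layout:
///
/// ```ignore
/// use uom::si::f64::{Angle, Ratio};
/// use uom::si::ratio::ratio;
///
/// let r = Ratio::new::<ratio>(0.5);
/// let a: Angle = r.asin(); // roughly 0.5236 rad, i.e. 30 degrees
/// ```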
#[cfg(feature = "std")]
impl<U, V> Ratio<U, V>
where
U: crate::si::Units<V> + ?Sized,
V: crate::num::Float + crate::Conversion<V>,
radian: crate::Conversion<V, T = V::T>,
{
/// Computes the value of the inverse cosine of the ratio.
#[inline(always)]
pub fn acos(self) -> Angle<U, V> {
Angle::new::<radian>(self.value.acos())
}
/// Computes the value of the inverse hyperbolic cosine of the ratio.
#[inline(always)]
pub fn acosh(self) -> Angle<U, V> {
Angle::new::<radian>(self.value.acosh())
}
/// Computes the value of the inverse sine of the ratio.
#[inline(always)]
pub fn asin(self) -> Angle<U, V> {
Angle::new::<radian>(self.value.asin())
}
/// Computes the value of the inverse hyperbolic sine of the ratio.
#[inline(always)]
pub fn asinh(self) -> Angle<U, V> {
Angle::new::<radian>(self.value.asinh())
}
/// Computes the value of the inverse tangent of the ratio.
#[inline(always)]
pub fn atan(self) -> Angle<U, V> {
Angle::new::<radian>(self.value.atan())
}
/// Computes the value of the inverse hyperbolic tangent of the ratio.
#[inline(always)]
pub fn atanh(self) -> Angle<U, V> {
Angle::new::<radian>(self.value.atanh())
}
}
mod convert {
use super::*;
impl<U, V> From<V> for Ratio<U, V>
where
U: crate::si::Units<V> + ?Sized,
V: crate::num::Num + crate::Conversion<V>,
{
fn from(t: V) -> Self {
Ratio {
dimension: crate::lib::marker::PhantomData,
units: crate::lib::marker::PhantomData,
value: t,
}
}
}
storage_types! {
use super::*;
impl<U> From<Ratio<U, V>> for V
where
U: crate::si::Units<V> + ?Sized,
V: crate::num::Num + crate::Conversion<V>,
{
fn from(t: Ratio<U, V>) -> Self {
t.value
}
}
}
}
#[cfg(test)]
mod tests {
storage_types! {
use crate::num::{FromPrimitive, One};
use crate::si::quantities::*;
use crate::si::ratio as r;
use crate::tests::Test;
#[test]
fn from() {
let r1: Ratio<V> = Ratio::<V>::from(V::one());
let r2: Ratio<V> = V::one().into();
let _: V = V::from(r1);
let _: V = r2.into();
}
#[test]
fn check_units() {
Test::assert_eq(&Ratio::new::<r::ratio>(V::one() / V::from_f64(1.0_E2).unwrap()),
&Ratio::new::<r::part_per_hundred>(V::one()));
Test::assert_eq(&Ratio::new::<r::ratio>(V::one() / V::from_f64(1.0_E2).unwrap()),
&Ratio::new::<r::percent>(V::one()));
Test::assert_eq(&Ratio::new::<r::ratio>(V::one() / V::from_f64(1.0_E3).unwrap()),
&Ratio::new::<r::part_per_thousand>(V::one()));
Test::assert_eq(&Ratio::new::<r::ratio>(V::one() / V::from_f64(1.0_E3).unwrap()),
&Ratio::new::<r::per_mille>(V::one()));
Test::assert_eq(&Ratio::new::<r::ratio>(V::one() / V::from_f64(1.0_E4).unwrap()),
&Ratio::new::<r::part_per_ten_thousand>(V::one()));
Test::assert_eq(&Ratio::new::<r::ratio>(V::one() / V::from_f64(1.0_E4).unwrap()),
&Ratio::new::<r::basis_point>(V::one()));
Test::assert_eq(&Ratio::new::<r::ratio>(V::one() / V::from_f64(1.0_E6).unwrap()),
&Ratio::new::<r::part_per_million>(V::one()));
Test::assert_eq(&Ratio::new::<r::ratio>(V::one() / V::from_f64(1.0_E9).unwrap()),
&Ratio::new::<r::part_per_billion>(V::one()));
Test::assert_eq(&Ratio::new::<r::ratio>(V::one()
/ V::from_f64(1.0_E12).unwrap()),
&Ratio::new::<r::part_per_trillion>(V::one()));
Test::assert_eq(&Ratio::new::<r::ratio>(V::one()
/ V::from_f64(1.0_E15).unwrap()),
&Ratio::new::<r::part_per_quadrillion>(V::one()));
}
}
#[cfg(feature = "std")]
mod inv_trig {
storage_types! {
types: Float;
use crate::si::angle as a;
use crate::si::quantities::*;
use crate::tests::Test;
fn test_nan_or_eq(yl: V, yr: V) -> bool {
(yl.is_nan() && yr.is_nan()) || Test::eq(&yl, &yr)
}
quickcheck! {
fn acos(x: V) -> bool {
test_nan_or_eq(x.acos(), Ratio::from(x).acos().get::<a::radian>())
}
fn acosh(x: V) -> bool {
test_nan_or_eq(x.acosh(), Ratio::from(x).acosh().get::<a::radian>())
}
fn asin(x: V) -> bool {
test_nan_or_eq(x.asin(), Ratio::from(x).asin().get::<a::radian>())
}
fn asinh(x: V) -> bool {
test_nan_or_eq(x.asinh(), Ratio::from(x).asinh().get::<a::radian>())
}
fn atan(x: V) -> bool {
test_nan_or_eq(x.atan(), Ratio::from(x).atan().get::<a::radian>())
}
fn atanh(x: V) -> bool {
test_nan_or_eq(x.atanh(), Ratio::from(x).atanh().get::<a::radian>())
}
}
}
}
}
| 35.19697 | 97 | 0.49835 |
089bee8a9fe6406a460f5cb0ad90bfbc89623dae
| 14,788 |
use std::mem;
use std::slice;
use std::ffi::{CStr, CString};
use std::io;
use std::rc::Rc;
use libc::{c_int, c_char};
use ffi;
use {ErrorType, Type, Pointer, InterpretResult};
fn default_write(_: &mut VM, text: &str) {
print!("{}", text);
}
fn default_error(_: &mut VM, _type: ErrorType, module: &str, line: i32, message: &str) {
match _type {
ErrorType::Compile => println!("[{} line {}] {}", module, line, message),
ErrorType::Runtime => println!("{}", message),
ErrorType::StackTrace => println!("[{} line {}] in {}", module, line, message),
}
}
fn default_load_module(_: &mut VM, name: &str) -> Option<String> {
use std::path::PathBuf;
use std::fs::File;
use std::io::Read;
let mut buffer = String::new();
// Look for a file named [name].wren.
let mut name_path = PathBuf::new();
name_path.push(name);
name_path.set_extension("wren");
let result = File::open(&name_path).map(|mut f| f.read_to_string(&mut buffer));
if result.is_ok() {
return Some(buffer);
}
// If that fails, treat [name] as a directory and look for module.wren in there.
name_path.set_extension("");
name_path.push("module");
name_path.set_extension("wren");
buffer.clear();
let result = File::open(&name_path).map(|mut f| f.read_to_string(&mut buffer));
if result.is_ok() { Some(buffer) } else { None }
}
/// Wrapper around `WrenConfiguration`. Refer to `wren.h` for info on each field.
pub struct Configuration(ffi::WrenConfiguration);
impl Configuration {
/// Create a new Configuration using `wrenInitConfiguration`.
///
/// This also sets the printing and module loading functions to mimic those used in the CLI interpreter.
pub fn new() -> Configuration {
let mut raw: ffi::WrenConfiguration = unsafe { mem::uninitialized() };
unsafe { ffi::wrenInitConfiguration(&mut raw) }
let mut cfg = Configuration(raw);
cfg.set_write_fn(wren_write_fn!(default_write));
cfg.set_error_fn(wren_error_fn!(default_error));
cfg.set_load_module_fn(wren_load_module_fn!(default_load_module));
cfg
}
pub fn set_reallocate_fn(&mut self, f: ::ReallocateFn) {
self.0.reallocate_fn = f;
}
pub fn set_load_module_fn(&mut self, f: ::LoadModuleFn) {
self.0.load_module_fn = f;
}
pub fn set_bind_foreign_method_fn(&mut self, f: ::BindForeignMethodFn) {
self.0.bind_foreign_method_fn = f;
}
pub fn set_bind_foreign_class_fn(&mut self, f: ::BindForeignClassFn) {
self.0.bind_foreign_class_fn = f;
}
pub fn set_write_fn(&mut self, f: ::WriteFn) {
self.0.write_fn = f;
}
pub fn set_error_fn(&mut self, f: ::ErrorFn) {
self.0.error_fn = f;
}
pub fn set_initial_heap_size(&mut self, size: usize) {
self.0.initial_heap_size = size;
}
pub fn set_min_heap_size(&mut self, size: usize) {
self.0.min_heap_size = size;
}
pub fn set_heap_growth_percent(&mut self, percent: i32) {
self.0.heap_growth_percent = percent;
}
pub fn set_user_data(&mut self, data: Pointer) {
self.0.user_data = data;
}
}
/// Reference-counted wrapper around `WrenHandle`.
///
/// Automatically calls `wrenReleaseHandle` when there are no more references.
#[derive(Clone)]
pub struct Handle(Rc<RawHandle>);
struct RawHandle {
raw: *mut ffi::WrenHandle,
vm: *mut ffi::WrenVM,
}
impl Drop for RawHandle {
fn drop(&mut self) {
unsafe { ffi::wrenReleaseHandle(self.vm, self.raw) }
}
}
/// Wrapper around `WrenForeignClassMethods`.
#[derive(Copy, Clone)]
pub struct ForeignClassMethods(ffi::WrenForeignClassMethods);
impl ForeignClassMethods {
pub fn new() -> ForeignClassMethods {
ForeignClassMethods(ffi::WrenForeignClassMethods {
allocate: None,
finalize: None,
})
}
pub fn set_allocate_fn(&mut self, f: ::ForeignMethodFn) {
self.0.allocate = f;
}
pub fn set_finalize_fn(&mut self, f: ::FinalizerFn) {
self.0.finalize = f;
}
#[doc(hidden)]
pub fn get(&self) -> ffi::WrenForeignClassMethods {
self.0
}
}
/// Wrapper around `WrenVM`. Refer to `wren.h` for info on each function.
///
/// Some functions have additional safety features. In particular:
///
/// 1. Functions that retrieve slot values will perform type checking and return an Option.
///
/// 2. `wrenEnsureSlots` is called automatically where needed.
///
/// 3. Functions that operate on lists will validate their parameters.
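///
/// # Example
///
/// An illustrative sketch only, assuming the default `Configuration`:
///
/// ```ignore
/// let mut vm = VM::new(Configuration::new());
/// vm.interpret("System.print(\"hello from Wren\")");
///
/// // Slot helpers grow the slot array automatically before writing.
/// vm.set_slot_double(0, 1.5);
/// assert_eq!(vm.get_slot_double(0), Some(1.5));
/// ```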
pub struct VM {
raw: *mut ffi::WrenVM,
owned: bool,
}
impl VM {
/// Create a new VM.
pub fn new(cfg: Configuration) -> VM {
let mut cfg = cfg;
let raw = unsafe { ffi::wrenNewVM(&mut cfg.0) };
VM { raw, owned: true }
}
/// Create a wrapper around an existing WrenVM pointer.
///
/// This is mainly used by function wrapping macros.
pub unsafe fn from_ptr(ptr: *mut ffi::WrenVM) -> VM {
VM {
raw: ptr,
owned: false,
}
}
/// Maps to `wrenCollectGarbage`.
pub fn collect_garbage(&mut self) {
unsafe { ffi::wrenCollectGarbage(self.raw) }
}
/// Maps to `wrenInterpret`.
pub fn interpret(&mut self, source: &str) -> InterpretResult {
let source_cstr = CString::new(source).unwrap();
unsafe { ffi::wrenInterpret(self.raw, source_cstr.as_ptr()) }
}
/// Convenience function that loads a script from a file and interprets it.
pub fn interpret_file(&mut self, path: &str) -> Result<InterpretResult, io::Error> {
use std::fs::File;
use std::io::Read;
let mut buffer = String::new();
let mut file = File::open(path)?;
file.read_to_string(&mut buffer)?;
Ok(self.interpret(&buffer))
}
/// Maps to `wrenMakeCallHandle`.
pub fn make_call_handle(&mut self, signature: &str) -> Handle {
let signature_cstr = CString::new(signature).unwrap();
let handle = RawHandle {
raw: unsafe { ffi::wrenMakeCallHandle(self.raw, signature_cstr.as_ptr()) },
vm: self.raw,
};
Handle(Rc::new(handle))
}
/// Maps to `wrenCall`.
pub fn call(&mut self, method: &Handle) -> InterpretResult {
unsafe { ffi::wrenCall(self.raw, method.0.raw) }
}
/*
/// Maps to `wrenReleaseHandle`.
pub fn release_handle(&mut self, handle: Handle) {
unsafe { ffi::wrenReleaseHandle(self.raw, handle.0.raw) }
}
*/
/// Maps to `wrenGetSlotCount`.
pub fn get_slot_count(&mut self) -> i32 {
unsafe { ffi::wrenGetSlotCount(self.raw) }
}
// This gets called automatically where needed.
fn ensure_slots(&mut self, num_slots: i32) {
unsafe { ffi::wrenEnsureSlots(self.raw, num_slots) }
}
/// Maps to `wrenGetSlotType`.
pub fn get_slot_type(&mut self, slot: i32) -> Type {
assert!(self.get_slot_count() > slot,
"Slot {} is out of bounds",
slot);
unsafe { ffi::wrenGetSlotType(self.raw, slot) }
}
/// Maps to `wrenGetSlotBool`.
///
/// Returns `None` if the value in `slot` isn't a bool.
pub fn get_slot_bool(&mut self, slot: i32) -> Option<bool> {
if self.get_slot_type(slot) == Type::Bool {
Some(unsafe { ffi::wrenGetSlotBool(self.raw, slot) != 0 })
} else {
None
}
}
/// Maps to `wrenGetSlotBytes`.
///
/// Returns `None` if the value in `slot` isn't a string.
pub fn get_slot_bytes(&mut self, slot: i32) -> Option<&[u8]> {
if self.get_slot_type(slot) == Type::String {
let mut length = unsafe { mem::uninitialized() };
let ptr = unsafe { ffi::wrenGetSlotBytes(self.raw, slot, &mut length) };
Some(unsafe { slice::from_raw_parts(ptr as *const u8, length as usize) })
} else {
None
}
}
/// Maps to `wrenGetSlotDouble`.
///
/// Returns `None` if the value in `slot` isn't a number.
pub fn get_slot_double(&mut self, slot: i32) -> Option<f64> {
if self.get_slot_type(slot) == Type::Num {
Some(unsafe { ffi::wrenGetSlotDouble(self.raw, slot) })
} else {
None
}
}
/// Maps to `wrenGetSlotForeign`.
///
/// Returns `None` if the value in `slot` isn't a foreign object.
pub fn get_slot_foreign(&mut self, slot: i32) -> Option<Pointer> {
if self.get_slot_type(slot) == Type::Foreign {
Some(unsafe { ffi::wrenGetSlotForeign(self.raw, slot) })
} else {
None
}
}
/// Convenience function that calls `wrenGetSlotForeign` and casts the result.
///
/// This function uses `mem::transmute` internally and is therefore very unsafe.
pub unsafe fn get_slot_foreign_typed<T>(&mut self, slot: i32) -> &mut T {
assert!(self.get_slot_type(slot) == Type::Foreign,
"Slot {} must contain a foreign object",
slot);
mem::transmute::<Pointer, &mut T>(ffi::wrenGetSlotForeign(self.raw, slot))
}
/// Maps to `wrenGetSlotString`.
///
/// Returns `None` if the value in `slot` isn't a string.
pub fn get_slot_string(&mut self, slot: i32) -> Option<&str> {
if self.get_slot_type(slot) == Type::String {
let ptr = unsafe { ffi::wrenGetSlotString(self.raw, slot) };
Some(unsafe { CStr::from_ptr(ptr).to_str().unwrap() })
} else {
None
}
}
/// Maps to `wrenGetSlotHandle`.
pub fn get_slot_handle(&mut self, slot: i32) -> Handle {
assert!(self.get_slot_count() > slot,
"Slot {} is out of bounds",
slot);
let handle = RawHandle {
raw: unsafe { ffi::wrenGetSlotHandle(self.raw, slot) },
vm: self.raw,
};
Handle(Rc::new(handle))
}
/// Maps to `wrenSetSlotBool`.
pub fn set_slot_bool(&mut self, slot: i32, value: bool) {
self.ensure_slots(slot + 1);
unsafe { ffi::wrenSetSlotBool(self.raw, slot, value as c_int) }
}
/// Maps to `wrenSetSlotBytes`.
pub fn set_slot_bytes(&mut self, slot: i32, bytes: &[u8]) {
self.ensure_slots(slot + 1);
let ptr = bytes.as_ptr() as *const c_char;
let len = bytes.len();
unsafe { ffi::wrenSetSlotBytes(self.raw, slot, ptr, len) }
}
/// Maps to `wrenSetSlotDouble`.
pub fn set_slot_double(&mut self, slot: i32, value: f64) {
self.ensure_slots(slot + 1);
unsafe { ffi::wrenSetSlotDouble(self.raw, slot, value) }
}
/// Maps to `wrenSetSlotNewForeign`.
pub fn set_slot_new_foreign(&mut self, slot: i32, class_slot: i32, size: usize) -> Pointer {
self.ensure_slots(slot + 1);
unsafe { ffi::wrenSetSlotNewForeign(self.raw, slot, class_slot, size) }
}
/// Convenience function that calls `wrenSetSlotNewForeign` using type information.
pub fn set_slot_new_foreign_typed<T>(&mut self, slot: i32, class_slot: i32) -> *mut T {
self.set_slot_new_foreign(slot, class_slot, mem::size_of::<T>()) as *mut T
}
/// Maps to `wrenSetSlotNewList`.
pub fn set_slot_new_list(&mut self, slot: i32) {
self.ensure_slots(slot + 1);
unsafe { ffi::wrenSetSlotNewList(self.raw, slot) }
}
/// Maps to `wrenSetSlotNull`.
pub fn set_slot_null(&mut self, slot: i32) {
self.ensure_slots(slot + 1);
unsafe { ffi::wrenSetSlotNull(self.raw, slot) }
}
/// Maps to `wrenSetSlotString`.
pub fn set_slot_string(&mut self, slot: i32, s: &str) {
self.ensure_slots(slot + 1);
let cstr = CString::new(s).unwrap();
unsafe { ffi::wrenSetSlotString(self.raw, slot, cstr.as_ptr()) }
}
/// Maps to `wrenSetSlotHandle`.
pub fn set_slot_handle(&mut self, slot: i32, handle: &Handle) {
self.ensure_slots(slot + 1);
unsafe { ffi::wrenSetSlotHandle(self.raw, slot, handle.0.raw) }
}
/// Maps to `wrenGetListCount`.
pub fn get_list_count(&mut self, slot: i32) -> i32 {
if self.get_slot_type(slot) == Type::List {
unsafe { ffi::wrenGetListCount(self.raw, slot) }
} else {
0
}
}
// Checks parameters and converts a negative (relative) list index to an absolute index.
// Wren already does the latter, but this way we can check if the index is out of bounds.
// (which Wren doesn't do in release builds)
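    // For example (illustrative), with a 3-element list an index of -1 maps to 3,
    // i.e. one past the last element, which is the append position for insert_in_list.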
fn check_index(&mut self, list_slot: i32, index: i32) -> i32 {
assert!(self.get_slot_type(list_slot) == Type::List,
"Slot {} must contain a list",
list_slot);
let list_count = self.get_list_count(list_slot);
let index = if index < 0 {
list_count + 1 + index
} else {
index
};
assert!(index <= list_count, "List index out of bounds");
index
}
/// Maps to `wrenGetListElement`.
pub fn get_list_element(&mut self, list_slot: i32, index: i32, element_slot: i32) {
self.ensure_slots(element_slot + 1);
let index = self.check_index(list_slot, index);
unsafe { ffi::wrenGetListElement(self.raw, list_slot, index, element_slot) };
}
/// Maps to `wrenInsertInList`.
pub fn insert_in_list(&mut self, list_slot: i32, index: i32, element_slot: i32) {
assert!(element_slot < self.get_slot_count(),
"No element in slot {}",
element_slot);
let index = self.check_index(list_slot, index);
unsafe { ffi::wrenInsertInList(self.raw, list_slot, index, element_slot) };
}
/// Maps to `wrenGetVariable`.
pub fn get_variable(&mut self, module: &str, name: &str, slot: i32) {
self.ensure_slots(slot + 1);
let module_cstr = CString::new(module).unwrap();
let name_cstr = CString::new(name).unwrap();
unsafe { ffi::wrenGetVariable(self.raw, module_cstr.as_ptr(), name_cstr.as_ptr(), slot) }
}
/// Maps to `wrenAbortFiber`.
pub fn abort_fiber(&mut self, slot: i32) {
unsafe { ffi::wrenAbortFiber(self.raw, slot) }
}
/// Maps to `wrenGetUserData`.
pub fn get_user_data(&mut self) -> Pointer {
unsafe { ffi::wrenGetUserData(self.raw) }
}
/// Maps to `wrenSetUserData`.
pub fn set_user_data(&mut self, data: Pointer) {
unsafe { ffi::wrenSetUserData(self.raw, data) }
}
}
impl Drop for VM {
fn drop(&mut self) {
if self.owned {
unsafe { ffi::wrenFreeVM(self.raw) }
}
}
}
| 32.716814 | 108 | 0.598729 |
cc5064bad1cddcb9c787180eb83ac0980fd165ab
| 18,130 |
// Copyright Materialize, Inc. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
//! Symbiosis mode.
//!
//! In symbiosis mode, Materialize will conjoin with an OLTP database to
//! masquerade as a HTAP system. All DDL statements and writes will be routed to
//! the OLTP database (like `CREATE TABLE`, `INSERT`, etc.), while reads will be
//! routed through Materialize. Changes to the tables in the OLTP database are
//! automatically streamed through Materialize.
//!
//! The only supported OLTP database at the moment is PostgreSQL. Supporting
//! other databases is complicated by the fact that we roughly follow
//! Postgres's SQL semantics; using, say, MySQL, would be rather confusing,
//! because `INSERT`, `UPDATE`, and `DELETE` statements would be subject to a
//! wildly different set of SQL semantics than `SELECT` statements.
//!
//! Symbiosis mode is only suitable for development. It is likely to be
//! extremely slow and inefficient on large data sets.
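//!
//! # Usage sketch
//!
//! An illustrative (not verbatim) outline of how a coordinator might drive this
//! module; `pcx`, `catalog`, and `stmt` are assumed to come from the surrounding
//! session:
//!
//! ```ignore
//! let mut postgres = Postgres::open_and_erase("postgres://localhost").await?;
//! if postgres.can_handle(&stmt) {
//!     let plan = postgres.execute(&pcx, catalog, &stmt).await?;
//!     // hand `plan` back to the coordinator for sequencing
//! }
//! ```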
use std::cell::RefCell;
use std::collections::{BTreeMap, HashMap};
use std::convert::TryInto;
use std::env;
use std::rc::Rc;
use anyhow::{anyhow, bail};
use chrono::Utc;
use tokio_postgres::types::FromSql;
use uuid::Uuid;
use pgrepr::Jsonb;
use repr::adt::decimal::Significand;
use repr::{Datum, RelationDesc, RelationType, Row, RowPacker, ScalarType};
use sql::ast::{
ColumnOption, CreateTableStatement, DataType, DeleteStatement, DropObjectsStatement,
InsertStatement, ObjectType, Statement, TableConstraint, UpdateStatement,
};
use sql::catalog::Catalog;
use sql::names::FullName;
use sql::normalize;
use sql::plan::{scalar_type_from_sql, MutationKind, Plan, PlanContext, StatementContext, Table};
pub struct Postgres {
client: tokio_postgres::Client,
table_types: HashMap<FullName, (Vec<DataType>, RelationDesc)>,
}
impl Postgres {
pub async fn open_and_erase(url: &str) -> Result<Self, anyhow::Error> {
let mut config: tokio_postgres::Config = url.parse()?;
let username = whoami::username();
if config.get_user().is_none() {
config.user(
env::var("PGUSER")
.ok()
.as_deref()
.unwrap_or_else(|| &username),
);
}
if config.get_password().is_none() {
if let Ok(password) = env::var("PGPASSWORD") {
config.password(password);
}
}
if config.get_dbname().is_none() {
if let Ok(dbname) = env::var("PGDATABASE") {
config.dbname(&dbname);
}
}
if config.get_hosts().is_empty() {
config.host(
env::var("PGHOST")
.ok()
.as_deref()
.unwrap_or_else(|| "localhost"),
);
}
let (client, conn) = config
.connect(tokio_postgres::NoTls)
.await
.map_err(|err| anyhow!("Postgres connection failed: {}", err))?;
tokio::spawn(async move {
if let Err(e) = conn.await {
panic!("connection error: {}", e);
}
});
// drop all tables
client
.execute(
r#"
DO $$ DECLARE
r RECORD;
BEGIN
FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = current_schema()) LOOP
EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE';
END LOOP;
END $$;
"#,
&[],
)
.await?;
Ok(Self {
client,
table_types: HashMap::new(),
})
}
pub fn can_handle(&self, stmt: &Statement) -> bool {
match stmt {
Statement::CreateTable { .. }
| Statement::DropObjects { .. }
| Statement::Delete { .. }
| Statement::Insert { .. }
| Statement::Update { .. } => true,
_ => false,
}
}
pub async fn execute(
&mut self,
pcx: &PlanContext,
catalog: &dyn Catalog,
stmt: &Statement,
) -> Result<Plan, anyhow::Error> {
let scx = StatementContext {
pcx,
catalog,
param_types: Rc::new(RefCell::new(BTreeMap::new())),
};
Ok(match stmt {
Statement::CreateTable(CreateTableStatement {
name,
columns,
constraints,
if_not_exists,
..
}) => {
let sql_types: Vec<_> = columns
.iter()
.map(|column| column.data_type.clone())
.collect();
let names: Vec<_> = columns
.iter()
.map(|c| Some(sql::normalize::column_name(c.name.clone())))
.collect();
// Build initial relation type that handles declared data types
// and NOT NULL constraints.
let mut typ = RelationType::new(
columns
.iter()
.map(|c| {
let ty = scalar_type_from_sql(&c.data_type)?;
let nullable =
!c.options.iter().any(|o| o.option == ColumnOption::NotNull);
Ok(ty.nullable(nullable))
})
.collect::<Result<Vec<_>, anyhow::Error>>()?,
);
// Handle column-level UNIQUE and PRIMARY KEY constraints.
// PRIMARY KEY implies UNIQUE and NOT NULL.
for (index, column) in columns.iter().enumerate() {
for option in column.options.iter() {
if let ColumnOption::Unique { is_primary } = option.option {
typ = typ.with_key(vec![index]);
if is_primary {
typ.column_types[index].nullable = false;
}
}
}
}
// Handle table-level UNIQUE and PRIMARY KEY constraints.
// PRIMARY KEY implies UNIQUE and NOT NULL.
for constraint in constraints {
if let TableConstraint::Unique {
name: _,
columns,
is_primary,
} = constraint
{
let mut key = vec![];
for column in columns {
let name = normalize::column_name(column.clone());
match names.iter().position(|n| n.as_ref() == Some(&name)) {
None => bail!("unknown column {} in unique constraint", name),
Some(i) => key.push(i),
}
}
if *is_primary {
for i in key.iter() {
typ.column_types[*i].nullable = false;
}
}
typ = typ.with_key(key);
}
}
self.client.execute(&*stmt.to_string(), &[]).await?;
let name = scx.allocate_name(normalize::object_name(name.clone())?);
let desc = RelationDesc::new(typ, names);
self.table_types
.insert(name.clone(), (sql_types, desc.clone()));
let table = Table {
create_sql: stmt.to_string(),
desc,
};
Plan::CreateTable {
name,
table,
if_not_exists: *if_not_exists,
}
}
Statement::DropObjects(DropObjectsStatement {
names,
object_type: ObjectType::Table,
if_exists,
..
}) => {
self.client.execute(&*stmt.to_string(), &[]).await?;
let mut items = vec![];
for name in names {
let name = match scx.resolve_item(name.clone()) {
Ok(name) => name,
Err(err) => {
if *if_exists {
continue;
} else {
return Err(err.into());
}
}
};
items.push(catalog.get_item(&name).id());
}
Plan::DropItems {
items,
ty: ObjectType::Table,
}
}
Statement::Delete(DeleteStatement { table_name, .. }) => {
let mut updates = vec![];
let table_name = scx.resolve_item(table_name.clone())?;
let sql = format!("{} RETURNING *", stmt.to_string());
for row in self.run_query(&table_name, sql, 0).await? {
updates.push((row, -1));
}
let affected_rows = updates.len();
Plan::SendDiffs {
id: catalog.get_item(&table_name).id(),
updates,
affected_rows,
kind: MutationKind::Delete,
}
}
Statement::Insert(InsertStatement { table_name, .. }) => {
let mut updates = vec![];
let table_name = scx.resolve_item(table_name.clone())?;
// RETURNING cannot return zero columns, but we might be
// executing INSERT INTO t DEFAULT VALUES where t is a zero
// arity table. So use a time-honored trick of always including
// a junk column in RETURNING, then stripping that column out.
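                // For example, for a zero-column table the generated SQL is
                // `INSERT INTO t DEFAULT VALUES RETURNING *, 1`, and the trailing
                // `1` is dropped again by passing `junk = 1` to `run_query`.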
let sql = format!("{} RETURNING *, 1", stmt.to_string());
for row in self.run_query(&table_name, sql, 1).await? {
updates.push((row, 1));
}
let affected_rows = updates.len();
Plan::SendDiffs {
id: catalog.get_item(&table_name).id(),
updates,
affected_rows,
kind: MutationKind::Insert,
}
}
Statement::Update(UpdateStatement {
table_name,
selection,
..
}) => {
let mut updates = vec![];
let mut sql = format!("SELECT * FROM {}", table_name);
let table_name = scx.resolve_item(table_name.clone())?;
if let Some(selection) = selection {
sql += &format!(" WHERE {}", selection);
}
for row in self.run_query(&table_name, sql, 0).await? {
updates.push((row, -1))
}
let affected_rows = updates.len();
let sql = format!("{} RETURNING *", stmt.to_string());
for row in self.run_query(&table_name, sql, 0).await? {
updates.push((row, 1));
}
assert_eq!(affected_rows * 2, updates.len());
Plan::SendDiffs {
id: catalog.get_item(&table_name).id(),
updates,
affected_rows,
kind: MutationKind::Update,
}
}
_ => bail!("Unsupported symbiosis statement: {:?}", stmt),
})
}
async fn run_query(
&mut self,
table_name: &FullName,
query: String,
junk: usize,
) -> Result<Vec<Row>, anyhow::Error> {
let (sql_types, desc) = self
.table_types
.get(table_name)
.ok_or_else(|| anyhow!("Unknown table: {:?}", table_name))?
.clone();
let mut rows = vec![];
let postgres_rows = self.client.query(&*query, &[]).await?;
let mut row = RowPacker::new();
for postgres_row in postgres_rows.iter() {
for c in 0..postgres_row.len() - junk {
row = push_column(
row,
&postgres_row,
c,
&sql_types[c],
desc.typ().column_types[c].nullable,
)?;
}
rows.push(row.finish_and_reuse());
}
Ok(rows)
}
}
fn push_column(
mut row: RowPacker,
postgres_row: &tokio_postgres::Row,
i: usize,
sql_type: &DataType,
nullable: bool,
) -> Result<RowPacker, anyhow::Error> {
    // NOTE: this needs to stay in sync with materialize::sql::scalar_type_from_sql.
    // In some cases we use slightly different representations than Postgres does for
    // the same SQL types, so we have to be careful about conversions.
match sql_type {
DataType::Boolean => {
let bool = get_column_inner::<bool>(postgres_row, i, nullable)?;
row.push(bool.into());
}
DataType::Char(_) | DataType::Varchar(_) | DataType::Text => {
let string = get_column_inner::<String>(postgres_row, i, nullable)?;
row.push(string.as_deref().into());
}
DataType::SmallInt => {
let i = get_column_inner::<i16>(postgres_row, i, nullable)?.map(i32::from);
row.push(i.into());
}
DataType::Int => {
let i = get_column_inner::<i32>(postgres_row, i, nullable)?;
row.push(i.into());
}
DataType::BigInt => {
let i = get_column_inner::<i64>(postgres_row, i, nullable)?;
row.push(i.into());
}
DataType::Float(p) => {
if p.unwrap_or(53) <= 24 {
let f = get_column_inner::<f32>(postgres_row, i, nullable)?.map(f64::from);
row.push(f.into());
} else {
let f = get_column_inner::<f64>(postgres_row, i, nullable)?;
row.push(f.into());
}
}
DataType::Real => {
let f = get_column_inner::<f32>(postgres_row, i, nullable)?.map(f64::from);
row.push(f.into());
}
DataType::Double => {
let f = get_column_inner::<f64>(postgres_row, i, nullable)?;
row.push(f.into());
}
DataType::Date => {
let d: chrono::NaiveDate =
get_column_inner::<chrono::NaiveDate>(postgres_row, i, nullable)?.unwrap();
row.push(Datum::Date(d));
}
DataType::Timestamp => {
let d: chrono::NaiveDateTime =
get_column_inner::<chrono::NaiveDateTime>(postgres_row, i, nullable)?.unwrap();
row.push(Datum::Timestamp(d));
}
DataType::TimestampTz => {
let d: chrono::DateTime<Utc> =
get_column_inner::<chrono::DateTime<Utc>>(postgres_row, i, nullable)?.unwrap();
row.push(Datum::TimestampTz(d));
}
DataType::Interval => {
let iv = get_column_inner::<pgrepr::Interval>(postgres_row, i, nullable)?.unwrap();
row.push(Datum::Interval(iv.0));
}
DataType::Decimal(_, _) => {
let desired_scale = match scalar_type_from_sql(sql_type).unwrap() {
ScalarType::Decimal(_precision, desired_scale) => desired_scale,
_ => unreachable!(),
};
match get_column_inner::<pgrepr::Numeric>(postgres_row, i, nullable)? {
None => row.push(Datum::Null),
Some(d) => {
let mut significand = d.0.significand();
// TODO(jamii) lots of potential for unchecked edge cases here eg 10^scale_correction could overflow
                    // current representation is `significand * 10^(-current_scale)`
                    // want to get to `significand2 * 10^(-desired_scale)`
                    // so `significand2 = significand * 10^(desired_scale - current_scale)`
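                    // For example (illustrative), converting 1.23 from scale 2 to
                    // scale 4: the significand 123 is multiplied by 10^2 to give
                    // 12300, and 12300 * 10^(-4) is still 1.23.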
let scale_correction = (d.0.scale() as isize) - (desired_scale as isize);
if scale_correction > 0 {
significand /= 10i128.pow(scale_correction.try_into()?);
} else {
significand *= 10i128.pow((-scale_correction).try_into()?);
};
row.push(Significand::new(significand).into());
}
}
}
DataType::Bytea => {
let bytes = get_column_inner::<Vec<u8>>(postgres_row, i, nullable)?;
row.push(bytes.as_deref().into());
}
DataType::Jsonb => {
let jsonb = get_column_inner::<Jsonb>(postgres_row, i, nullable)?;
if let Some(jsonb) = jsonb {
row.extend_by_row(&jsonb.0.into_row())
} else {
row.push(Datum::Null)
}
}
DataType::Uuid => {
let u = get_column_inner::<Uuid>(postgres_row, i, nullable)?.unwrap();
row.push(Datum::UUID(u));
}
_ => bail!(
"Postgres to materialize conversion not yet supported for {:?}",
sql_type
),
}
Ok(row)
}
fn get_column_inner<'a, T>(
postgres_row: &'a tokio_postgres::Row,
i: usize,
nullable: bool,
) -> Result<Option<T>, anyhow::Error>
where
T: FromSql<'a>,
{
if nullable {
let value: Option<T> = postgres_row.try_get(i)?;
Ok(value)
} else {
let value: T = postgres_row.try_get(i)?;
Ok(Some(value))
}
}
| 38.088235 | 149 | 0.482736 |
0e1073653f3e38e0093b72012c5eb78399bfce94
| 29,197 |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Array types
use std::any::Any;
use std::convert::From;
use std::mem;
use std::ptr;
use std::sync::Arc;
use array_data::*;
use buffer::*;
use datatypes::*;
use memory;
use util::bit_util;
/// Trait for dealing with different types of array at runtime when the type of the
/// array is not known in advance
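///
/// # Example
///
/// An illustrative sketch; `sum_i32` is a hypothetical helper, not part of this
/// module:
///
/// ```ignore
/// fn sum_i32(array: &ArrayRef) -> i32 {
///     let typed = array
///         .as_any()
///         .downcast_ref::<PrimitiveArray<i32>>()
///         .expect("expected an Int32 array");
///     (0..typed.len()).map(|i| typed.value(i)).sum()
/// }
/// ```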
pub trait Array: Send + Sync {
/// Returns the array as `Any` so that it can be downcast to a specific implementation
fn as_any(&self) -> &Any;
/// Returns a reference-counted pointer to the data of this array
fn data(&self) -> ArrayDataRef;
/// Returns a borrowed & reference-counted pointer to the data of this array
fn data_ref(&self) -> &ArrayDataRef;
/// Returns a reference to the data type of this array
fn data_type(&self) -> &DataType {
self.data_ref().data_type()
}
/// Returns the length (i.e., number of elements) of this array
fn len(&self) -> i64 {
self.data().len()
}
/// Returns the offset of this array
fn offset(&self) -> i64 {
self.data().offset()
}
/// Returns whether the element at index `i` is null
fn is_null(&self, i: i64) -> bool {
self.data().is_null(i)
}
/// Returns whether the element at index `i` is not null
fn is_valid(&self, i: i64) -> bool {
self.data().is_valid(i)
}
/// Returns the total number of nulls in this array
fn null_count(&self) -> i64 {
self.data().null_count()
}
}
pub type ArrayRef = Arc<Array>;
/// Constructs an array using the input `data`. Returns a reference-counted `Array`
/// instance.
fn make_array(data: ArrayDataRef) -> ArrayRef {
// TODO: here data_type() needs to clone the type - maybe add a type tag enum to
// avoid the cloning.
match data.data_type().clone() {
DataType::Boolean => Arc::new(PrimitiveArray::<bool>::from(data)) as ArrayRef,
DataType::Int8 => Arc::new(PrimitiveArray::<i8>::from(data)) as ArrayRef,
DataType::Int16 => Arc::new(PrimitiveArray::<i16>::from(data)) as ArrayRef,
DataType::Int32 => Arc::new(PrimitiveArray::<i32>::from(data)) as ArrayRef,
DataType::Int64 => Arc::new(PrimitiveArray::<i64>::from(data)) as ArrayRef,
DataType::UInt8 => Arc::new(PrimitiveArray::<u8>::from(data)) as ArrayRef,
DataType::UInt16 => Arc::new(PrimitiveArray::<u16>::from(data)) as ArrayRef,
DataType::UInt32 => Arc::new(PrimitiveArray::<u32>::from(data)) as ArrayRef,
DataType::UInt64 => Arc::new(PrimitiveArray::<u64>::from(data)) as ArrayRef,
DataType::Float32 => Arc::new(PrimitiveArray::<f32>::from(data)) as ArrayRef,
DataType::Float64 => Arc::new(PrimitiveArray::<f64>::from(data)) as ArrayRef,
DataType::Utf8 => Arc::new(BinaryArray::from(data)) as ArrayRef,
DataType::List(_) => Arc::new(ListArray::from(data)) as ArrayRef,
DataType::Struct(_) => Arc::new(StructArray::from(data)) as ArrayRef,
dt => panic!("Unexpected data type {:?}", dt),
}
}
/// ----------------------------------------------------------------------------
/// Implementations of different array types
struct RawPtrBox<T> {
inner: *const T,
}
impl<T> RawPtrBox<T> {
fn new(inner: *const T) -> Self {
Self { inner }
}
fn get(&self) -> *const T {
self.inner
}
}
unsafe impl<T> Send for RawPtrBox<T> {}
unsafe impl<T> Sync for RawPtrBox<T> {}
/// Array whose elements are of primitive types.
pub struct PrimitiveArray<T: ArrowPrimitiveType> {
data: ArrayDataRef,
/// Pointer to the value array. The lifetime of this must be <= to the value buffer
/// stored in `data`, so it's safe to store.
raw_values: RawPtrBox<T>,
}
/// Macro to define primitive arrays for different data types and native types.
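///
/// For example, the `DataType::Int32` invocation below generates `PrimitiveArray<i32>`,
/// which can be built from a plain vector (illustrative usage):
///
/// ```ignore
/// let array = PrimitiveArray::<i32>::from(vec![1, 2, 3]);
/// assert_eq!(array.len(), 3);
/// assert_eq!(array.value(1), 2);
/// ```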
macro_rules! def_primitive_array {
($data_ty:path, $native_ty:ident) => {
impl PrimitiveArray<$native_ty> {
pub fn new(length: i64, values: Buffer, null_count: i64, offset: i64) -> Self {
let array_data = ArrayData::builder($data_ty)
.len(length)
.add_buffer(values)
.null_count(null_count)
.offset(offset)
.build();
PrimitiveArray::from(array_data)
}
            /// Returns a `Buffer` holding all the values of this array.
            ///
            /// Note this doesn't take the offset of this array into account.
pub fn values(&self) -> Buffer {
self.data.buffers()[0].clone()
}
/// Returns a raw pointer to the values of this array.
pub fn raw_values(&self) -> *const $native_ty {
unsafe { mem::transmute(self.raw_values.get().offset(self.data.offset() as isize)) }
}
/// Returns the primitive value at index `i`.
///
            /// Note this doesn't do any bound checking, for performance reasons.
pub fn value(&self, i: i64) -> $native_ty {
unsafe { *(self.raw_values().offset(i as isize)) }
}
/// Returns the minimum value in the array, according to the natural order.
pub fn min(&self) -> Option<$native_ty> {
self.min_max_helper(|a, b| a < b)
}
/// Returns the maximum value in the array, according to the natural order.
pub fn max(&self) -> Option<$native_ty> {
self.min_max_helper(|a, b| a > b)
}
fn min_max_helper<F>(&self, cmp: F) -> Option<$native_ty>
where
F: Fn($native_ty, $native_ty) -> bool,
{
let mut n: Option<$native_ty> = None;
let data = self.data();
for i in 0..data.len() {
if data.is_null(i) {
continue;
}
let m = self.value(i as i64);
match n {
None => n = Some(m),
Some(nn) => if cmp(m, nn) {
n = Some(m)
},
}
}
n
}
}
/// Constructs a primitive array from a vector. Should only be used for testing.
impl From<Vec<$native_ty>> for PrimitiveArray<$native_ty> {
fn from(data: Vec<$native_ty>) -> Self {
let array_data = ArrayData::builder($data_ty)
.len(data.len() as i64)
.add_buffer(Buffer::from(data.to_byte_slice()))
.build();
PrimitiveArray::from(array_data)
}
}
impl From<Vec<Option<$native_ty>>> for PrimitiveArray<$native_ty> {
fn from(data: Vec<Option<$native_ty>>) -> Self {
const TY_SIZE: usize = mem::size_of::<$native_ty>();
const NULL: [u8; TY_SIZE] = [0; TY_SIZE];
let data_len = data.len() as i64;
let size = bit_util::round_upto_multiple_of_64(data_len) as usize;
let mut null_buffer = Vec::with_capacity(size);
unsafe {
ptr::write_bytes(null_buffer.as_mut_ptr(), 0, size);
null_buffer.set_len(size);
}
let mut value_buffer: Vec<u8> = Vec::with_capacity(size * TY_SIZE);
let mut i = 0;
for n in data {
if let Some(v) = n {
bit_util::set_bit(&mut null_buffer[..], i);
value_buffer.extend_from_slice(&v.to_byte_slice());
} else {
value_buffer.extend_from_slice(&NULL);
}
i += 1;
}
let array_data = ArrayData::builder($data_ty)
.len(data_len)
                .add_buffer(Buffer::from(value_buffer))
.null_bit_buffer(Buffer::from(null_buffer))
.build();
PrimitiveArray::from(array_data)
}
}
};
}
/// Constructs a `PrimitiveArray` from an array data reference.
impl<T: ArrowPrimitiveType> From<ArrayDataRef> for PrimitiveArray<T> {
fn from(data: ArrayDataRef) -> Self {
assert_eq!(data.buffers().len(), 1);
let raw_values = data.buffers()[0].raw_data();
assert!(memory::is_aligned::<u8>(raw_values, mem::align_of::<T>()));
Self {
data,
raw_values: RawPtrBox::new(raw_values as *const T),
}
}
}
impl<T: ArrowPrimitiveType> Array for PrimitiveArray<T> {
fn as_any(&self) -> &Any {
self
}
fn data(&self) -> ArrayDataRef {
self.data.clone()
}
fn data_ref(&self) -> &ArrayDataRef {
&self.data
}
}
def_primitive_array!(DataType::Boolean, bool);
def_primitive_array!(DataType::UInt8, u8);
def_primitive_array!(DataType::UInt16, u16);
def_primitive_array!(DataType::UInt32, u32);
def_primitive_array!(DataType::UInt64, u64);
def_primitive_array!(DataType::Int8, i8);
def_primitive_array!(DataType::Int16, i16);
def_primitive_array!(DataType::Int32, i32);
def_primitive_array!(DataType::Int64, i64);
def_primitive_array!(DataType::Float32, f32);
def_primitive_array!(DataType::Float64, f64);
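// Illustrative usage sketch (not part of the original API surface; it mirrors the unit tests
// below): a primitive array can be built straight from a `Vec` of native values, after which
// per-element access and the min/max helpers generated by the macro above are available.
#[allow(dead_code)]
fn example_primitive_array_usage() {
    let a = PrimitiveArray::<i32>::from(vec![5, 6, 7, 8, 9]);
    assert_eq!(5, a.len());
    assert_eq!(7, a.value(2));
    assert_eq!(Some(5), a.min());
    assert_eq!(Some(9), a.max());
}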
/// A list array where each element is a variable-sized sequence of values with the same
/// type.
pub struct ListArray {
data: ArrayDataRef,
values: ArrayRef,
value_offsets: RawPtrBox<i32>,
}
impl ListArray {
    /// Returns a reference to the values of this list.
pub fn values(&self) -> ArrayRef {
self.values.clone()
}
/// Returns a clone of the value type of this list.
pub fn value_type(&self) -> DataType {
self.values.data().data_type().clone()
}
/// Returns the offset for value at index `i`.
///
    /// Note this doesn't do any bounds checking, for performance reasons.
#[inline]
pub fn value_offset(&self, i: i64) -> i32 {
self.value_offset_at(self.data.offset() + i)
}
/// Returns the length for value at index `i`.
///
    /// Note this doesn't do any bounds checking, for performance reasons.
#[inline]
pub fn value_length(&self, mut i: i64) -> i32 {
i += self.data.offset();
self.value_offset_at(i + 1) - self.value_offset_at(i)
}
#[inline]
fn value_offset_at(&self, i: i64) -> i32 {
unsafe { *self.value_offsets.get().offset(i as isize) }
}
}
/// Constructs a `ListArray` from an array data reference.
impl From<ArrayDataRef> for ListArray {
fn from(data: ArrayDataRef) -> Self {
assert_eq!(data.buffers().len(), 1);
assert_eq!(data.child_data().len(), 1);
let values = make_array(data.child_data()[0].clone());
let raw_value_offsets = data.buffers()[0].raw_data();
assert!(memory::is_aligned(
raw_value_offsets,
mem::align_of::<i32>()
));
let value_offsets = raw_value_offsets as *const i32;
unsafe {
assert_eq!(*value_offsets.offset(0), 0);
assert_eq!(
*value_offsets.offset(data.len() as isize),
values.data().len() as i32
);
}
Self {
data: data.clone(),
values,
value_offsets: RawPtrBox::new(value_offsets),
}
}
}
impl Array for ListArray {
fn as_any(&self) -> &Any {
self
}
fn data(&self) -> ArrayDataRef {
self.data.clone()
}
fn data_ref(&self) -> &ArrayDataRef {
&self.data
}
}
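// Illustrative usage sketch (not part of the original API surface; it mirrors `test_list_array`
// below): the child array holds the flattened values and the offsets buffer marks where each
// list element starts, so offsets [0, 3, 5, 7] split seven values into [[0, 1, 2], [3, 4], [5, 6]].
#[allow(dead_code)]
fn example_list_array_usage() {
    let value_data = ArrayData::builder(DataType::Int32)
        .len(7)
        .add_buffer(Buffer::from(&[0, 1, 2, 3, 4, 5, 6].to_byte_slice()))
        .build();
    let value_offsets = Buffer::from(&[0, 3, 5, 7].to_byte_slice());
    let list_data = ArrayData::builder(DataType::List(Box::new(DataType::Int32)))
        .len(3)
        .add_buffer(value_offsets)
        .add_child_data(value_data)
        .build();
    let list_array = ListArray::from(list_data);
    assert_eq!(3, list_array.len());
    // The second list starts at offset 3 and holds two values.
    assert_eq!(3, list_array.value_offset(1));
    assert_eq!(2, list_array.value_length(1));
}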
/// A special type of `ListArray` whose elements are binaries.
pub struct BinaryArray {
data: ArrayDataRef,
value_offsets: RawPtrBox<i32>,
value_data: RawPtrBox<u8>,
}
impl BinaryArray {
/// Returns the element at index `i` as a byte slice.
pub fn get_value(&self, i: i64) -> &[u8] {
assert!(i >= 0 && i < self.data.len());
let offset = i.checked_add(self.data.offset()).unwrap();
unsafe {
let pos = self.value_offset_at(offset);
::std::slice::from_raw_parts(
self.value_data.get().offset(pos as isize),
(self.value_offset_at(offset + 1) - pos) as usize,
)
}
}
/// Returns the element at index `i` as a string.
///
    /// Note this doesn't do any bounds checking, for performance reasons.
pub fn get_string(&self, i: i64) -> String {
let slice = self.get_value(i);
unsafe { String::from_utf8_unchecked(Vec::from(slice)) }
}
/// Returns the offset for the element at index `i`.
///
    /// Note this doesn't do any bounds checking, for performance reasons.
#[inline]
pub fn value_offset(&self, i: i64) -> i32 {
self.value_offset_at(i)
}
/// Returns the length for the element at index `i`.
///
    /// Note this doesn't do any bounds checking, for performance reasons.
#[inline]
pub fn value_length(&self, mut i: i64) -> i32 {
i += self.data.offset();
self.value_offset_at(i + 1) - self.value_offset_at(i)
}
#[inline]
fn value_offset_at(&self, i: i64) -> i32 {
unsafe { *self.value_offsets.get().offset(i as isize) }
}
}
impl From<ArrayDataRef> for BinaryArray {
fn from(data: ArrayDataRef) -> Self {
assert_eq!(data.buffers().len(), 2);
let raw_value_offsets = data.buffers()[0].raw_data();
assert!(memory::is_aligned(
raw_value_offsets,
mem::align_of::<i32>()
));
let value_data = data.buffers()[1].raw_data();
Self {
data: data.clone(),
value_offsets: RawPtrBox::new(raw_value_offsets as *const i32),
value_data: RawPtrBox::new(value_data),
}
}
}
impl<'a> From<Vec<&'a str>> for BinaryArray {
fn from(v: Vec<&'a str>) -> Self {
let mut offsets = vec![];
let mut values = vec![];
let mut length_so_far = 0;
offsets.push(length_so_far);
for s in &v {
length_so_far += s.len() as i32;
offsets.push(length_so_far as i32);
values.extend_from_slice(s.as_bytes());
}
let array_data = ArrayData::builder(DataType::Utf8)
.len(v.len() as i64)
.add_buffer(Buffer::from(offsets.to_byte_slice()))
.add_buffer(Buffer::from(&values[..]))
.build();
BinaryArray::from(array_data)
}
}
impl Array for BinaryArray {
fn as_any(&self) -> &Any {
self
}
fn data(&self) -> ArrayDataRef {
self.data.clone()
}
fn data_ref(&self) -> &ArrayDataRef {
&self.data
}
}
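// Illustrative usage sketch (not part of the original API surface): the `From<Vec<&str>>`
// impl above packs the bytes contiguously and records the offsets, so elements can be read
// back as byte slices or strings.
#[allow(dead_code)]
fn example_binary_array_usage() {
    let a = BinaryArray::from(vec!["hello", "", "parquet"]);
    assert_eq!(3, a.len());
    assert_eq!("hello", a.get_string(0));
    assert_eq!("", a.get_string(1));
    assert_eq!(7, a.value_length(2));
}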
/// A nested array type where each child (called *field*) is represented by a separate array.
pub struct StructArray {
data: ArrayDataRef,
boxed_fields: Vec<ArrayRef>,
}
impl StructArray {
/// Returns the field at `pos`.
pub fn column(&self, pos: usize) -> &ArrayRef {
&self.boxed_fields[pos]
}
}
impl From<ArrayDataRef> for StructArray {
fn from(data: ArrayDataRef) -> Self {
let mut boxed_fields = vec![];
for cd in data.child_data() {
boxed_fields.push(make_array(cd.clone()));
}
Self { data, boxed_fields }
}
}
impl Array for StructArray {
fn as_any(&self) -> &Any {
self
}
fn data(&self) -> ArrayDataRef {
self.data.clone()
}
fn data_ref(&self) -> &ArrayDataRef {
&self.data
}
}
impl From<Vec<(Field, ArrayRef)>> for StructArray {
fn from(v: Vec<(Field, ArrayRef)>) -> Self {
let (field_types, field_values): (Vec<_>, Vec<_>) = v.into_iter().unzip();
let data = ArrayData::builder(DataType::Struct(field_types))
.child_data(field_values.into_iter().map(|a| a.data()).collect())
.build();
StructArray::from(data)
}
}
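// Illustrative usage sketch (not part of the original API surface): build a two-field struct
// array from named child arrays using the `From<Vec<(Field, ArrayRef)>>` impl above.
#[allow(dead_code)]
fn example_struct_array_usage() {
    let a = Arc::new(PrimitiveArray::<i32>::from(vec![1, 2, 3])) as ArrayRef;
    let b = Arc::new(PrimitiveArray::<i64>::from(vec![10, 20, 30])) as ArrayRef;
    let s = StructArray::from(vec![
        (Field::new("a", DataType::Int32, false), a),
        (Field::new("b", DataType::Int64, false), b),
    ]);
    // Each column keeps the data of the corresponding child array.
    assert_eq!(3, s.column(0).data().len());
    assert_eq!(3, s.column(1).data().len());
}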
#[cfg(test)]
mod tests {
use std::thread;
use super::{Array, BinaryArray, ListArray, PrimitiveArray, StructArray};
use array_data::ArrayData;
use buffer::Buffer;
use datatypes::{DataType, Field, ToByteSlice};
use memory;
#[test]
fn test_primitive_array_from_vec() {
let buf = Buffer::from(&[0, 1, 2, 3, 4].to_byte_slice());
let buf2 = buf.clone();
let pa = PrimitiveArray::<i32>::new(5, buf, 0, 0);
let slice = unsafe { ::std::slice::from_raw_parts(pa.raw_values(), 5) };
assert_eq!(buf2, pa.values());
assert_eq!(&[0, 1, 2, 3, 4], slice);
assert_eq!(5, pa.len());
assert_eq!(0, pa.offset());
assert_eq!(0, pa.null_count());
for i in 0..5 {
assert!(!pa.is_null(i));
assert!(pa.is_valid(i));
assert_eq!(i as i32, pa.value(i));
}
}
#[test]
fn test_primitive_array_from_vec_option() {
// Test building a primitive array with null values
let pa = PrimitiveArray::<i32>::from(vec![Some(0), None, Some(2), None, Some(4)]);
assert_eq!(5, pa.len());
assert_eq!(0, pa.offset());
assert_eq!(2, pa.null_count());
for i in 0..5 {
if i % 2 == 0 {
assert!(!pa.is_null(i));
assert!(pa.is_valid(i));
assert_eq!(i as i32, pa.value(i));
} else {
assert!(pa.is_null(i));
assert!(!pa.is_valid(i));
}
}
}
#[test]
fn test_primitive_array_builder() {
        // Test building a primitive array with the ArrayData builder and an offset
let buf = Buffer::from(&[0, 1, 2, 3, 4].to_byte_slice());
let buf2 = buf.clone();
let data = ArrayData::builder(DataType::Int32)
.len(5)
.offset(2)
.add_buffer(buf)
.build();
let pa = PrimitiveArray::<i32>::from(data);
assert_eq!(buf2, pa.values());
assert_eq!(5, pa.len());
assert_eq!(0, pa.null_count());
for i in 0..3 {
assert_eq!((i + 2) as i32, pa.value(i));
}
}
#[test]
#[should_panic(expected = "")]
fn test_primitive_array_invalid_buffer_len() {
let data = ArrayData::builder(DataType::Int32).len(5).build();
PrimitiveArray::<i32>::from(data);
}
#[test]
fn test_list_array() {
// Construct a value array
let value_data = ArrayData::builder(DataType::Int32)
.len(7)
.add_buffer(Buffer::from(&[0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice()))
.build();
// Construct a buffer for value offsets, for the nested array:
// [[0, 1, 2], [3, 4, 5], [6, 7]]
let value_offsets = Buffer::from(&[0, 2, 5, 7].to_byte_slice());
// Construct a list array from the above two
let list_data_type = DataType::List(Box::new(DataType::Int32));
let list_data = ArrayData::builder(list_data_type.clone())
.len(3)
.add_buffer(value_offsets.clone())
.add_child_data(value_data.clone())
.build();
let list_array = ListArray::from(list_data);
let values = list_array.values();
assert_eq!(value_data, values.data());
assert_eq!(DataType::Int32, list_array.value_type());
assert_eq!(3, list_array.len());
assert_eq!(0, list_array.null_count());
assert_eq!(5, list_array.value_offset(2));
assert_eq!(2, list_array.value_length(2));
for i in 0..3 {
assert!(list_array.is_valid(i as i64));
assert!(!list_array.is_null(i as i64));
}
// Now test with a non-zero offset
let list_data = ArrayData::builder(list_data_type)
.len(3)
.offset(1)
.add_buffer(value_offsets)
.add_child_data(value_data.clone())
.build();
let list_array = ListArray::from(list_data);
let values = list_array.values();
assert_eq!(value_data, values.data());
assert_eq!(DataType::Int32, list_array.value_type());
assert_eq!(3, list_array.len());
assert_eq!(0, list_array.null_count());
assert_eq!(5, list_array.value_offset(1));
assert_eq!(2, list_array.value_length(1));
}
#[test]
#[should_panic(expected = "")]
fn test_list_array_invalid_buffer_len() {
let value_data = ArrayData::builder(DataType::Int32)
.len(7)
.add_buffer(Buffer::from(&[0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice()))
.build();
let list_data_type = DataType::List(Box::new(DataType::Int32));
let list_data = ArrayData::builder(list_data_type)
.len(3)
.add_child_data(value_data)
.build();
ListArray::from(list_data);
}
#[test]
#[should_panic(expected = "")]
fn test_list_array_invalid_child_array_len() {
let value_offsets = Buffer::from(&[0, 2, 5, 7].to_byte_slice());
let list_data_type = DataType::List(Box::new(DataType::Int32));
let list_data = ArrayData::builder(list_data_type)
.len(3)
.add_buffer(value_offsets)
.build();
ListArray::from(list_data);
}
#[test]
#[should_panic(expected = "")]
fn test_list_array_invalid_value_offset_start() {
let value_data = ArrayData::builder(DataType::Int32)
.len(7)
.add_buffer(Buffer::from(&[0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice()))
.build();
let value_offsets = Buffer::from(&[2, 2, 5, 7].to_byte_slice());
let list_data_type = DataType::List(Box::new(DataType::Int32));
let list_data = ArrayData::builder(list_data_type.clone())
.len(3)
.add_buffer(value_offsets.clone())
.add_child_data(value_data.clone())
.build();
ListArray::from(list_data);
}
#[test]
#[should_panic(expected = "")]
fn test_list_array_invalid_value_offset_end() {
let value_data = ArrayData::builder(DataType::Int32)
.len(7)
.add_buffer(Buffer::from(&[0, 1, 2, 3, 4, 5, 6, 7].to_byte_slice()))
.build();
let value_offsets = Buffer::from(&[0, 2, 5, 8].to_byte_slice());
let list_data_type = DataType::List(Box::new(DataType::Int32));
let list_data = ArrayData::builder(list_data_type.clone())
.len(3)
.add_buffer(value_offsets.clone())
.add_child_data(value_data.clone())
.build();
ListArray::from(list_data);
}
#[test]
fn test_binary_array() {
let values: [u8; 12] = [
b'h', b'e', b'l', b'l', b'o', b'p', b'a', b'r', b'q', b'u', b'e', b't',
];
let offsets: [i32; 4] = [0, 5, 5, 12];
// Array data: ["hello", "", "parquet"]
let array_data = ArrayData::builder(DataType::Utf8)
.len(3)
.add_buffer(Buffer::from(offsets.to_byte_slice()))
.add_buffer(Buffer::from(&values[..]))
.build();
let binary_array = BinaryArray::from(array_data);
assert_eq!(3, binary_array.len());
assert_eq!(0, binary_array.null_count());
assert_eq!([b'h', b'e', b'l', b'l', b'o'], binary_array.get_value(0));
assert_eq!("hello", binary_array.get_string(0));
assert_eq!([] as [u8; 0], binary_array.get_value(1));
assert_eq!("", binary_array.get_string(1));
assert_eq!(
[b'p', b'a', b'r', b'q', b'u', b'e', b't'],
binary_array.get_value(2)
);
assert_eq!("parquet", binary_array.get_string(2));
assert_eq!(5, binary_array.value_offset(2));
assert_eq!(7, binary_array.value_length(2));
for i in 0..3 {
assert!(binary_array.is_valid(i as i64));
assert!(!binary_array.is_null(i as i64));
}
// Test binary array with offset
let array_data = ArrayData::builder(DataType::Utf8)
.len(4)
.offset(1)
.add_buffer(Buffer::from(offsets.to_byte_slice()))
.add_buffer(Buffer::from(&values[..]))
.build();
let binary_array = BinaryArray::from(array_data);
assert_eq!(
[b'p', b'a', b'r', b'q', b'u', b'e', b't'],
binary_array.get_value(1)
);
assert_eq!("parquet", binary_array.get_string(1));
}
#[test]
#[should_panic(expected = "")]
fn test_binary_array_get_value_index_out_of_bound() {
let values: [u8; 12] = [
b'h', b'e', b'l', b'l', b'o', b'p', b'a', b'r', b'q', b'u', b'e', b't',
];
let offsets: [i32; 4] = [0, 5, 5, 12];
let array_data = ArrayData::builder(DataType::Utf8)
.len(3)
.add_buffer(Buffer::from(offsets.to_byte_slice()))
.add_buffer(Buffer::from(&values[..]))
.build();
let binary_array = BinaryArray::from(array_data);
binary_array.get_value(4);
}
#[test]
fn test_struct_array() {
let boolean_data = ArrayData::builder(DataType::Boolean)
.len(4)
.add_buffer(Buffer::from([false, false, true, true].to_byte_slice()))
.build();
let int_data = ArrayData::builder(DataType::Int64)
.len(4)
.add_buffer(Buffer::from([42, 28, 19, 31].to_byte_slice()))
.build();
let mut field_types = vec![];
field_types.push(Field::new("a", DataType::Boolean, false));
field_types.push(Field::new("b", DataType::Int64, false));
let struct_array_data = ArrayData::builder(DataType::Struct(field_types))
.add_child_data(boolean_data.clone())
.add_child_data(int_data.clone())
.build();
let struct_array = StructArray::from(struct_array_data);
assert_eq!(boolean_data, struct_array.column(0).data());
assert_eq!(int_data, struct_array.column(1).data());
}
#[test]
#[should_panic(expected = "")]
fn test_primitive_array_alignment() {
let ptr = memory::allocate_aligned(8).unwrap();
let buf = Buffer::from_raw_parts(ptr, 8);
let buf2 = buf.slice(1);
let array_data = ArrayData::builder(DataType::Int32).add_buffer(buf2).build();
PrimitiveArray::<i32>::from(array_data);
}
#[test]
#[should_panic(expected = "")]
fn test_list_array_alignment() {
let ptr = memory::allocate_aligned(8).unwrap();
let buf = Buffer::from_raw_parts(ptr, 8);
let buf2 = buf.slice(1);
let values: [i32; 8] = [0; 8];
let value_data = ArrayData::builder(DataType::Int32)
.add_buffer(Buffer::from(values.to_byte_slice()))
.build();
let list_data_type = DataType::List(Box::new(DataType::Int32));
let list_data = ArrayData::builder(list_data_type.clone())
.add_buffer(buf2)
.add_child_data(value_data.clone())
.build();
ListArray::from(list_data);
}
#[test]
#[should_panic(expected = "")]
fn test_binary_array_alignment() {
let ptr = memory::allocate_aligned(8).unwrap();
let buf = Buffer::from_raw_parts(ptr, 8);
let buf2 = buf.slice(1);
let values: [u8; 12] = [0; 12];
let array_data = ArrayData::builder(DataType::Utf8)
.add_buffer(buf2)
.add_buffer(Buffer::from(&values[..]))
.build();
BinaryArray::from(array_data);
}
#[test]
fn test_buffer_array_min_max() {
let a = PrimitiveArray::<i32>::from(vec![5, 6, 7, 8, 9]);
assert_eq!(5, a.min().unwrap());
assert_eq!(9, a.max().unwrap());
}
#[test]
fn test_buffer_array_min_max_with_nulls() {
let a = PrimitiveArray::<i32>::from(vec![Some(5), None, None, Some(8), Some(9)]);
assert_eq!(5, a.min().unwrap());
assert_eq!(9, a.max().unwrap());
}
#[test]
fn test_access_array_concurrently() {
let a = PrimitiveArray::<i32>::from(vec![5, 6, 7, 8, 9]);
let ret = thread::spawn(move || a.value(3)).join();
assert!(ret.is_ok());
assert_eq!(8, ret.ok().unwrap());
}
}
| 34.068845 | 100 | 0.56177 |
619a5b6afe624dacfefd58bed775fc8be628509d
| 6,477 |
// Copyright (c) 2018-2021 The MobileCoin Foundation
//! Ledger Sync test app
use mc_account_keys::AccountKey;
use mc_attest_core::{MrSignerVerifier, Verifier, DEBUG_ENCLAVE};
use mc_common::{logger::log, ResponderId};
use mc_connection::{ConnectionManager, HardcodedCredentialsProvider, ThickClient};
use mc_consensus_scp::{test_utils::test_node_id, QuorumSet};
use mc_ledger_db::{Ledger, LedgerDB};
use mc_ledger_sync::{LedgerSync, LedgerSyncService, PollingNetworkState};
use mc_transaction_core::{Block, BlockContents};
use mc_util_uri::ConsensusClientUri as ClientUri;
use std::{path::PathBuf, str::FromStr, sync::Arc};
use tempdir::TempDir;
const NETWORK: &str = "test";
fn _make_ledger_long(ledger: &mut LedgerDB) {
use rand::{rngs::StdRng, SeedableRng};
let num_blocks = ledger.num_blocks().unwrap();
let last_block = ledger.get_block(num_blocks - 1).unwrap();
assert_eq!(last_block.cumulative_txo_count, ledger.num_txos().unwrap());
let mut rng: StdRng = SeedableRng::from_seed([1u8; 32]);
let accounts: Vec<AccountKey> = (0..20).map(|_i| AccountKey::random(&mut rng)).collect();
let recipient_pub_keys = accounts
.iter()
.map(|account| account.default_subaddress())
.collect::<Vec<_>>();
let results: Vec<(Block, BlockContents)> = mc_transaction_core_test_utils::get_blocks(
&recipient_pub_keys[..],
1,
1000,
1000,
&last_block,
&mut rng,
);
for (block, block_contents) in &results {
println!("block {} containing {:?}", block.index, block_contents);
ledger.append_block(block, block_contents, None).unwrap();
assert_eq!(block.cumulative_txo_count, ledger.num_txos().unwrap());
}
}
fn main() {
let (logger, _global_logger_guard) =
mc_common::logger::create_app_logger(mc_common::logger::o!());
log::info!(logger, "starting, network = {}", NETWORK);
// Get a ledger database to work on.
let ledger_dir =
TempDir::new("ledger_sync_test_app").expect("Could not get test_ledger tempdir");
let ledger_path = ledger_dir.path().to_path_buf();
let ledger_path_str = ledger_dir
.path()
.to_str()
.expect("Could not get ledger_path_str")
.to_string();
log::info!(logger, "ledger_path_str = {}", ledger_path_str);
// Hack to make the ledger longer
if false {
// let mut ledger = LedgerDB::open(format!("../../target/sample_data/{}/ledger",
// NETWORK)).expect("Failed opening local LedgerDB");
let mut ledger = LedgerDB::open(PathBuf::from("../../target/sample_data/ledger"))
.expect("Failed opening local LedgerDB");
_make_ledger_long(&mut ledger);
return;
}
std::fs::copy(
"../../target/sample_data/ledger/data.mdb".to_string(),
format!("{}/data.mdb", ledger_path_str),
)
.expect("failed copying ledger");
let ledger = LedgerDB::open(ledger_path).expect("Failed opening local LedgerDB");
log::info!(
logger,
"num_blocks = {}, num_txos = {}",
ledger.num_blocks().unwrap(),
ledger.num_txos().unwrap()
);
// Set up connections.
let grpc_env = Arc::new(
grpcio::EnvBuilder::new()
.name_prefix("Test-RPC".to_string())
.build(),
);
let mut mr_signer_verifier =
MrSignerVerifier::from(mc_consensus_enclave_measurement::sigstruct());
mr_signer_verifier.allow_hardening_advisory("INTEL-SA-00334");
let mut verifier = Verifier::default();
verifier.mr_signer(mr_signer_verifier).debug(DEBUG_ENCLAVE);
log::debug!(logger, "Verifier: {:?}", verifier);
let peers = vec!["1", "2", "3", "4"]
.into_iter()
.map(|node_id| {
let node_uri =
ClientUri::from_str(&format!("mc://node{}.{}.mobilecoin.com/", node_id, NETWORK))
.expect("failed parsing URI");
ThickClient::new(
node_uri.clone(),
verifier.clone(),
grpc_env.clone(),
HardcodedCredentialsProvider::from(&node_uri),
logger.clone(),
)
.expect("Could not construct ThickClient")
})
.collect();
let conn_manager = ConnectionManager::new(peers, logger.clone());
// Create network state.
let node_1 = test_node_id(1);
let node_2 = test_node_id(2);
let node_3 = test_node_id(3);
let node_4 = test_node_id(4);
let quorum_set: QuorumSet<ResponderId> = QuorumSet::new_with_node_ids(
3,
vec![
node_1.responder_id,
node_2.responder_id,
node_3.responder_id,
node_4.responder_id,
],
);
let mut network_state =
PollingNetworkState::new(quorum_set, conn_manager.clone(), logger.clone());
// Create ledger sync service.
/*
let transactions_fetcher =
mc_ledger_sync::ConnectionManagerTransactionsFetcher::new(conn_manager.clone(), logger.clone());
*/
let transactions_fetcher = mc_ledger_sync::ReqwestTransactionsFetcher::new(
vec![
String::from(
"https://s3-us-west-1.amazonaws.com/mobilecoin.chain/node2.test.mobilecoin.com/",
),
String::from(
"https://s3-us-west-1.amazonaws.com/mobilecoin.chain/node3.test.mobilecoin.com/",
),
String::from(
"https://s3-us-west-1.amazonaws.com/mobilecoin.chain/node4.test.mobilecoin.com/",
),
],
logger.clone(),
)
.expect("failed creating ReqwestTransactionsFetcher");
let mut sync_service = LedgerSyncService::new(
ledger.clone(),
conn_manager,
transactions_fetcher,
logger.clone(),
);
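    // Main loop: refresh the network state whenever we appear to be in sync, and attempt a
    // ledger sync pass whenever the local ledger falls behind the quorum.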
loop {
if !sync_service.is_behind(&network_state) {
network_state.poll();
}
log::info!(
logger,
"ledger sync service is_behind: {:?}",
sync_service.is_behind(&network_state)
);
if sync_service.is_behind(&network_state) {
let _ = sync_service.attempt_ledger_sync(&network_state, 10);
} else {
log::debug!(
logger,
"Sleeping, num_blocks = {}...",
ledger.num_blocks().unwrap()
);
std::thread::sleep(std::time::Duration::from_secs(10));
}
}
}
| 33.559585 | 104 | 0.607226 |
7593c2fe0fb565fbd415ec1c1408446b4ce1f820
| 13,337 |
use ignition_9p::{FileType, Qid, Stat, StatMode, UnixTriplet};
use std::collections::HashMap;
use std::convert::TryInto;
use crate::USER_NAME;
#[derive(Clone, Copy)]
struct FileIndex(usize);
#[derive(Clone, Copy)]
struct DirectoryIndex(usize);
/// A simple immutable file system.
pub struct FileSystem {
directories: Vec<InnerDirectory>,
files: Vec<InnerFile>,
}
impl FileSystem {
pub fn builder() -> builder::FileSystem {
builder::FileSystem::new()
}
pub fn root(&self) -> Directory<'_> {
Directory {
file_system: self,
index: DirectoryIndex(0),
}
}
fn file(&self, index: FileIndex) -> &InnerFile {
&self.files[index.0]
}
fn directory(&self, index: DirectoryIndex) -> &InnerDirectory {
&self.directories[index.0]
}
}
/// A reference to a directory or a file, borrowed from a file system.
#[derive(Clone, Copy)]
pub enum Node<'a> {
Directory(Directory<'a>),
File(File<'a>),
}
impl<'a> Node<'a> {
pub fn content(&self) -> &[u8] {
match self {
Node::Directory(directory) => directory.content(),
Node::File(file) => file.content(),
}
}
pub fn cut_points(&self) -> Option<&[usize]> {
match self {
Node::Directory(directory) => Some(directory.cut_points()),
Node::File(_) => None,
}
}
pub fn qid(&self) -> Qid {
match self {
Node::Directory(directory) => directory.qid(),
Node::File(file) => file.qid(),
}
}
pub fn stat(&self) -> Stat {
match self {
Node::Directory(directory) => directory.stat(),
Node::File(file) => file.stat(),
}
}
}
/// A reference to a directory, borrowed from a file system.
#[derive(Clone, Copy)]
pub struct Directory<'a> {
file_system: &'a FileSystem,
index: DirectoryIndex,
}
impl<'a> Directory<'a> {
fn get(&self) -> &InnerDirectory {
self.file_system.directory(self.index)
}
pub fn parent(&self) -> Directory<'a> {
Directory {
file_system: self.file_system,
index: self.get().parent,
}
}
pub fn content(&self) -> &[u8] {
&self.get().content
}
pub fn cut_points(&self) -> &[usize] {
&self.get().cut_points
}
pub fn entry(&self, name: &str) -> Option<Node<'a>> {
self.get()
.entries
.get(name)
.map(|x| x.to_node(self.file_system))
}
pub fn qid(&self) -> Qid {
self.get().qid()
}
pub fn stat(&self) -> Stat {
self.get().stat()
}
}
/// A reference to a file, borrowed from a file system.
#[derive(Clone, Copy)]
pub struct File<'a> {
file_system: &'a FileSystem,
index: FileIndex,
}
impl<'a> File<'a> {
fn get(&self) -> &InnerFile {
self.file_system.file(self.index)
}
pub fn content(&self) -> &[u8] {
&self.get().content
}
pub fn qid(&self) -> Qid {
self.get().qid()
}
pub fn stat(&self) -> Stat {
self.get().stat()
}
}
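// Illustrative usage sketch (not part of the original module): assemble a small tree with the
// builder, then walk it through the immutable `FileSystem` API. The names used here ("docs",
// "readme.txt") are arbitrary examples.
#[allow(dead_code)]
fn example_build_and_walk() {
    let mut b = FileSystem::builder();
    {
        let mut root = b.root();
        let mut docs = root.new_directory("docs").expect("fresh directory name");
        let mut readme = docs.new_file("readme.txt").expect("fresh file name");
        readme.set_content(b"hello".to_vec());
    }
    let fs = b.build();
    let docs = match fs.root().entry("docs") {
        Some(Node::Directory(dir)) => dir,
        _ => panic!("expected a directory"),
    };
    match docs.entry("readme.txt") {
        Some(Node::File(file)) => assert_eq!(b"hello", file.content()),
        _ => panic!("expected a file"),
    }
}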
#[derive(Clone, Copy)]
enum InnerNode {
Directory(DirectoryIndex),
File(FileIndex),
}
impl InnerNode {
fn to_node(self, file_system: &FileSystem) -> Node<'_> {
match self {
InnerNode::File(index) => Node::File(File { file_system, index }),
InnerNode::Directory(index) => Node::Directory(Directory { file_system, index }),
}
}
}
struct InnerDirectory {
parent: DirectoryIndex,
name: String,
content: Vec<u8>,
cut_points: Vec<usize>,
entries: HashMap<String, InnerNode>,
qid_path: u64,
}
impl InnerDirectory {
fn qid(&self) -> Qid {
Qid {
file_type: FileType::default().with_dir(true),
version: 0,
path: self.qid_path,
}
}
fn stat(&self) -> Stat {
let qid = self.qid();
Stat {
kernel_type: 0,
kernel_dev: 0,
qid,
mode: StatMode::default()
.with_file_type(qid.file_type)
.with_user(UnixTriplet::RWX)
.with_group(UnixTriplet::RX)
.with_other(UnixTriplet::RX),
atime: 0, // TODO: static timestamp 2020-01-01 00:00:00 UTC
mtime: 0,
length: 0,
name: self.name.clone(),
uid: USER_NAME.to_string(),
gid: USER_NAME.to_string(),
muid: USER_NAME.to_string(),
}
}
}
struct InnerFile {
name: String,
content: Vec<u8>,
qid_path: u64,
}
impl InnerFile {
pub fn qid(&self) -> Qid {
Qid {
file_type: FileType::default().with_dir(false),
version: 0,
path: self.qid_path,
}
}
pub fn stat(&self) -> Stat {
let qid = self.qid();
Stat {
kernel_type: 0,
kernel_dev: 0,
qid,
mode: StatMode::default()
.with_file_type(qid.file_type)
.with_user(UnixTriplet::RW)
.with_group(UnixTriplet::R)
.with_other(UnixTriplet::R),
atime: 0, // TODO: static timestamp 2020-01-01 00:00:00 UTC
mtime: 0,
length: self.content.len().try_into().unwrap(),
name: self.name.clone(),
uid: USER_NAME.to_string(),
gid: USER_NAME.to_string(),
muid: USER_NAME.to_string(),
}
}
}
pub mod builder {
use ignition_9p::wire::WriteTo;
use std::collections::HashMap;
use thiserror::Error;
use super::{DirectoryIndex, FileIndex};
/// A file system builder.
pub struct FileSystem {
directories: Vec<InnerDirectory>,
files: Vec<InnerFile>,
}
impl FileSystem {
pub fn new() -> FileSystem {
FileSystem {
directories: vec![InnerDirectory {
parent: DirectoryIndex(0),
name: "/".to_string(),
entries: HashMap::new(),
}],
files: vec![],
}
}
pub fn root(&mut self) -> Directory<'_> {
Directory {
file_system: self,
index: DirectoryIndex(0),
}
}
pub fn build(mut self) -> super::FileSystem {
let mut qid_path_factory = QidPathFactory { next_qid_path: 0 };
let mut fs = super::FileSystem {
directories: self
.directories
.drain(..)
.map(|directory| directory.build(&mut qid_path_factory))
.collect(),
files: self
.files
.drain(..)
.map(|file| file.build(&mut qid_path_factory))
.collect(),
};
            // Generate stat data for directories.
for i in 0..fs.directories.len() {
let mut content = vec![];
let mut cut_points = vec![0];
for node in fs.directories[i].entries.values().copied() {
match node {
super::InnerNode::Directory(index) => {
fs.directory(index).stat().write_to(&mut content).unwrap()
}
super::InnerNode::File(index) => {
fs.file(index).stat().write_to(&mut content).unwrap()
}
}
cut_points.push(content.len());
}
fs.directories[i].content = content;
fs.directories[i].cut_points = cut_points;
}
fs
}
fn push_file(&mut self, file: InnerFile) -> FileIndex {
let index = FileIndex(self.files.len());
self.files.push(file);
index
}
fn push_directory(&mut self, directory: InnerDirectory) -> DirectoryIndex {
let index = DirectoryIndex(self.directories.len());
self.directories.push(directory);
index
}
fn file(&mut self, index: FileIndex) -> &mut InnerFile {
&mut self.files[index.0]
}
fn directory(&mut self, index: DirectoryIndex) -> &mut InnerDirectory {
&mut self.directories[index.0]
}
}
/// A file builder, borrowed from a file system builder.
pub struct File<'a> {
file_system: &'a mut FileSystem,
index: FileIndex,
}
impl<'a> File<'a> {
fn get(&mut self) -> &mut InnerFile {
self.file_system.file(self.index)
}
pub fn set_content(&mut self, content: Vec<u8>) {
self.get().content = content;
}
}
/// A directory builder, borrowed from a file system builder.
pub struct Directory<'a> {
file_system: &'a mut FileSystem,
index: DirectoryIndex,
}
impl<'a> Directory<'a> {
fn get(&mut self) -> &mut InnerDirectory {
self.file_system.directory(self.index)
}
pub fn new_directory<'b>(
&'b mut self,
name: &str,
) -> Result<Directory<'b>, NewDirectoryError> {
if self.get().entries.contains_key(name) {
return Err(NewDirectoryError::AlreadyExists {
name: name.to_string(),
});
}
let new_directory_index = self.file_system.push_directory(InnerDirectory {
parent: self.index,
name: name.to_string(),
entries: HashMap::new(),
});
self.get()
.entries
.insert(name.to_string(), InnerNode::Directory(new_directory_index));
Ok(Directory {
file_system: self.file_system,
index: new_directory_index,
})
}
pub fn new_file<'b>(&'b mut self, name: &str) -> Result<File<'b>, NewFileError> {
if self.get().entries.contains_key(name) {
return Err(NewFileError::AlreadyExists {
name: name.to_string(),
});
}
let new_file_index = self.file_system.push_file(InnerFile {
name: name.to_string(),
content: vec![],
});
self.get()
.entries
.insert(name.to_string(), InnerNode::File(new_file_index));
Ok(File {
file_system: self.file_system,
index: new_file_index,
})
}
}
struct QidPathFactory {
next_qid_path: u64,
}
impl QidPathFactory {
fn next(&mut self) -> u64 {
let result = self.next_qid_path;
self.next_qid_path += 1;
result
}
}
enum InnerNode {
Directory(DirectoryIndex),
File(FileIndex),
}
impl InnerNode {
fn build(self) -> super::InnerNode {
match self {
InnerNode::Directory(index) => super::InnerNode::Directory(index),
InnerNode::File(index) => super::InnerNode::File(index),
}
}
}
struct InnerFile {
name: String,
content: Vec<u8>,
}
impl InnerFile {
fn build(self, qid_path_factory: &mut QidPathFactory) -> super::InnerFile {
super::InnerFile {
name: self.name,
content: self.content,
qid_path: qid_path_factory.next(),
}
}
}
struct InnerDirectory {
parent: DirectoryIndex,
name: String,
entries: HashMap<String, InnerNode>,
}
impl InnerDirectory {
fn build(mut self, qid_path_factory: &mut QidPathFactory) -> super::InnerDirectory {
super::InnerDirectory {
parent: self.parent,
name: self.name,
// NOTE: These empty vecs are wrong, but they are fixed at the end of
// FileSystem::build() after all directories and files have been built and are
// available for calls to stat().
content: vec![],
cut_points: vec![],
entries: self.entries.drain().map(|(k, v)| (k, v.build())).collect(),
qid_path: qid_path_factory.next(),
}
}
}
#[derive(Error, Debug)]
pub enum NewFileError {
#[error("the file {name:?} already exists")]
AlreadyExists { name: String },
}
#[derive(Error, Debug)]
pub enum NewDirectoryError {
#[error("the directory {name:?} already exists")]
AlreadyExists { name: String },
}
}
| 28.316348 | 95 | 0.489165 |
9cc9c05be425fc1eaeea919a3aad3416738a61f2
| 2,059 |
//! Sequence the execution of tasks using the parking mechanism.
use r3::{
hunk::Hunk,
kernel::{prelude::*, traits, Cfg, StaticTask},
};
use super::Driver;
use crate::utils::SeqTracker;
pub trait SupportedSystem: traits::KernelBase + traits::KernelStatic {}
impl<T: traits::KernelBase + traits::KernelStatic> SupportedSystem for T {}
pub struct App<System: SupportedSystem> {
task2: StaticTask<System>,
seq: Hunk<System, SeqTracker>,
}
impl<System: SupportedSystem> App<System> {
pub const fn new<C, D: Driver<Self>>(b: &mut Cfg<C>) -> Self
where
C: ~const traits::CfgBase<System = System> + ~const traits::CfgTask,
{
StaticTask::define()
.start(task1_body::<System, D>)
.priority(2)
.active(true)
.finish(b);
let task2 = StaticTask::define()
.start(task2_body::<System, D>)
.priority(1)
.active(true)
.finish(b);
let seq = Hunk::<_, SeqTracker>::define().finish(b);
App { task2, seq }
}
}
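// Expected interleaving, tracked by `seq` (task2 has the numerically lower, i.e. higher,
// priority): task2 runs first and parks (0 -> 1); task1 runs and unparks task2 (1 -> 2);
// task2 resumes and parks again (2 -> 3); task1 interrupts the park (3 -> 4); task2's second
// `park` returns `Interrupted` (4 -> 5) before the self-unpark and queue-overflow checks.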
fn task1_body<System: SupportedSystem, D: Driver<App<System>>>() {
D::app().seq.expect_and_replace(1, 2);
D::app().task2.unpark_exact().unwrap();
D::app().seq.expect_and_replace(3, 4);
D::app().task2.interrupt().unwrap();
}
fn task2_body<System: SupportedSystem, D: Driver<App<System>>>() {
D::app().seq.expect_and_replace(0, 1);
System::park().unwrap(); // blocks, switching to `task1`
D::app().seq.expect_and_replace(2, 3);
assert_eq!(
// blocks, switching to `task1`
System::park(),
Err(r3::kernel::ParkError::Interrupted)
);
D::app().seq.expect_and_replace(4, 5);
// Give a park token to itself
D::app().task2.unpark_exact().unwrap();
// `park` doesn't block if the task already has a token
System::park().unwrap();
D::app().task2.unpark_exact().unwrap();
assert_eq!(
D::app().task2.unpark_exact(),
Err(r3::kernel::UnparkExactError::QueueOverflow)
);
D::success();
}
| 26.397436 | 76 | 0.599806 |
03fd51256ef5f2c9d6c8a7bb746703caaec7d0c3
| 1,040 |
use json_sm::SoftwareUpdateResponse;
#[derive(thiserror::Error, Debug)]
pub enum SmartRestSerializerError {
#[error("The operation status is not supported. {response:?}")]
UnsupportedOperationStatus { response: SoftwareUpdateResponse },
#[error("Failed to serialize SmartREST.")]
InvalidCsv(#[from] csv::Error),
#[error(transparent)]
FromCsvWriter(#[from] csv::IntoInnerError<csv::Writer<Vec<u8>>>),
#[error(transparent)]
FromUtf8Error(#[from] std::string::FromUtf8Error),
}
#[derive(thiserror::Error, Debug)]
pub enum SmartRestDeserializerError {
#[error("The received SmartREST message ID {id} is unsupported.")]
UnsupportedOperation { id: String },
#[error("Failed to deserialize SmartREST.")]
InvalidCsv(#[from] csv::Error),
#[error("Jwt response contains incorrect ID: {0}")]
InvalidMessageId(u16),
#[error("Parameter {parameter} is not recognized. {hint}")]
InvalidParameter {
operation: String,
parameter: String,
hint: String,
},
}
| 28.888889 | 70 | 0.676923 |
721ce63dc643ed3847ac0ddfa980f87f42ac517f
| 353 |
use crate::math::num::Monoid;
#[cfg_attr(nightly, codesnip::entry("LazySegTree", include("Monoid")))]
mod lazy;
#[codesnip::entry("LazySegTree")]
pub use lazy::{LSTMonoid, LazySegTree};
#[cfg_attr(nightly, codesnip::entry("SegmentTree", include("Monoid")))]
mod segtree;
#[codesnip::entry("SegmentTree")]
pub use segtree::SegmentTree;
pub mod types;
| 25.214286 | 71 | 0.72238 |
2245083f054ce0a91dbcddc2b0dd350f17b47a15
| 19,749 |
//! Contains the ZX Spectrum system controller (ULA and related logic) of the emulator
use crate::{
error::Error,
host::{Host, HostContext, IoExtender},
settings::RustzxSettings,
utils::screen::bitmap_line_addr,
zx::{
constants::{ADDR_LD_BREAK, CANVAS_HEIGHT, CLOCKS_PER_COL},
events::EmulationEvents,
joy::{
kempston::KempstonJoy,
sinclair::{self, SinclairJoyNum, SinclairKey},
},
keys::{CompoundKey, ZXKey},
machine::ZXMachine,
memory::{Page, RamType, RomType, ZXMemory, PAGE_SIZE},
mouse::kempston::{KempstonMouse, KempstonMouseButton, KempstonMouseWheelDirection},
tape::{TapeImpl, ZXTape},
video::{colors::ZXColor, screen::ZXScreen},
},
};
use rustzx_z80::Z80Bus;
#[cfg(feature = "embedded-roms")]
use crate::zx::roms;
#[cfg(feature = "sound")]
use crate::zx::sound::mixer::ZXMixer;
#[cfg(feature = "precise-border")]
use crate::zx::video::border::ZXBorder;
/// ZX System controller
pub(crate) struct ZXController<H: Host> {
    // parts of the ZX Spectrum.
pub machine: ZXMachine,
pub memory: ZXMemory,
pub screen: ZXScreen<H::FrameBuffer>,
pub tape: ZXTape<H::TapeAsset>,
#[cfg(feature = "precise-border")]
pub border: ZXBorder<H::FrameBuffer>,
pub kempston: Option<KempstonJoy>,
pub mouse: Option<KempstonMouse>,
pub io_extender: Option<H::IoExtender>,
#[cfg(feature = "sound")]
pub mixer: ZXMixer,
pub keyboard: [u8; 8],
pub keyboard_extended: [u8; 8],
pub keyboard_sinclair: [u8; 8],
pub caps_shift_modifier_mask: u32,
// current border color
pub border_color: ZXColor,
    // clock count since frame start
frame_clocks: usize,
    // number of frames that passed during the emulation invocation
passed_frames: usize,
events: EmulationEvents,
paging_enabled: bool,
screen_bank: u8,
current_port_7ffd: u8,
    // The Z80 module expects a controller implementation that cannot fail,
    // so internal errors are stored manually. For the sake of simplicity,
    // only the last error is kept.
last_emulation_error: Option<Error>,
}
impl<H: Host> ZXController<H> {
/// Returns new ZXController from settings
#[allow(clippy::let_and_return)]
pub fn new(settings: &RustzxSettings, host_context: H::Context) -> Self {
let (memory, paging, screen_bank);
match settings.machine {
ZXMachine::Sinclair48K => {
memory = ZXMemory::new(RomType::K16, RamType::K48);
paging = false;
screen_bank = 0;
}
ZXMachine::Sinclair128K => {
memory = ZXMemory::new(RomType::K32, RamType::K128);
paging = true;
screen_bank = 5;
}
};
let kempston = if settings.kempston_enabled {
Some(KempstonJoy::default())
} else {
None
};
let mouse = if settings.mouse_enabled {
Some(KempstonMouse::default())
} else {
None
};
let screen = ZXScreen::new(settings.machine, host_context.frame_buffer_context());
#[cfg(feature = "precise-border")]
let border = ZXBorder::new(settings.machine, host_context.frame_buffer_context());
#[cfg(feature = "sound")]
let mixer = Self::create_mixer(settings);
let out = ZXController {
machine: settings.machine,
memory,
screen,
#[cfg(feature = "precise-border")]
border,
kempston,
mouse,
io_extender: None,
#[cfg(feature = "sound")]
mixer,
keyboard: [0xFF; 8],
keyboard_extended: [0xFF; 8],
keyboard_sinclair: [0xFF; 8],
caps_shift_modifier_mask: 0,
border_color: ZXColor::Black,
frame_clocks: 0,
passed_frames: 0,
tape: Default::default(),
events: Default::default(),
paging_enabled: paging,
screen_bank,
current_port_7ffd: 0,
last_emulation_error: None,
};
#[cfg(feature = "embedded-roms")]
if settings.load_default_rom {
let mut out = out;
out.load_default_rom();
return out;
}
out
}
#[cfg(feature = "sound")]
fn create_mixer(settings: &RustzxSettings) -> ZXMixer {
let mut mixer = ZXMixer::new(
settings.beeper_enabled,
#[cfg(feature = "ay")]
settings.ay_enabled,
#[cfg(feature = "ay")]
settings.ay_mode,
settings.sound_sample_rate,
);
mixer.volume(settings.sound_volume as f64 / 200.0);
mixer
}
    /// Returns the current position within the frame as a fraction in `0.0..=1.0`
#[cfg(feature = "sound")]
fn frame_pos(&self) -> f64 {
let val = self.frame_clocks as f64 / self.machine.specs().clocks_frame as f64;
if val > 1.0 {
1.0
} else {
val
}
}
    /// Loads the built-in ROM
#[cfg(feature = "embedded-roms")]
fn load_default_rom(&mut self) {
match self.machine {
ZXMachine::Sinclair48K => {
let page = self.memory.rom_page_data_mut(0);
page.copy_from_slice(roms::ROM_48K);
}
ZXMachine::Sinclair128K => {
let page = self.memory.rom_page_data_mut(0);
page.copy_from_slice(roms::ROM_128K_0);
let page = self.memory.rom_page_data_mut(1);
page.copy_from_slice(roms::ROM_128K_1);
}
}
}
/// Changes key state in controller
pub fn send_key(&mut self, key: ZXKey, pressed: bool) {
if pressed {
self.keyboard[key.row_id()] &= !key.mask();
return;
}
self.keyboard[key.row_id()] |= key.mask();
}
pub fn send_sinclair_key(&mut self, num: SinclairJoyNum, key: SinclairKey, pressed: bool) {
let key = sinclair::sinclair_event_to_zx_key(key, num);
if pressed {
self.keyboard_sinclair[key.row_id()] &= !key.mask();
return;
}
self.keyboard_sinclair[key.row_id()] |= key.mask();
}
pub fn send_compound_key(&mut self, key: CompoundKey, pressed: bool) {
let mut dummy_modifier_mask = 0;
let modifier_mask = match key.modifier_key() {
ZXKey::Shift => &mut self.caps_shift_modifier_mask,
_ => &mut dummy_modifier_mask,
};
let primary_key = key.primary_key();
let modifier_key = key.modifier_key();
if pressed {
*modifier_mask |= key.modifier_mask();
self.keyboard_extended[primary_key.row_id()] &= !primary_key.mask();
self.keyboard_extended[modifier_key.row_id()] &= !modifier_key.mask();
} else {
*modifier_mask &= !key.modifier_mask();
if *modifier_mask == 0 {
self.keyboard_extended[modifier_key.row_id()] |= modifier_key.mask();
}
self.keyboard_extended[primary_key.row_id()] |= primary_key.mask();
}
}
pub fn send_mouse_button(&mut self, button: KempstonMouseButton, pressed: bool) {
if let Some(mouse) = &mut self.mouse {
mouse.send_button(button, pressed);
}
}
pub fn send_mouse_wheel(&mut self, dir: KempstonMouseWheelDirection) {
if let Some(mouse) = &mut self.mouse {
mouse.send_wheel(dir);
}
}
pub fn send_mouse_pos_diff(&mut self, x: i8, y: i8) {
if let Some(mouse) = &mut self.mouse {
mouse.send_pos_diff(x, y);
}
}
/// Returns current bus floating value
fn floating_bus_value(&self) -> u8 {
let specs = self.machine.specs();
let clocks = self.frame_clocks;
if clocks < specs.clocks_first_pixel + 2 {
return 0xFF;
}
let clocks = clocks - (specs.clocks_first_pixel + 2);
let row = clocks / specs.clocks_line;
let clocks = clocks % specs.clocks_line;
let col = (clocks / 8) * 2 + (clocks % 8) / 2;
if row < CANVAS_HEIGHT
&& clocks < specs.clocks_screen_row - CLOCKS_PER_COL
&& ((clocks & 0x04) == 0)
{
if clocks % 2 == 0 {
return self.memory.read(bitmap_line_addr(row) + col as u16);
} else {
let byte = (row / 8) * 32 + col;
return self.memory.read(0x5800 + byte as u16);
};
}
0xFF
}
/// make contention
fn do_contention(&mut self) {
let contention = self.machine.contention_clocks(self.frame_clocks);
self.wait_internal(contention);
}
/// make contention + wait some clocks
fn do_contention_and_wait(&mut self, wait_time: usize) {
let contention = self.machine.contention_clocks(self.frame_clocks);
self.wait_internal(contention + wait_time);
}
// check addr contention
fn addr_is_contended(&self, addr: u16) -> bool {
if let Page::Ram(bank) = self.memory.get_page(addr) {
self.machine.bank_is_contended(bank as usize)
} else {
false
}
}
    /// Applies early IO contention
fn io_contention_first(&mut self, port: u16) {
if self.addr_is_contended(port) {
self.do_contention();
};
self.wait_internal(1);
}
    /// Applies late IO contention
fn io_contention_last(&mut self, port: u16) {
if self.machine.port_is_contended(port) {
self.do_contention_and_wait(2);
} else if self.addr_is_contended(port) {
self.do_contention_and_wait(1);
self.do_contention_and_wait(1);
self.do_contention();
} else {
self.wait_internal(2);
}
}
/// Starts a new frame
fn new_frame(&mut self) {
self.frame_clocks -= self.machine.specs().clocks_frame;
self.screen.new_frame();
#[cfg(feature = "precise-border")]
self.border.new_frame();
#[cfg(feature = "sound")]
self.mixer.new_frame();
}
    /// Clears all detected events
pub fn clear_events(&mut self) {
self.events.clear();
}
/// Returns last events
pub fn events(&self) -> EmulationEvents {
self.events
}
    /// Returns the number of frames that have passed during emulation
pub fn frames_count(&self) -> usize {
self.passed_frames
}
pub fn reset_frame_counter(&mut self) {
self.passed_frames = 0;
}
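    /// Handles writes to the 128K memory control port `0x7FFD`. As implemented below,
    /// bits 0-2 select the RAM bank mapped at 0xC000, bit 3 selects the normal or shadow
    /// screen bank, bit 4 selects the ROM mapped at 0x0000, and bit 5 disables further
    /// paging until reset.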
pub fn write_7ffd(&mut self, val: u8) {
if !self.paging_enabled {
return;
}
self.current_port_7ffd = val;
// remap top 16K of the ram
self.memory.remap(3, Page::Ram(val & 0x07));
        // the third block is not pageable
        // the second block is the screen buffer and is not pageable, but we still need to switch the active buffer
let new_screen_bank = if val & 0x08 == 0 { 5 } else { 7 };
self.screen.switch_bank(new_screen_bank as usize);
self.screen_bank = new_screen_bank;
// remap ROM
self.memory.remap(0, Page::Rom((val >> 4) & 0x01));
// check paging allow bit
if val & 0x20 != 0 {
self.paging_enabled = false;
}
}
pub fn read_7ffd(&self) -> u8 {
self.current_port_7ffd
}
#[cfg(all(feature = "sound", feature = "ay"))]
fn read_ay_port(&mut self) -> u8 {
self.mixer.ay.read()
}
#[cfg(not(all(feature = "sound", feature = "ay")))]
fn read_ay_port(&mut self) -> u8 {
self.floating_bus_value()
}
#[cfg(all(feature = "sound", feature = "ay"))]
fn write_ay_port(&mut self, value: u8) {
self.mixer.ay.write(value);
}
#[cfg(not(all(feature = "sound", feature = "ay")))]
fn write_ay_port(&mut self, _: u8) {}
#[cfg(all(feature = "sound", feature = "ay"))]
fn select_ay_reg(&mut self, value: u8) {
self.mixer.ay.select_reg(value)
}
#[cfg(not(all(feature = "sound", feature = "ay")))]
fn select_ay_reg(&mut self, _: u8) {}
pub(crate) fn set_border_color(
&mut self,
#[cfg(feature = "precise-border")] clocks: usize,
#[cfg(not(feature = "precise-border"))] _clocks: usize,
color: ZXColor,
) {
self.border_color = color;
#[cfg(feature = "precise-border")]
self.border.set_border(clocks, color);
}
pub(crate) fn take_last_emulation_error(&mut self) -> Option<Error> {
self.last_emulation_error.take()
}
pub(crate) fn refresh_memory_dependent_devices(&mut self) {
match self.machine {
ZXMachine::Sinclair48K => {
for (idx, data) in self.memory.ram_page_data(0).iter().enumerate() {
self.screen.update(idx as u16, 0, *data);
}
}
ZXMachine::Sinclair128K => {
for (idx, data) in self.memory.ram_page_data(5).iter().enumerate() {
self.screen.update(idx as u16, 5, *data);
}
for (idx, data) in self.memory.ram_page_data(7).iter().enumerate() {
self.screen.update(idx as u16, 7, *data);
}
}
}
}
}
impl<H: Host> Z80Bus for ZXController<H> {
    /// Checks for special PC addresses (breakpoints), such as the tape
    /// fast-loading detection breakpoint
fn pc_callback(&mut self, addr: u16) {
// check mapped memory page at 0x0000 .. 0x3FFF
let check_fast_load = match self.machine {
ZXMachine::Sinclair48K if self.memory.get_bank_type(0) == Page::Rom(0) => true,
ZXMachine::Sinclair128K if self.memory.get_bank_type(0) == Page::Rom(1) => true,
_ => false,
};
if check_fast_load {
// Tape LOAD/VERIFY
if addr == ADDR_LD_BREAK {
                // Add event (fast tape loading request); it must be executed
                // by the emulator immediately
self.events |= EmulationEvents::TAPE_FAST_LOAD_TRIGGER_DETECTED;
}
}
}
    /// read data without taking contention into account
fn read_internal(&mut self, addr: u16) -> u8 {
self.memory.read(addr)
}
    /// write data without taking contention into account
fn write_internal(&mut self, addr: u16, data: u8) {
self.memory.write(addr, data);
// if ram then compare bank to screen bank
if let Page::Ram(bank) = self.memory.get_page(addr) {
self.screen
.update(addr % PAGE_SIZE as u16, bank as usize, data);
}
}
    /// Changes internal state as emulated clock cycles pass (emulation processing)
fn wait_internal(&mut self, clk: usize) {
self.frame_clocks += clk;
if let Err(e) = self.tape.process_clocks(clk) {
self.last_emulation_error = Some(e);
}
#[cfg(feature = "sound")]
{
let pos = self.frame_pos();
self.mixer.process(pos);
}
self.screen.process_clocks(self.frame_clocks);
if self.frame_clocks >= self.machine.specs().clocks_frame {
self.new_frame();
self.passed_frames += 1;
}
}
// wait with memory request pin active
fn wait_mreq(&mut self, addr: u16, clk: usize) {
match self.machine {
ZXMachine::Sinclair48K | ZXMachine::Sinclair128K => {
// contention in low 16k RAM
if self.addr_is_contended(addr) {
self.do_contention();
}
}
}
self.wait_internal(clk);
}
/// wait without memory request pin active
fn wait_no_mreq(&mut self, addr: u16, clk: usize) {
// only for 48 K!
self.wait_mreq(addr, clk);
}
/// read io from hardware
fn read_io(&mut self, port: u16) -> u8 {
// all contentions check
self.io_contention_first(port);
self.io_contention_last(port);
let io_extender_value = self
.io_extender
.as_mut()
.and_then(|e| e.extends_port(port).then(|| e.read(port)));
// find out what we need to do
let [_, h] = port.to_le_bytes();
let output = if let Some(value) = io_extender_value {
value
} else if port & 0x0001 == 0 {
// ULA port
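            // The high byte of the port address selects keyboard half-rows (active low):
            // a zero in bit `n` includes row `n` in the read, and the low bits of the
            // result carry the key states, also active low.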
let mut tmp: u8 = 0xFF;
for n in 0..8 {
// if bit of row reset
if ((h >> n) & 0x01) == 0 {
let keyboard_byte =
self.keyboard[n] & self.keyboard_extended[n] & self.keyboard_sinclair[n];
tmp &= keyboard_byte;
}
}
// Emulate zx spectrum "issue 2" model.
// For future "issue 3" implementation condition will be `!self.ear`, but
// different zx spectrum "issues" emulation is not planned yet
if !self.tape.current_bit() {
tmp ^= 0x40;
}
            // bits 5 and 7 are unused
tmp
} else if self.mouse.is_some() && (port & 0x0121 == 0x0001) {
self.mouse.as_ref().unwrap().buttons_port
} else if self.mouse.is_some() && (port & 0x0521 == 0x0101) {
self.mouse.as_ref().unwrap().x_pos_port
} else if self.mouse.is_some() && (port & 0x0521 == 0x0501) {
self.mouse.as_ref().unwrap().y_pos_port
} else if port & 0xC002 == 0xC000 {
self.read_ay_port()
} else if self.kempston.is_some() && (port & 0x00E0 == 0) {
self.kempston.as_ref().unwrap().read()
} else {
self.floating_bus_value()
};
// add one clock after operation
self.wait_internal(1);
output
}
/// write value to hardware port
fn write_io(&mut self, port: u16, data: u8) {
// first contention
self.io_contention_first(port);
// find active port
if self
.io_extender
.as_ref()
.map_or(false, |e| e.extends_port(port))
{
self.io_extender.as_mut().unwrap().write(port, data);
} else if port & 0xC002 == 0xC000 {
self.select_ay_reg(data);
} else if port & 0xC002 == 0x8000 {
self.write_ay_port(data);
} else if port & 0x0001 == 0 {
self.set_border_color(self.frame_clocks, ZXColor::from_bits(data & 0x07));
#[cfg(feature = "sound")]
{
let mic = data & 0x08 != 0;
let ear = data & 0x10 != 0;
self.mixer.beeper.change_state(ear, mic);
}
} else if (port & 0x8002 == 0) && (self.machine == ZXMachine::Sinclair128K) {
self.write_7ffd(data);
}
// last contention after byte write
self.io_contention_last(port);
// add one clock after operation
self.wait_internal(1);
}
/// value, requested during `INT0` interrupt
fn read_interrupt(&mut self) -> u8 {
0xFF
}
/// checks system maskable interrupt pin state
fn int_active(&self) -> bool {
self.frame_clocks % self.machine.specs().clocks_frame
< self.machine.specs().interrupt_length
}
/// checks non-maskable interrupt pin state
fn nmi_active(&self) -> bool {
false
}
    /// Called by the CPU when a RETI instruction has been processed
fn reti(&mut self) {}
    /// Called by the CPU when it is halted
fn halt(&mut self, _: bool) {}
}
| 33.025084 | 97 | 0.557294 |
8fb35f03f340648013d8991c0e55ed2e3eea7b78
| 4,151 |
// Adapted from the futures-retry example: https://gitlab.com/mexus/futures-retry/blob/master/examples/tcp-client-complex.rs
use futures::future::Future;
use futures_retry::{ErrorHandler, FutureRetry, RetryPolicy};
use http::StatusCode;
use log::trace;
use reqwest::r#async::Client as HttpClient;
use serde_json::json;
use std::{default::Default, fmt::Display, time::Duration};
use url::Url;
// The account creation endpoint set by the engines in the [RFC](https://github.com/interledger/rfcs/pull/536)
static ACCOUNTS_ENDPOINT: &str = "accounts";
const MAX_RETRIES: usize = 10;
const DEFAULT_HTTP_TIMEOUT: Duration = Duration::from_millis(5000);
#[derive(Clone)]
pub struct Client {
max_retries: usize,
client: HttpClient,
}
impl Client {
    /// Creates a client with the given request timeout and maximum number of retries
pub fn new(timeout: Duration, max_retries: usize) -> Self {
Client {
client: HttpClient::builder().timeout(timeout).build().unwrap(),
max_retries,
}
}
pub fn create_engine_account<T: Display + Copy>(
&self,
engine_url: Url,
id: T,
) -> impl Future<Item = StatusCode, Error = reqwest::Error> {
let mut se_url = engine_url.clone();
se_url
.path_segments_mut()
.expect("Invalid settlement engine URL")
.push(ACCOUNTS_ENDPOINT);
trace!(
"Sending account {} creation request to settlement engine: {:?}",
id,
se_url.clone()
);
// The actual HTTP request which gets made to the engine
let client = self.client.clone();
let create_settlement_engine_account = move || {
client
.post(se_url.as_ref())
.json(&json!({"id" : id.to_string()}))
.send()
.and_then(move |response| {
// If the account is not found on the peer's connector, the
// retry logic will not get triggered. When the counterparty
// tries to add the account, they will complete the handshake.
Ok(response.status())
})
};
FutureRetry::new(
create_settlement_engine_account,
IoHandler::new(
self.max_retries,
format!("[Engine: {}, Account: {}]", engine_url, id),
),
)
}
}
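// Illustrative usage sketch (not part of the original API): build a client and request account
// creation on an engine. The URL and account id below are placeholders; driving the returned
// future is left to the caller's futures-0.1 executor.
#[allow(dead_code)]
fn example_create_account() -> impl Future<Item = (), Error = ()> {
    let client = Client::default();
    let engine_url = Url::parse("http://localhost:3000/").expect("valid settlement engine URL");
    client
        .create_engine_account(engine_url, 42)
        .map(|status| trace!("engine replied with status {}", status))
        .map_err(|err| trace!("account creation failed: {:?}", err))
}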
/// An I/O handler that counts attempts.
struct IoHandler<D> {
max_attempts: usize,
current_attempt: usize,
display_name: D,
}
impl<D> IoHandler<D> {
fn new(max_attempts: usize, display_name: D) -> Self {
IoHandler {
max_attempts,
current_attempt: 0,
display_name,
}
}
}
// The error handler implements the retry policy based on the kind of error
// received (client error, timeout, server error, or other).
impl<D> ErrorHandler<reqwest::Error> for IoHandler<D>
where
D: ::std::fmt::Display,
{
type OutError = reqwest::Error;
fn handle(&mut self, e: reqwest::Error) -> RetryPolicy<reqwest::Error> {
self.current_attempt += 1;
if self.current_attempt > self.max_attempts {
trace!(
"[{}] All attempts ({}) have been used",
self.display_name,
self.max_attempts
);
return RetryPolicy::ForwardError(e);
}
trace!(
"[{}] Attempt {}/{} has failed",
self.display_name,
self.current_attempt,
self.max_attempts
);
if e.is_client_error() {
// do not retry 4xx
RetryPolicy::ForwardError(e)
} else if e.is_timeout() || e.is_server_error() {
// Retry timeouts and 5xx every 5 seconds
RetryPolicy::WaitRetry(Duration::from_secs(5))
} else {
// Retry other errors slightly more frequently since they may be
// related to the engine not having started yet
RetryPolicy::WaitRetry(Duration::from_secs(1))
}
}
}
impl Default for Client {
fn default() -> Self {
Client::new(DEFAULT_HTTP_TIMEOUT, MAX_RETRIES)
}
}
| 31.210526 | 124 | 0.577933 |
64919ce887fd095dbe0f633f4c43d79eea2e6179
| 71,513 |
//! Various utilities for building scripts and deriving keys related to channels. These are
//! largely of interest for those implementing chain::keysinterface::ChannelKeys message signing
//! by hand.
use std::ffi::c_void;
use bitcoin::hashes::Hash;
use crate::c_types::*;
/// Build the commitment secret from the seed and the commitment number
#[no_mangle]
pub extern "C" fn build_commitment_secret(commitment_seed: *const [u8; 32], mut idx: u64) -> crate::c_types::ThirtyTwoBytes {
let mut ret = lightning::ln::chan_utils::build_commitment_secret(unsafe { &*commitment_seed}, idx);
crate::c_types::ThirtyTwoBytes { data: ret }
}
/// Derives a per-commitment-transaction private key (eg an htlc key or delayed_payment key)
/// from the base secret and the per_commitment_point.
///
/// Note that this is infallible iff we trust that at least one of the two input keys is randomly
/// generated (ie our own).
#[no_mangle]
pub extern "C" fn derive_private_key(mut per_commitment_point: crate::c_types::PublicKey, base_secret: *const [u8; 32]) -> crate::c_types::derived::CResult_SecretKeySecpErrorZ {
let mut ret = lightning::ln::chan_utils::derive_private_key(&bitcoin::secp256k1::Secp256k1::new(), &per_commitment_point.into_rust(), &::bitcoin::secp256k1::key::SecretKey::from_slice(&unsafe { *base_secret}[..]).unwrap());
let mut local_ret = match ret { Ok(mut o) => crate::c_types::CResultTempl::ok( { crate::c_types::SecretKey::from_rust(o) }), Err(mut e) => crate::c_types::CResultTempl::err( { crate::c_types::Secp256k1Error::from_rust(e) }) };
local_ret
}
/// Derives a per-commitment-transaction public key (eg an htlc key or a delayed_payment key)
/// from the base point and the per_commitment_key. This is the public equivalent of
/// derive_private_key - using only public keys to derive a public key instead of private keys.
///
/// Note that this is infallible iff we trust that at least one of the two input keys is randomly
/// generated (ie our own).
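/// Per BOLT #3, the derived public key is `base_point + SHA256(per_commitment_point || base_point) * G`.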
#[no_mangle]
pub extern "C" fn derive_public_key(mut per_commitment_point: crate::c_types::PublicKey, mut base_point: crate::c_types::PublicKey) -> crate::c_types::derived::CResult_PublicKeySecpErrorZ {
let mut ret = lightning::ln::chan_utils::derive_public_key(&bitcoin::secp256k1::Secp256k1::new(), &per_commitment_point.into_rust(), &base_point.into_rust());
let mut local_ret = match ret { Ok(mut o) => crate::c_types::CResultTempl::ok( { crate::c_types::PublicKey::from_rust(&o) }), Err(mut e) => crate::c_types::CResultTempl::err( { crate::c_types::Secp256k1Error::from_rust(e) }) };
local_ret
}
/// Derives a per-commitment-transaction revocation key from its constituent parts.
///
/// Only the cheating participant owns a valid witness to propagate a revoked
/// commitment transaction, thus the per_commitment_secret always comes from the cheater
/// and the revocation_base_secret always comes from the punisher, which is the broadcaster
/// of the transaction spending with this key knowledge.
///
/// Note that this is infallible iff we trust that at least one of the two input keys is randomly
/// generated (ie our own).
#[no_mangle]
pub extern "C" fn derive_private_revocation_key(per_commitment_secret: *const [u8; 32], countersignatory_revocation_base_secret: *const [u8; 32]) -> crate::c_types::derived::CResult_SecretKeySecpErrorZ {
let mut ret = lightning::ln::chan_utils::derive_private_revocation_key(&bitcoin::secp256k1::Secp256k1::new(), &::bitcoin::secp256k1::key::SecretKey::from_slice(&unsafe { *per_commitment_secret}[..]).unwrap(), &::bitcoin::secp256k1::key::SecretKey::from_slice(&unsafe { *countersignatory_revocation_base_secret}[..]).unwrap());
let mut local_ret = match ret { Ok(mut o) => crate::c_types::CResultTempl::ok( { crate::c_types::SecretKey::from_rust(o) }), Err(mut e) => crate::c_types::CResultTempl::err( { crate::c_types::Secp256k1Error::from_rust(e) }) };
local_ret
}
/// Derives a per-commitment-transaction revocation public key from its constituent parts. This is
/// the public equivalent of derive_private_revocation_key - using only public keys to derive a
/// public key instead of private keys.
///
/// Only the cheating participant owns a valid witness to propagate a revoked
/// commitment transaction, thus per_commitment_point always comes from the cheater
/// and revocation_base_point always comes from the punisher, which is the broadcaster
/// of the transaction spending with this key knowledge.
///
/// Note that this is infallible iff we trust that at least one of the two input keys is randomly
/// generated (ie our own).
#[no_mangle]
pub extern "C" fn derive_public_revocation_key(mut per_commitment_point: crate::c_types::PublicKey, mut countersignatory_revocation_base_point: crate::c_types::PublicKey) -> crate::c_types::derived::CResult_PublicKeySecpErrorZ {
let mut ret = lightning::ln::chan_utils::derive_public_revocation_key(&bitcoin::secp256k1::Secp256k1::new(), &per_commitment_point.into_rust(), &countersignatory_revocation_base_point.into_rust());
let mut local_ret = match ret { Ok(mut o) => crate::c_types::CResultTempl::ok( { crate::c_types::PublicKey::from_rust(&o) }), Err(mut e) => crate::c_types::CResultTempl::err( { crate::c_types::Secp256k1Error::from_rust(e) }) };
local_ret
}
use lightning::ln::chan_utils::TxCreationKeys as nativeTxCreationKeysImport;
type nativeTxCreationKeys = nativeTxCreationKeysImport;
/// The set of public keys which are used in the creation of one commitment transaction.
/// These are derived from the channel base keys and per-commitment data.
///
/// A broadcaster key is provided by the potential broadcaster of the computed transaction.
/// A countersignatory key comes from a protocol participant unable to broadcast the
/// transaction.
///
/// These keys are assumed to be good, either because the code derived them from
/// channel basepoints via the new function, or they were obtained via
/// CommitmentTransaction.trust().keys() because we trusted the source of the
/// pre-calculated keys.
#[must_use]
#[repr(C)]
pub struct TxCreationKeys {
/// Nearly everywhere, inner must be non-null; however, in places where
/// the Rust equivalent takes an Option, it may be set to null to indicate None.
pub inner: *mut nativeTxCreationKeys,
pub is_owned: bool,
}
impl Drop for TxCreationKeys {
fn drop(&mut self) {
if self.is_owned && !self.inner.is_null() {
let _ = unsafe { Box::from_raw(self.inner) };
}
}
}
#[no_mangle]
pub extern "C" fn TxCreationKeys_free(this_ptr: TxCreationKeys) { }
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
extern "C" fn TxCreationKeys_free_void(this_ptr: *mut c_void) {
unsafe { let _ = Box::from_raw(this_ptr as *mut nativeTxCreationKeys); }
}
#[allow(unused)]
/// When moving out of the pointer, we have to ensure we aren't a reference; this makes that easy
impl TxCreationKeys {
pub(crate) fn take_inner(mut self) -> *mut nativeTxCreationKeys {
assert!(self.is_owned);
let ret = self.inner;
self.inner = std::ptr::null_mut();
ret
}
}
impl Clone for TxCreationKeys {
fn clone(&self) -> Self {
Self {
inner: Box::into_raw(Box::new(unsafe { &*self.inner }.clone())),
is_owned: true,
}
}
}
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
pub(crate) extern "C" fn TxCreationKeys_clone_void(this_ptr: *const c_void) -> *mut c_void {
Box::into_raw(Box::new(unsafe { (*(this_ptr as *mut nativeTxCreationKeys)).clone() })) as *mut c_void
}
#[no_mangle]
pub extern "C" fn TxCreationKeys_clone(orig: &TxCreationKeys) -> TxCreationKeys {
TxCreationKeys { inner: Box::into_raw(Box::new(unsafe { &*orig.inner }.clone())), is_owned: true }
}
/// The broadcaster's per-commitment public key which was used to derive the other keys.
#[no_mangle]
pub extern "C" fn TxCreationKeys_get_per_commitment_point(this_ptr: &TxCreationKeys) -> crate::c_types::PublicKey {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.per_commitment_point;
crate::c_types::PublicKey::from_rust(&(*inner_val))
}
/// The broadcaster's per-commitment public key which was used to derive the other keys.
#[no_mangle]
pub extern "C" fn TxCreationKeys_set_per_commitment_point(this_ptr: &mut TxCreationKeys, mut val: crate::c_types::PublicKey) {
unsafe { &mut *this_ptr.inner }.per_commitment_point = val.into_rust();
}
/// The revocation key which is used to allow the broadcaster of the commitment
/// transaction to provide their counterparty the ability to punish them if they broadcast
/// an old state.
#[no_mangle]
pub extern "C" fn TxCreationKeys_get_revocation_key(this_ptr: &TxCreationKeys) -> crate::c_types::PublicKey {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.revocation_key;
crate::c_types::PublicKey::from_rust(&(*inner_val))
}
/// The revocation key which is used to allow the broadcaster of the commitment
/// transaction to provide their counterparty the ability to punish them if they broadcast
/// an old state.
#[no_mangle]
pub extern "C" fn TxCreationKeys_set_revocation_key(this_ptr: &mut TxCreationKeys, mut val: crate::c_types::PublicKey) {
unsafe { &mut *this_ptr.inner }.revocation_key = val.into_rust();
}
/// Broadcaster's HTLC Key
#[no_mangle]
pub extern "C" fn TxCreationKeys_get_broadcaster_htlc_key(this_ptr: &TxCreationKeys) -> crate::c_types::PublicKey {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.broadcaster_htlc_key;
crate::c_types::PublicKey::from_rust(&(*inner_val))
}
/// Broadcaster's HTLC Key
#[no_mangle]
pub extern "C" fn TxCreationKeys_set_broadcaster_htlc_key(this_ptr: &mut TxCreationKeys, mut val: crate::c_types::PublicKey) {
unsafe { &mut *this_ptr.inner }.broadcaster_htlc_key = val.into_rust();
}
/// Countersignatory's HTLC Key
#[no_mangle]
pub extern "C" fn TxCreationKeys_get_countersignatory_htlc_key(this_ptr: &TxCreationKeys) -> crate::c_types::PublicKey {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.countersignatory_htlc_key;
crate::c_types::PublicKey::from_rust(&(*inner_val))
}
/// Countersignatory's HTLC Key
#[no_mangle]
pub extern "C" fn TxCreationKeys_set_countersignatory_htlc_key(this_ptr: &mut TxCreationKeys, mut val: crate::c_types::PublicKey) {
unsafe { &mut *this_ptr.inner }.countersignatory_htlc_key = val.into_rust();
}
/// Broadcaster's Payment Key (which isn't allowed to be spent from for some delay)
#[no_mangle]
pub extern "C" fn TxCreationKeys_get_broadcaster_delayed_payment_key(this_ptr: &TxCreationKeys) -> crate::c_types::PublicKey {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.broadcaster_delayed_payment_key;
crate::c_types::PublicKey::from_rust(&(*inner_val))
}
/// Broadcaster's Payment Key (which isn't allowed to be spent from for some delay)
#[no_mangle]
pub extern "C" fn TxCreationKeys_set_broadcaster_delayed_payment_key(this_ptr: &mut TxCreationKeys, mut val: crate::c_types::PublicKey) {
unsafe { &mut *this_ptr.inner }.broadcaster_delayed_payment_key = val.into_rust();
}
#[must_use]
#[no_mangle]
pub extern "C" fn TxCreationKeys_new(mut per_commitment_point_arg: crate::c_types::PublicKey, mut revocation_key_arg: crate::c_types::PublicKey, mut broadcaster_htlc_key_arg: crate::c_types::PublicKey, mut countersignatory_htlc_key_arg: crate::c_types::PublicKey, mut broadcaster_delayed_payment_key_arg: crate::c_types::PublicKey) -> TxCreationKeys {
TxCreationKeys { inner: Box::into_raw(Box::new(nativeTxCreationKeys {
per_commitment_point: per_commitment_point_arg.into_rust(),
revocation_key: revocation_key_arg.into_rust(),
broadcaster_htlc_key: broadcaster_htlc_key_arg.into_rust(),
countersignatory_htlc_key: countersignatory_htlc_key_arg.into_rust(),
broadcaster_delayed_payment_key: broadcaster_delayed_payment_key_arg.into_rust(),
})), is_owned: true }
}
#[no_mangle]
pub extern "C" fn TxCreationKeys_write(obj: &TxCreationKeys) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &(*(*obj).inner) })
}
#[no_mangle]
pub(crate) extern "C" fn TxCreationKeys_write_void(obj: *const c_void) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &*(obj as *const nativeTxCreationKeys) })
}
#[no_mangle]
pub extern "C" fn TxCreationKeys_read(ser: crate::c_types::u8slice) -> TxCreationKeys {
if let Ok(res) = crate::c_types::deserialize_obj(ser) {
TxCreationKeys { inner: Box::into_raw(Box::new(res)), is_owned: true }
} else {
TxCreationKeys { inner: std::ptr::null_mut(), is_owned: true }
}
}
use lightning::ln::chan_utils::ChannelPublicKeys as nativeChannelPublicKeysImport;
type nativeChannelPublicKeys = nativeChannelPublicKeysImport;
/// One counterparty's public keys which do not change over the life of a channel.
#[must_use]
#[repr(C)]
pub struct ChannelPublicKeys {
/// Nearly everywhere, inner must be non-null; however, in places where
/// the Rust equivalent takes an Option, it may be set to null to indicate None.
pub inner: *mut nativeChannelPublicKeys,
pub is_owned: bool,
}
impl Drop for ChannelPublicKeys {
fn drop(&mut self) {
if self.is_owned && !self.inner.is_null() {
let _ = unsafe { Box::from_raw(self.inner) };
}
}
}
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_free(this_ptr: ChannelPublicKeys) { }
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
extern "C" fn ChannelPublicKeys_free_void(this_ptr: *mut c_void) {
unsafe { let _ = Box::from_raw(this_ptr as *mut nativeChannelPublicKeys); }
}
#[allow(unused)]
/// When moving out of the pointer, we have to ensure we aren't a reference; this makes that easy
impl ChannelPublicKeys {
pub(crate) fn take_inner(mut self) -> *mut nativeChannelPublicKeys {
assert!(self.is_owned);
let ret = self.inner;
self.inner = std::ptr::null_mut();
ret
}
}
impl Clone for ChannelPublicKeys {
fn clone(&self) -> Self {
Self {
inner: Box::into_raw(Box::new(unsafe { &*self.inner }.clone())),
is_owned: true,
}
}
}
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
pub(crate) extern "C" fn ChannelPublicKeys_clone_void(this_ptr: *const c_void) -> *mut c_void {
Box::into_raw(Box::new(unsafe { (*(this_ptr as *mut nativeChannelPublicKeys)).clone() })) as *mut c_void
}
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_clone(orig: &ChannelPublicKeys) -> ChannelPublicKeys {
ChannelPublicKeys { inner: Box::into_raw(Box::new(unsafe { &*orig.inner }.clone())), is_owned: true }
}
/// The public key which is used to sign all commitment transactions, as it appears in the
/// on-chain channel lock-in 2-of-2 multisig output.
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_get_funding_pubkey(this_ptr: &ChannelPublicKeys) -> crate::c_types::PublicKey {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.funding_pubkey;
crate::c_types::PublicKey::from_rust(&(*inner_val))
}
/// The public key which is used to sign all commitment transactions, as it appears in the
/// on-chain channel lock-in 2-of-2 multisig output.
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_set_funding_pubkey(this_ptr: &mut ChannelPublicKeys, mut val: crate::c_types::PublicKey) {
unsafe { &mut *this_ptr.inner }.funding_pubkey = val.into_rust();
}
/// The base point which is used (with derive_public_revocation_key) to derive per-commitment
/// revocation keys. This is combined with the per-commitment-secret generated by the
/// counterparty to create a secret which the counterparty can reveal to revoke previous
/// states.
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_get_revocation_basepoint(this_ptr: &ChannelPublicKeys) -> crate::c_types::PublicKey {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.revocation_basepoint;
crate::c_types::PublicKey::from_rust(&(*inner_val))
}
/// The base point which is used (with derive_public_revocation_key) to derive per-commitment
/// revocation keys. This is combined with the per-commitment-secret generated by the
/// counterparty to create a secret which the counterparty can reveal to revoke previous
/// states.
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_set_revocation_basepoint(this_ptr: &mut ChannelPublicKeys, mut val: crate::c_types::PublicKey) {
unsafe { &mut *this_ptr.inner }.revocation_basepoint = val.into_rust();
}
/// The public key on which the non-broadcaster (ie the countersignatory) receives an immediately
/// spendable primary channel balance on the broadcaster's commitment transaction. This key is
/// static across every commitment transaction.
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_get_payment_point(this_ptr: &ChannelPublicKeys) -> crate::c_types::PublicKey {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.payment_point;
crate::c_types::PublicKey::from_rust(&(*inner_val))
}
/// The public key on which the non-broadcaster (ie the countersignatory) receives an immediately
/// spendable primary channel balance on the broadcaster's commitment transaction. This key is
/// static across every commitment transaction.
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_set_payment_point(this_ptr: &mut ChannelPublicKeys, mut val: crate::c_types::PublicKey) {
unsafe { &mut *this_ptr.inner }.payment_point = val.into_rust();
}
/// The base point which is used (with derive_public_key) to derive a per-commitment payment
/// public key which receives non-HTLC-encumbered funds which are only available for spending
/// after some delay (or can be claimed via the revocation path).
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_get_delayed_payment_basepoint(this_ptr: &ChannelPublicKeys) -> crate::c_types::PublicKey {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.delayed_payment_basepoint;
crate::c_types::PublicKey::from_rust(&(*inner_val))
}
/// The base point which is used (with derive_public_key) to derive a per-commitment payment
/// public key which receives non-HTLC-encumbered funds which are only available for spending
/// after some delay (or can be claimed via the revocation path).
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_set_delayed_payment_basepoint(this_ptr: &mut ChannelPublicKeys, mut val: crate::c_types::PublicKey) {
unsafe { &mut *this_ptr.inner }.delayed_payment_basepoint = val.into_rust();
}
/// The base point which is used (with derive_public_key) to derive a per-commitment public key
/// which is used to encumber HTLC-in-flight outputs.
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_get_htlc_basepoint(this_ptr: &ChannelPublicKeys) -> crate::c_types::PublicKey {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.htlc_basepoint;
crate::c_types::PublicKey::from_rust(&(*inner_val))
}
/// The base point which is used (with derive_public_key) to derive a per-commitment public key
/// which is used to encumber HTLC-in-flight outputs.
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_set_htlc_basepoint(this_ptr: &mut ChannelPublicKeys, mut val: crate::c_types::PublicKey) {
unsafe { &mut *this_ptr.inner }.htlc_basepoint = val.into_rust();
}
#[must_use]
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_new(mut funding_pubkey_arg: crate::c_types::PublicKey, mut revocation_basepoint_arg: crate::c_types::PublicKey, mut payment_point_arg: crate::c_types::PublicKey, mut delayed_payment_basepoint_arg: crate::c_types::PublicKey, mut htlc_basepoint_arg: crate::c_types::PublicKey) -> ChannelPublicKeys {
ChannelPublicKeys { inner: Box::into_raw(Box::new(nativeChannelPublicKeys {
funding_pubkey: funding_pubkey_arg.into_rust(),
revocation_basepoint: revocation_basepoint_arg.into_rust(),
payment_point: payment_point_arg.into_rust(),
delayed_payment_basepoint: delayed_payment_basepoint_arg.into_rust(),
htlc_basepoint: htlc_basepoint_arg.into_rust(),
})), is_owned: true }
}
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_write(obj: &ChannelPublicKeys) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &(*(*obj).inner) })
}
#[no_mangle]
pub(crate) extern "C" fn ChannelPublicKeys_write_void(obj: *const c_void) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &*(obj as *const nativeChannelPublicKeys) })
}
#[no_mangle]
pub extern "C" fn ChannelPublicKeys_read(ser: crate::c_types::u8slice) -> ChannelPublicKeys {
if let Ok(res) = crate::c_types::deserialize_obj(ser) {
ChannelPublicKeys { inner: Box::into_raw(Box::new(res)), is_owned: true }
} else {
ChannelPublicKeys { inner: std::ptr::null_mut(), is_owned: true }
}
}
/// Create per-state keys from channel base points and the per-commitment point.
/// The key set is asymmetric and can't be used as part of the counter-signatory's set of transactions.
#[must_use]
#[no_mangle]
pub extern "C" fn TxCreationKeys_derive_new(mut per_commitment_point: crate::c_types::PublicKey, mut broadcaster_delayed_payment_base: crate::c_types::PublicKey, mut broadcaster_htlc_base: crate::c_types::PublicKey, mut countersignatory_revocation_base: crate::c_types::PublicKey, mut countersignatory_htlc_base: crate::c_types::PublicKey) -> crate::c_types::derived::CResult_TxCreationKeysSecpErrorZ {
let mut ret = lightning::ln::chan_utils::TxCreationKeys::derive_new(&bitcoin::secp256k1::Secp256k1::new(), &per_commitment_point.into_rust(), &broadcaster_delayed_payment_base.into_rust(), &broadcaster_htlc_base.into_rust(), &countersignatory_revocation_base.into_rust(), &countersignatory_htlc_base.into_rust());
let mut local_ret = match ret { Ok(mut o) => crate::c_types::CResultTempl::ok( { crate::ln::chan_utils::TxCreationKeys { inner: Box::into_raw(Box::new(o)), is_owned: true } }), Err(mut e) => crate::c_types::CResultTempl::err( { crate::c_types::Secp256k1Error::from_rust(e) }) };
local_ret
}
/// Generate per-state keys from channel static keys.
/// The key set is asymmetric and can't be used as part of the counter-signatory's set of transactions.
#[must_use]
#[no_mangle]
pub extern "C" fn TxCreationKeys_from_channel_static_keys(mut per_commitment_point: crate::c_types::PublicKey, broadcaster_keys: &crate::ln::chan_utils::ChannelPublicKeys, countersignatory_keys: &crate::ln::chan_utils::ChannelPublicKeys) -> crate::c_types::derived::CResult_TxCreationKeysSecpErrorZ {
let mut ret = lightning::ln::chan_utils::TxCreationKeys::from_channel_static_keys(&per_commitment_point.into_rust(), unsafe { &*broadcaster_keys.inner }, unsafe { &*countersignatory_keys.inner }, &bitcoin::secp256k1::Secp256k1::new());
let mut local_ret = match ret { Ok(mut o) => crate::c_types::CResultTempl::ok( { crate::ln::chan_utils::TxCreationKeys { inner: Box::into_raw(Box::new(o)), is_owned: true } }), Err(mut e) => crate::c_types::CResultTempl::err( { crate::c_types::Secp256k1Error::from_rust(e) }) };
local_ret
}
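// A minimal usage sketch (not part of the generated bindings): build the full
// per-commitment key set from both sides' static channel keys, forwarding
// caller-supplied arguments to the binding above.
#[allow(dead_code)]
fn example_tx_creation_keys(
	per_commitment_point: crate::c_types::PublicKey,
	broadcaster_keys: &crate::ln::chan_utils::ChannelPublicKeys,
	countersignatory_keys: &crate::ln::chan_utils::ChannelPublicKeys,
) -> crate::c_types::derived::CResult_TxCreationKeysSecpErrorZ {
	TxCreationKeys_from_channel_static_keys(per_commitment_point, broadcaster_keys, countersignatory_keys)
}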
/// A script either spendable by the revocation
/// key or by the broadcaster_delayed_payment_key once the relative-locktime OP_CSV constraint is satisfied.
/// It encumbers a `to_holder` output on a commitment transaction or 2nd-stage HTLC transactions.
#[no_mangle]
pub extern "C" fn get_revokeable_redeemscript(mut revocation_key: crate::c_types::PublicKey, mut contest_delay: u16, mut broadcaster_delayed_payment_key: crate::c_types::PublicKey) -> crate::c_types::derived::CVec_u8Z {
let mut ret = lightning::ln::chan_utils::get_revokeable_redeemscript(&revocation_key.into_rust(), contest_delay, &broadcaster_delayed_payment_key.into_rust());
ret.into_bytes().into()
}
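// A minimal usage sketch (not part of the generated bindings): build the revokeable
// redeem script for a to_holder-style output with a hypothetical 144-block contest
// delay, given two already-wrapped public keys supplied by the caller.
#[allow(dead_code)]
fn example_revokeable_redeemscript(
	revocation_key: crate::c_types::PublicKey,
	broadcaster_delayed_payment_key: crate::c_types::PublicKey,
) -> crate::c_types::derived::CVec_u8Z {
	get_revokeable_redeemscript(revocation_key, 144, broadcaster_delayed_payment_key)
}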
use lightning::ln::chan_utils::HTLCOutputInCommitment as nativeHTLCOutputInCommitmentImport;
type nativeHTLCOutputInCommitment = nativeHTLCOutputInCommitmentImport;
/// Information about an HTLC as it appears in a commitment transaction
#[must_use]
#[repr(C)]
pub struct HTLCOutputInCommitment {
/// Nearly everywhere, inner must be non-null; however, in places where
/// the Rust equivalent takes an Option, it may be set to null to indicate None.
pub inner: *mut nativeHTLCOutputInCommitment,
pub is_owned: bool,
}
impl Drop for HTLCOutputInCommitment {
fn drop(&mut self) {
if self.is_owned && !self.inner.is_null() {
let _ = unsafe { Box::from_raw(self.inner) };
}
}
}
#[no_mangle]
pub extern "C" fn HTLCOutputInCommitment_free(this_ptr: HTLCOutputInCommitment) { }
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
extern "C" fn HTLCOutputInCommitment_free_void(this_ptr: *mut c_void) {
unsafe { let _ = Box::from_raw(this_ptr as *mut nativeHTLCOutputInCommitment); }
}
#[allow(unused)]
/// When moving out of the pointer, we have to ensure we aren't a reference; this makes that easy
impl HTLCOutputInCommitment {
pub(crate) fn take_inner(mut self) -> *mut nativeHTLCOutputInCommitment {
assert!(self.is_owned);
let ret = self.inner;
self.inner = std::ptr::null_mut();
ret
}
}
impl Clone for HTLCOutputInCommitment {
fn clone(&self) -> Self {
Self {
inner: Box::into_raw(Box::new(unsafe { &*self.inner }.clone())),
is_owned: true,
}
}
}
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
pub(crate) extern "C" fn HTLCOutputInCommitment_clone_void(this_ptr: *const c_void) -> *mut c_void {
Box::into_raw(Box::new(unsafe { (*(this_ptr as *mut nativeHTLCOutputInCommitment)).clone() })) as *mut c_void
}
#[no_mangle]
pub extern "C" fn HTLCOutputInCommitment_clone(orig: &HTLCOutputInCommitment) -> HTLCOutputInCommitment {
HTLCOutputInCommitment { inner: Box::into_raw(Box::new(unsafe { &*orig.inner }.clone())), is_owned: true }
}
/// Whether the HTLC was \"offered\" (ie outbound in relation to this commitment transaction).
/// Note that this is not the same as whether it is outbound *from us*. To determine that you
/// need to compare this value to whether the commitment transaction in question is that of
/// the counterparty or our own.
#[no_mangle]
pub extern "C" fn HTLCOutputInCommitment_get_offered(this_ptr: &HTLCOutputInCommitment) -> bool {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.offered;
(*inner_val)
}
/// Whether the HTLC was \"offered\" (ie outbound in relation to this commitment transaction).
/// Note that this is not the same as whether it is outbound *from us*. To determine that you
/// need to compare this value to whether the commitment transaction in question is that of
/// the counterparty or our own.
#[no_mangle]
pub extern "C" fn HTLCOutputInCommitment_set_offered(this_ptr: &mut HTLCOutputInCommitment, mut val: bool) {
unsafe { &mut *this_ptr.inner }.offered = val;
}
/// The value, in msat, of the HTLC. The value as it appears in the commitment transaction is
/// this divided by 1000.
#[no_mangle]
pub extern "C" fn HTLCOutputInCommitment_get_amount_msat(this_ptr: &HTLCOutputInCommitment) -> u64 {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.amount_msat;
(*inner_val)
}
/// The value, in msat, of the HTLC. The value as it appears in the commitment transaction is
/// this divided by 1000.
#[no_mangle]
pub extern "C" fn HTLCOutputInCommitment_set_amount_msat(this_ptr: &mut HTLCOutputInCommitment, mut val: u64) {
unsafe { &mut *this_ptr.inner }.amount_msat = val;
}
/// The CLTV lock-time at which this HTLC expires.
#[no_mangle]
pub extern "C" fn HTLCOutputInCommitment_get_cltv_expiry(this_ptr: &HTLCOutputInCommitment) -> u32 {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.cltv_expiry;
(*inner_val)
}
/// The CLTV lock-time at which this HTLC expires.
#[no_mangle]
pub extern "C" fn HTLCOutputInCommitment_set_cltv_expiry(this_ptr: &mut HTLCOutputInCommitment, mut val: u32) {
unsafe { &mut *this_ptr.inner }.cltv_expiry = val;
}
/// The hash of the preimage which unlocks this HTLC.
#[no_mangle]
pub extern "C" fn HTLCOutputInCommitment_get_payment_hash(this_ptr: &HTLCOutputInCommitment) -> *const [u8; 32] {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.payment_hash;
&(*inner_val).0
}
/// The hash of the preimage which unlocks this HTLC.
#[no_mangle]
pub extern "C" fn HTLCOutputInCommitment_set_payment_hash(this_ptr: &mut HTLCOutputInCommitment, mut val: crate::c_types::ThirtyTwoBytes) {
unsafe { &mut *this_ptr.inner }.payment_hash = ::lightning::ln::channelmanager::PaymentHash(val.data);
}
#[no_mangle]
pub extern "C" fn HTLCOutputInCommitment_write(obj: &HTLCOutputInCommitment) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &(*(*obj).inner) })
}
#[no_mangle]
pub(crate) extern "C" fn HTLCOutputInCommitment_write_void(obj: *const c_void) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &*(obj as *const nativeHTLCOutputInCommitment) })
}
#[no_mangle]
pub extern "C" fn HTLCOutputInCommitment_read(ser: crate::c_types::u8slice) -> HTLCOutputInCommitment {
if let Ok(res) = crate::c_types::deserialize_obj(ser) {
HTLCOutputInCommitment { inner: Box::into_raw(Box::new(res)), is_owned: true }
} else {
HTLCOutputInCommitment { inner: std::ptr::null_mut(), is_owned: true }
}
}
/// Gets the witness redeemscript for an HTLC output in a commitment transaction. Note that htlc
/// does not need to have its previous_output_index filled.
#[no_mangle]
pub extern "C" fn get_htlc_redeemscript(htlc: &crate::ln::chan_utils::HTLCOutputInCommitment, keys: &crate::ln::chan_utils::TxCreationKeys) -> crate::c_types::derived::CVec_u8Z {
let mut ret = lightning::ln::chan_utils::get_htlc_redeemscript(unsafe { &*htlc.inner }, unsafe { &*keys.inner });
ret.into_bytes().into()
}
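// A minimal usage sketch (not part of the generated bindings): fetch the witness
// redeem script for an HTLC output given its in-commitment description and the
// per-commitment key set, both supplied by the caller.
#[allow(dead_code)]
fn example_htlc_redeemscript(
	htlc: &crate::ln::chan_utils::HTLCOutputInCommitment,
	keys: &crate::ln::chan_utils::TxCreationKeys,
) -> crate::c_types::derived::CVec_u8Z {
	get_htlc_redeemscript(htlc, keys)
}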
/// Gets the redeemscript for a funding output from the two funding public keys.
/// Note that the order of funding public keys does not matter.
#[no_mangle]
pub extern "C" fn make_funding_redeemscript(mut broadcaster: crate::c_types::PublicKey, mut countersignatory: crate::c_types::PublicKey) -> crate::c_types::derived::CVec_u8Z {
let mut ret = lightning::ln::chan_utils::make_funding_redeemscript(&broadcaster.into_rust(), &countersignatory.into_rust());
ret.into_bytes().into()
}
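// A minimal usage sketch (not part of the generated bindings): build the 2-of-2
// funding redeem script from two hypothetical, hard-coded funding secrets. Real
// callers would use the funding_pubkey fields of both sides' ChannelPublicKeys.
#[allow(dead_code)]
fn example_funding_redeemscript() -> crate::c_types::derived::CVec_u8Z {
	let secp = bitcoin::secp256k1::Secp256k1::new();
	let holder_sk = ::bitcoin::secp256k1::key::SecretKey::from_slice(&[0x11; 32]).unwrap();
	let counterparty_sk = ::bitcoin::secp256k1::key::SecretKey::from_slice(&[0x22; 32]).unwrap();
	make_funding_redeemscript(
		crate::c_types::PublicKey::from_rust(&::bitcoin::secp256k1::key::PublicKey::from_secret_key(&secp, &holder_sk)),
		crate::c_types::PublicKey::from_rust(&::bitcoin::secp256k1::key::PublicKey::from_secret_key(&secp, &counterparty_sk)),
	)
}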
/// panics if htlc.transaction_output_index.is_none()!
#[no_mangle]
pub extern "C" fn build_htlc_transaction(prev_hash: *const [u8; 32], mut feerate_per_kw: u32, mut contest_delay: u16, htlc: &crate::ln::chan_utils::HTLCOutputInCommitment, mut broadcaster_delayed_payment_key: crate::c_types::PublicKey, mut revocation_key: crate::c_types::PublicKey) -> crate::c_types::Transaction {
let mut ret = lightning::ln::chan_utils::build_htlc_transaction(&::bitcoin::hash_types::Txid::from_slice(&unsafe { &*prev_hash }[..]).unwrap(), feerate_per_kw, contest_delay, unsafe { &*htlc.inner }, &broadcaster_delayed_payment_key.into_rust(), &revocation_key.into_rust());
let mut local_ret = ::bitcoin::consensus::encode::serialize(&ret);
crate::c_types::Transaction::from_vec(local_ret)
}
use lightning::ln::chan_utils::ChannelTransactionParameters as nativeChannelTransactionParametersImport;
type nativeChannelTransactionParameters = nativeChannelTransactionParametersImport;
/// Per-channel data used to build transactions in conjunction with the per-commitment data (CommitmentTransaction).
/// The fields are organized by holder/counterparty.
///
/// Normally, this is converted to the broadcaster/countersignatory-organized DirectedChannelTransactionParameters
/// before use, via the as_holder_broadcastable and as_counterparty_broadcastable functions.
#[must_use]
#[repr(C)]
pub struct ChannelTransactionParameters {
/// Nearly everywhere, inner must be non-null; however, in places where
/// the Rust equivalent takes an Option, it may be set to null to indicate None.
pub inner: *mut nativeChannelTransactionParameters,
pub is_owned: bool,
}
impl Drop for ChannelTransactionParameters {
fn drop(&mut self) {
if self.is_owned && !self.inner.is_null() {
let _ = unsafe { Box::from_raw(self.inner) };
}
}
}
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_free(this_ptr: ChannelTransactionParameters) { }
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
extern "C" fn ChannelTransactionParameters_free_void(this_ptr: *mut c_void) {
unsafe { let _ = Box::from_raw(this_ptr as *mut nativeChannelTransactionParameters); }
}
#[allow(unused)]
/// When moving out of the pointer, we have to ensure we aren't a reference; this makes that easy
impl ChannelTransactionParameters {
pub(crate) fn take_inner(mut self) -> *mut nativeChannelTransactionParameters {
assert!(self.is_owned);
let ret = self.inner;
self.inner = std::ptr::null_mut();
ret
}
}
impl Clone for ChannelTransactionParameters {
fn clone(&self) -> Self {
Self {
inner: Box::into_raw(Box::new(unsafe { &*self.inner }.clone())),
is_owned: true,
}
}
}
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
pub(crate) extern "C" fn ChannelTransactionParameters_clone_void(this_ptr: *const c_void) -> *mut c_void {
Box::into_raw(Box::new(unsafe { (*(this_ptr as *mut nativeChannelTransactionParameters)).clone() })) as *mut c_void
}
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_clone(orig: &ChannelTransactionParameters) -> ChannelTransactionParameters {
ChannelTransactionParameters { inner: Box::into_raw(Box::new(unsafe { &*orig.inner }.clone())), is_owned: true }
}
/// Holder public keys
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_get_holder_pubkeys(this_ptr: &ChannelTransactionParameters) -> crate::ln::chan_utils::ChannelPublicKeys {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.holder_pubkeys;
crate::ln::chan_utils::ChannelPublicKeys { inner: unsafe { ( (&((*inner_val)) as *const _) as *mut _) }, is_owned: false }
}
/// Holder public keys
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_set_holder_pubkeys(this_ptr: &mut ChannelTransactionParameters, mut val: crate::ln::chan_utils::ChannelPublicKeys) {
unsafe { &mut *this_ptr.inner }.holder_pubkeys = *unsafe { Box::from_raw(val.take_inner()) };
}
/// The contest delay selected by the holder, which applies to counterparty-broadcast transactions
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_get_holder_selected_contest_delay(this_ptr: &ChannelTransactionParameters) -> u16 {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.holder_selected_contest_delay;
(*inner_val)
}
/// The contest delay selected by the holder, which applies to counterparty-broadcast transactions
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_set_holder_selected_contest_delay(this_ptr: &mut ChannelTransactionParameters, mut val: u16) {
unsafe { &mut *this_ptr.inner }.holder_selected_contest_delay = val;
}
/// Whether the holder is the initiator of this channel.
/// This is an input to the commitment number obscure factor computation.
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_get_is_outbound_from_holder(this_ptr: &ChannelTransactionParameters) -> bool {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.is_outbound_from_holder;
(*inner_val)
}
/// Whether the holder is the initiator of this channel.
/// This is an input to the commitment number obscure factor computation.
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_set_is_outbound_from_holder(this_ptr: &mut ChannelTransactionParameters, mut val: bool) {
unsafe { &mut *this_ptr.inner }.is_outbound_from_holder = val;
}
/// The late-bound counterparty channel transaction parameters.
/// These parameters are populated at the point in the protocol where the counterparty provides them.
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_get_counterparty_parameters(this_ptr: &ChannelTransactionParameters) -> crate::ln::chan_utils::CounterpartyChannelTransactionParameters {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.counterparty_parameters;
let mut local_inner_val = crate::ln::chan_utils::CounterpartyChannelTransactionParameters { inner: unsafe { (if inner_val.is_none() { std::ptr::null() } else { { (inner_val.as_ref().unwrap()) } } as *const _) as *mut _ }, is_owned: false };
local_inner_val
}
/// The late-bound counterparty channel transaction parameters.
/// These parameters are populated at the point in the protocol where the counterparty provides them.
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_set_counterparty_parameters(this_ptr: &mut ChannelTransactionParameters, mut val: crate::ln::chan_utils::CounterpartyChannelTransactionParameters) {
let mut local_val = if val.inner.is_null() { None } else { Some( { *unsafe { Box::from_raw(val.take_inner()) } }) };
unsafe { &mut *this_ptr.inner }.counterparty_parameters = local_val;
}
/// The late-bound funding outpoint
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_get_funding_outpoint(this_ptr: &ChannelTransactionParameters) -> crate::chain::transaction::OutPoint {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.funding_outpoint;
let mut local_inner_val = crate::chain::transaction::OutPoint { inner: unsafe { (if inner_val.is_none() { std::ptr::null() } else { { (inner_val.as_ref().unwrap()) } } as *const _) as *mut _ }, is_owned: false };
local_inner_val
}
/// The late-bound funding outpoint
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_set_funding_outpoint(this_ptr: &mut ChannelTransactionParameters, mut val: crate::chain::transaction::OutPoint) {
let mut local_val = if val.inner.is_null() { None } else { Some( { *unsafe { Box::from_raw(val.take_inner()) } }) };
unsafe { &mut *this_ptr.inner }.funding_outpoint = local_val;
}
#[must_use]
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_new(mut holder_pubkeys_arg: crate::ln::chan_utils::ChannelPublicKeys, mut holder_selected_contest_delay_arg: u16, mut is_outbound_from_holder_arg: bool, mut counterparty_parameters_arg: crate::ln::chan_utils::CounterpartyChannelTransactionParameters, mut funding_outpoint_arg: crate::chain::transaction::OutPoint) -> ChannelTransactionParameters {
let mut local_counterparty_parameters_arg = if counterparty_parameters_arg.inner.is_null() { None } else { Some( { *unsafe { Box::from_raw(counterparty_parameters_arg.take_inner()) } }) };
let mut local_funding_outpoint_arg = if funding_outpoint_arg.inner.is_null() { None } else { Some( { *unsafe { Box::from_raw(funding_outpoint_arg.take_inner()) } }) };
ChannelTransactionParameters { inner: Box::into_raw(Box::new(nativeChannelTransactionParameters {
holder_pubkeys: *unsafe { Box::from_raw(holder_pubkeys_arg.take_inner()) },
holder_selected_contest_delay: holder_selected_contest_delay_arg,
is_outbound_from_holder: is_outbound_from_holder_arg,
counterparty_parameters: local_counterparty_parameters_arg,
funding_outpoint: local_funding_outpoint_arg,
})), is_owned: true }
}
use lightning::ln::chan_utils::CounterpartyChannelTransactionParameters as nativeCounterpartyChannelTransactionParametersImport;
type nativeCounterpartyChannelTransactionParameters = nativeCounterpartyChannelTransactionParametersImport;
/// Late-bound per-channel counterparty data used to build transactions.
#[must_use]
#[repr(C)]
pub struct CounterpartyChannelTransactionParameters {
/// Nearly everywhere, inner must be non-null; however, in places where
/// the Rust equivalent takes an Option, it may be set to null to indicate None.
pub inner: *mut nativeCounterpartyChannelTransactionParameters,
pub is_owned: bool,
}
impl Drop for CounterpartyChannelTransactionParameters {
fn drop(&mut self) {
if self.is_owned && !self.inner.is_null() {
let _ = unsafe { Box::from_raw(self.inner) };
}
}
}
#[no_mangle]
pub extern "C" fn CounterpartyChannelTransactionParameters_free(this_ptr: CounterpartyChannelTransactionParameters) { }
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
extern "C" fn CounterpartyChannelTransactionParameters_free_void(this_ptr: *mut c_void) {
unsafe { let _ = Box::from_raw(this_ptr as *mut nativeCounterpartyChannelTransactionParameters); }
}
#[allow(unused)]
/// When moving out of the pointer, we have to ensure we aren't a reference; this makes that easy
impl CounterpartyChannelTransactionParameters {
pub(crate) fn take_inner(mut self) -> *mut nativeCounterpartyChannelTransactionParameters {
assert!(self.is_owned);
let ret = self.inner;
self.inner = std::ptr::null_mut();
ret
}
}
impl Clone for CounterpartyChannelTransactionParameters {
fn clone(&self) -> Self {
Self {
inner: Box::into_raw(Box::new(unsafe { &*self.inner }.clone())),
is_owned: true,
}
}
}
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
pub(crate) extern "C" fn CounterpartyChannelTransactionParameters_clone_void(this_ptr: *const c_void) -> *mut c_void {
Box::into_raw(Box::new(unsafe { (*(this_ptr as *mut nativeCounterpartyChannelTransactionParameters)).clone() })) as *mut c_void
}
#[no_mangle]
pub extern "C" fn CounterpartyChannelTransactionParameters_clone(orig: &CounterpartyChannelTransactionParameters) -> CounterpartyChannelTransactionParameters {
CounterpartyChannelTransactionParameters { inner: Box::into_raw(Box::new(unsafe { &*orig.inner }.clone())), is_owned: true }
}
/// Counter-party public keys
#[no_mangle]
pub extern "C" fn CounterpartyChannelTransactionParameters_get_pubkeys(this_ptr: &CounterpartyChannelTransactionParameters) -> crate::ln::chan_utils::ChannelPublicKeys {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.pubkeys;
crate::ln::chan_utils::ChannelPublicKeys { inner: unsafe { ( (&((*inner_val)) as *const _) as *mut _) }, is_owned: false }
}
/// Counter-party public keys
#[no_mangle]
pub extern "C" fn CounterpartyChannelTransactionParameters_set_pubkeys(this_ptr: &mut CounterpartyChannelTransactionParameters, mut val: crate::ln::chan_utils::ChannelPublicKeys) {
unsafe { &mut *this_ptr.inner }.pubkeys = *unsafe { Box::from_raw(val.take_inner()) };
}
/// The contest delay selected by the counterparty, which applies to holder-broadcast transactions
#[no_mangle]
pub extern "C" fn CounterpartyChannelTransactionParameters_get_selected_contest_delay(this_ptr: &CounterpartyChannelTransactionParameters) -> u16 {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.selected_contest_delay;
(*inner_val)
}
/// The contest delay selected by the counterparty, which applies to holder-broadcast transactions
#[no_mangle]
pub extern "C" fn CounterpartyChannelTransactionParameters_set_selected_contest_delay(this_ptr: &mut CounterpartyChannelTransactionParameters, mut val: u16) {
unsafe { &mut *this_ptr.inner }.selected_contest_delay = val;
}
#[must_use]
#[no_mangle]
pub extern "C" fn CounterpartyChannelTransactionParameters_new(mut pubkeys_arg: crate::ln::chan_utils::ChannelPublicKeys, mut selected_contest_delay_arg: u16) -> CounterpartyChannelTransactionParameters {
CounterpartyChannelTransactionParameters { inner: Box::into_raw(Box::new(nativeCounterpartyChannelTransactionParameters {
pubkeys: *unsafe { Box::from_raw(pubkeys_arg.take_inner()) },
selected_contest_delay: selected_contest_delay_arg,
})), is_owned: true }
}
/// Whether the late bound parameters are populated.
#[must_use]
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_is_populated(this_arg: &ChannelTransactionParameters) -> bool {
let mut ret = unsafe { &*this_arg.inner }.is_populated();
ret
}
/// Convert the holder/counterparty parameters to broadcaster/countersignatory-organized parameters,
/// given that the holder is the broadcaster.
///
/// self.is_populated() must be true before calling this function.
#[must_use]
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_as_holder_broadcastable(this_arg: &ChannelTransactionParameters) -> crate::ln::chan_utils::DirectedChannelTransactionParameters {
let mut ret = unsafe { &*this_arg.inner }.as_holder_broadcastable();
crate::ln::chan_utils::DirectedChannelTransactionParameters { inner: Box::into_raw(Box::new(ret)), is_owned: true }
}
/// Convert the holder/counterparty parameters to broadcaster/countersignatory-organized parameters,
/// given that the counterparty is the broadcaster.
///
/// self.is_populated() must be true before calling this function.
#[must_use]
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_as_counterparty_broadcastable(this_arg: &ChannelTransactionParameters) -> crate::ln::chan_utils::DirectedChannelTransactionParameters {
let mut ret = unsafe { &*this_arg.inner }.as_counterparty_broadcastable();
crate::ln::chan_utils::DirectedChannelTransactionParameters { inner: Box::into_raw(Box::new(ret)), is_owned: true }
}
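// A minimal usage sketch (not part of the generated bindings): obtain the
// holder-as-broadcaster view of a parameter set, guarding on is_populated() as
// the documentation above requires.
#[allow(dead_code)]
fn example_holder_broadcastable(
	params: &ChannelTransactionParameters,
) -> Option<crate::ln::chan_utils::DirectedChannelTransactionParameters> {
	if ChannelTransactionParameters_is_populated(params) {
		Some(ChannelTransactionParameters_as_holder_broadcastable(params))
	} else {
		None
	}
}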
#[no_mangle]
pub extern "C" fn CounterpartyChannelTransactionParameters_write(obj: &CounterpartyChannelTransactionParameters) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &(*(*obj).inner) })
}
#[no_mangle]
pub(crate) extern "C" fn CounterpartyChannelTransactionParameters_write_void(obj: *const c_void) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &*(obj as *const nativeCounterpartyChannelTransactionParameters) })
}
#[no_mangle]
pub extern "C" fn CounterpartyChannelTransactionParameters_read(ser: crate::c_types::u8slice) -> CounterpartyChannelTransactionParameters {
if let Ok(res) = crate::c_types::deserialize_obj(ser) {
CounterpartyChannelTransactionParameters { inner: Box::into_raw(Box::new(res)), is_owned: true }
} else {
CounterpartyChannelTransactionParameters { inner: std::ptr::null_mut(), is_owned: true }
}
}
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_write(obj: &ChannelTransactionParameters) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &(*(*obj).inner) })
}
#[no_mangle]
pub(crate) extern "C" fn ChannelTransactionParameters_write_void(obj: *const c_void) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &*(obj as *const nativeChannelTransactionParameters) })
}
#[no_mangle]
pub extern "C" fn ChannelTransactionParameters_read(ser: crate::c_types::u8slice) -> ChannelTransactionParameters {
if let Ok(res) = crate::c_types::deserialize_obj(ser) {
ChannelTransactionParameters { inner: Box::into_raw(Box::new(res)), is_owned: true }
} else {
ChannelTransactionParameters { inner: std::ptr::null_mut(), is_owned: true }
}
}
use lightning::ln::chan_utils::DirectedChannelTransactionParameters as nativeDirectedChannelTransactionParametersImport;
type nativeDirectedChannelTransactionParameters = nativeDirectedChannelTransactionParametersImport<'static>;
/// Static channel fields used to build transactions given per-commitment fields, organized by
/// broadcaster/countersignatory.
///
/// This is derived from the holder/counterparty-organized ChannelTransactionParameters via the
/// as_holder_broadcastable and as_counterparty_broadcastable functions.
#[must_use]
#[repr(C)]
pub struct DirectedChannelTransactionParameters {
/// Nearly everywhere, inner must be non-null; however, in places where
/// the Rust equivalent takes an Option, it may be set to null to indicate None.
pub inner: *mut nativeDirectedChannelTransactionParameters,
pub is_owned: bool,
}
impl Drop for DirectedChannelTransactionParameters {
fn drop(&mut self) {
if self.is_owned && !self.inner.is_null() {
let _ = unsafe { Box::from_raw(self.inner) };
}
}
}
#[no_mangle]
pub extern "C" fn DirectedChannelTransactionParameters_free(this_ptr: DirectedChannelTransactionParameters) { }
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
extern "C" fn DirectedChannelTransactionParameters_free_void(this_ptr: *mut c_void) {
unsafe { let _ = Box::from_raw(this_ptr as *mut nativeDirectedChannelTransactionParameters); }
}
#[allow(unused)]
/// When moving out of the pointer, we have to ensure we aren't a reference; this makes that easy
impl DirectedChannelTransactionParameters {
pub(crate) fn take_inner(mut self) -> *mut nativeDirectedChannelTransactionParameters {
assert!(self.is_owned);
let ret = self.inner;
self.inner = std::ptr::null_mut();
ret
}
}
/// Get the channel pubkeys for the broadcaster
#[must_use]
#[no_mangle]
pub extern "C" fn DirectedChannelTransactionParameters_broadcaster_pubkeys(this_arg: &DirectedChannelTransactionParameters) -> crate::ln::chan_utils::ChannelPublicKeys {
let mut ret = unsafe { &*this_arg.inner }.broadcaster_pubkeys();
crate::ln::chan_utils::ChannelPublicKeys { inner: unsafe { ( (&(*ret) as *const _) as *mut _) }, is_owned: false }
}
/// Get the channel pubkeys for the countersignatory
#[must_use]
#[no_mangle]
pub extern "C" fn DirectedChannelTransactionParameters_countersignatory_pubkeys(this_arg: &DirectedChannelTransactionParameters) -> crate::ln::chan_utils::ChannelPublicKeys {
let mut ret = unsafe { &*this_arg.inner }.countersignatory_pubkeys();
crate::ln::chan_utils::ChannelPublicKeys { inner: unsafe { ( (&(*ret) as *const _) as *mut _) }, is_owned: false }
}
/// Get the contest delay applicable to the transactions.
/// Note that the contest delay was selected by the countersignatory.
#[must_use]
#[no_mangle]
pub extern "C" fn DirectedChannelTransactionParameters_contest_delay(this_arg: &DirectedChannelTransactionParameters) -> u16 {
let mut ret = unsafe { &*this_arg.inner }.contest_delay();
ret
}
/// Whether the channel is outbound from the broadcaster.
///
/// The boolean representing the side that initiated the channel is
/// an input to the commitment number obscure factor computation.
#[must_use]
#[no_mangle]
pub extern "C" fn DirectedChannelTransactionParameters_is_outbound(this_arg: &DirectedChannelTransactionParameters) -> bool {
let mut ret = unsafe { &*this_arg.inner }.is_outbound();
ret
}
/// The funding outpoint
#[must_use]
#[no_mangle]
pub extern "C" fn DirectedChannelTransactionParameters_funding_outpoint(this_arg: &DirectedChannelTransactionParameters) -> crate::chain::transaction::OutPoint {
let mut ret = unsafe { &*this_arg.inner }.funding_outpoint();
crate::c_types::bitcoin_to_C_outpoint(ret)
}
use lightning::ln::chan_utils::HolderCommitmentTransaction as nativeHolderCommitmentTransactionImport;
type nativeHolderCommitmentTransaction = nativeHolderCommitmentTransactionImport;
/// Information needed to build and sign a holder's commitment transaction.
///
/// The transaction is only signed once we are ready to broadcast.
#[must_use]
#[repr(C)]
pub struct HolderCommitmentTransaction {
/// Nearly everywhere, inner must be non-null; however, in places where
/// the Rust equivalent takes an Option, it may be set to null to indicate None.
pub inner: *mut nativeHolderCommitmentTransaction,
pub is_owned: bool,
}
impl Drop for HolderCommitmentTransaction {
fn drop(&mut self) {
if self.is_owned && !self.inner.is_null() {
let _ = unsafe { Box::from_raw(self.inner) };
}
}
}
#[no_mangle]
pub extern "C" fn HolderCommitmentTransaction_free(this_ptr: HolderCommitmentTransaction) { }
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
extern "C" fn HolderCommitmentTransaction_free_void(this_ptr: *mut c_void) {
unsafe { let _ = Box::from_raw(this_ptr as *mut nativeHolderCommitmentTransaction); }
}
#[allow(unused)]
/// When moving out of the pointer, we have to ensure we aren't a reference; this makes that easy
impl HolderCommitmentTransaction {
pub(crate) fn take_inner(mut self) -> *mut nativeHolderCommitmentTransaction {
assert!(self.is_owned);
let ret = self.inner;
self.inner = std::ptr::null_mut();
ret
}
}
impl Clone for HolderCommitmentTransaction {
fn clone(&self) -> Self {
Self {
inner: Box::into_raw(Box::new(unsafe { &*self.inner }.clone())),
is_owned: true,
}
}
}
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
pub(crate) extern "C" fn HolderCommitmentTransaction_clone_void(this_ptr: *const c_void) -> *mut c_void {
Box::into_raw(Box::new(unsafe { (*(this_ptr as *mut nativeHolderCommitmentTransaction)).clone() })) as *mut c_void
}
#[no_mangle]
pub extern "C" fn HolderCommitmentTransaction_clone(orig: &HolderCommitmentTransaction) -> HolderCommitmentTransaction {
HolderCommitmentTransaction { inner: Box::into_raw(Box::new(unsafe { &*orig.inner }.clone())), is_owned: true }
}
/// Our counterparty's signature for the transaction
#[no_mangle]
pub extern "C" fn HolderCommitmentTransaction_get_counterparty_sig(this_ptr: &HolderCommitmentTransaction) -> crate::c_types::Signature {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.counterparty_sig;
crate::c_types::Signature::from_rust(&(*inner_val))
}
/// Our counterparty's signature for the transaction
#[no_mangle]
pub extern "C" fn HolderCommitmentTransaction_set_counterparty_sig(this_ptr: &mut HolderCommitmentTransaction, mut val: crate::c_types::Signature) {
unsafe { &mut *this_ptr.inner }.counterparty_sig = val.into_rust();
}
/// All non-dust counterparty HTLC signatures, in the order they appear in the transaction
#[no_mangle]
pub extern "C" fn HolderCommitmentTransaction_set_counterparty_htlc_sigs(this_ptr: &mut HolderCommitmentTransaction, mut val: crate::c_types::derived::CVec_SignatureZ) {
let mut local_val = Vec::new(); for mut item in val.into_rust().drain(..) { local_val.push( { item.into_rust() }); };
unsafe { &mut *this_ptr.inner }.counterparty_htlc_sigs = local_val;
}
#[no_mangle]
pub extern "C" fn HolderCommitmentTransaction_write(obj: &HolderCommitmentTransaction) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &(*(*obj).inner) })
}
#[no_mangle]
pub(crate) extern "C" fn HolderCommitmentTransaction_write_void(obj: *const c_void) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &*(obj as *const nativeHolderCommitmentTransaction) })
}
#[no_mangle]
pub extern "C" fn HolderCommitmentTransaction_read(ser: crate::c_types::u8slice) -> HolderCommitmentTransaction {
if let Ok(res) = crate::c_types::deserialize_obj(ser) {
HolderCommitmentTransaction { inner: Box::into_raw(Box::new(res)), is_owned: true }
} else {
HolderCommitmentTransaction { inner: std::ptr::null_mut(), is_owned: true }
}
}
/// Create a new holder transaction with the given counterparty signatures.
/// The funding keys are used to figure out which signature should go first when building the transaction for broadcast.
#[must_use]
#[no_mangle]
pub extern "C" fn HolderCommitmentTransaction_new(mut commitment_tx: crate::ln::chan_utils::CommitmentTransaction, mut counterparty_sig: crate::c_types::Signature, mut counterparty_htlc_sigs: crate::c_types::derived::CVec_SignatureZ, mut holder_funding_key: crate::c_types::PublicKey, mut counterparty_funding_key: crate::c_types::PublicKey) -> HolderCommitmentTransaction {
let mut local_counterparty_htlc_sigs = Vec::new(); for mut item in counterparty_htlc_sigs.into_rust().drain(..) { local_counterparty_htlc_sigs.push( { item.into_rust() }); };
let mut ret = lightning::ln::chan_utils::HolderCommitmentTransaction::new(*unsafe { Box::from_raw(commitment_tx.take_inner()) }, counterparty_sig.into_rust(), local_counterparty_htlc_sigs, &holder_funding_key.into_rust(), &counterparty_funding_key.into_rust());
HolderCommitmentTransaction { inner: Box::into_raw(Box::new(ret)), is_owned: true }
}
use lightning::ln::chan_utils::BuiltCommitmentTransaction as nativeBuiltCommitmentTransactionImport;
type nativeBuiltCommitmentTransaction = nativeBuiltCommitmentTransactionImport;
/// A pre-built Bitcoin commitment transaction and its txid.
#[must_use]
#[repr(C)]
pub struct BuiltCommitmentTransaction {
/// Nearly everywhere, inner must be non-null; however, in places where
/// the Rust equivalent takes an Option, it may be set to null to indicate None.
pub inner: *mut nativeBuiltCommitmentTransaction,
pub is_owned: bool,
}
impl Drop for BuiltCommitmentTransaction {
fn drop(&mut self) {
if self.is_owned && !self.inner.is_null() {
let _ = unsafe { Box::from_raw(self.inner) };
}
}
}
#[no_mangle]
pub extern "C" fn BuiltCommitmentTransaction_free(this_ptr: BuiltCommitmentTransaction) { }
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
extern "C" fn BuiltCommitmentTransaction_free_void(this_ptr: *mut c_void) {
unsafe { let _ = Box::from_raw(this_ptr as *mut nativeBuiltCommitmentTransaction); }
}
#[allow(unused)]
/// When moving out of the pointer, we have to ensure we aren't a reference; this makes that easy
impl BuiltCommitmentTransaction {
pub(crate) fn take_inner(mut self) -> *mut nativeBuiltCommitmentTransaction {
assert!(self.is_owned);
let ret = self.inner;
self.inner = std::ptr::null_mut();
ret
}
}
impl Clone for BuiltCommitmentTransaction {
fn clone(&self) -> Self {
Self {
inner: Box::into_raw(Box::new(unsafe { &*self.inner }.clone())),
is_owned: true,
}
}
}
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
pub(crate) extern "C" fn BuiltCommitmentTransaction_clone_void(this_ptr: *const c_void) -> *mut c_void {
Box::into_raw(Box::new(unsafe { (*(this_ptr as *mut nativeBuiltCommitmentTransaction)).clone() })) as *mut c_void
}
#[no_mangle]
pub extern "C" fn BuiltCommitmentTransaction_clone(orig: &BuiltCommitmentTransaction) -> BuiltCommitmentTransaction {
BuiltCommitmentTransaction { inner: Box::into_raw(Box::new(unsafe { &*orig.inner }.clone())), is_owned: true }
}
/// The commitment transaction
#[no_mangle]
pub extern "C" fn BuiltCommitmentTransaction_get_transaction(this_ptr: &BuiltCommitmentTransaction) -> crate::c_types::Transaction {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.transaction;
let mut local_inner_val = ::bitcoin::consensus::encode::serialize(inner_val);
crate::c_types::Transaction::from_vec(local_inner_val)
}
/// The commitment transaction
#[no_mangle]
pub extern "C" fn BuiltCommitmentTransaction_set_transaction(this_ptr: &mut BuiltCommitmentTransaction, mut val: crate::c_types::Transaction) {
unsafe { &mut *this_ptr.inner }.transaction = val.into_bitcoin();
}
/// The txid for the commitment transaction.
///
/// This is provided as a performance optimization, instead of calling transaction.txid()
/// multiple times.
#[no_mangle]
pub extern "C" fn BuiltCommitmentTransaction_get_txid(this_ptr: &BuiltCommitmentTransaction) -> *const [u8; 32] {
let mut inner_val = &mut unsafe { &mut *this_ptr.inner }.txid;
(*inner_val).as_inner()
}
/// The txid for the commitment transaction.
///
/// This is provided as a performance optimization, instead of calling transaction.txid()
/// multiple times.
#[no_mangle]
pub extern "C" fn BuiltCommitmentTransaction_set_txid(this_ptr: &mut BuiltCommitmentTransaction, mut val: crate::c_types::ThirtyTwoBytes) {
unsafe { &mut *this_ptr.inner }.txid = ::bitcoin::hash_types::Txid::from_slice(&val.data[..]).unwrap();
}
#[must_use]
#[no_mangle]
pub extern "C" fn BuiltCommitmentTransaction_new(mut transaction_arg: crate::c_types::Transaction, mut txid_arg: crate::c_types::ThirtyTwoBytes) -> BuiltCommitmentTransaction {
BuiltCommitmentTransaction { inner: Box::into_raw(Box::new(nativeBuiltCommitmentTransaction {
transaction: transaction_arg.into_bitcoin(),
txid: ::bitcoin::hash_types::Txid::from_slice(&txid_arg.data[..]).unwrap(),
})), is_owned: true }
}
#[no_mangle]
pub extern "C" fn BuiltCommitmentTransaction_write(obj: &BuiltCommitmentTransaction) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &(*(*obj).inner) })
}
#[no_mangle]
pub(crate) extern "C" fn BuiltCommitmentTransaction_write_void(obj: *const c_void) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &*(obj as *const nativeBuiltCommitmentTransaction) })
}
#[no_mangle]
pub extern "C" fn BuiltCommitmentTransaction_read(ser: crate::c_types::u8slice) -> BuiltCommitmentTransaction {
if let Ok(res) = crate::c_types::deserialize_obj(ser) {
BuiltCommitmentTransaction { inner: Box::into_raw(Box::new(res)), is_owned: true }
} else {
BuiltCommitmentTransaction { inner: std::ptr::null_mut(), is_owned: true }
}
}
/// Get the SIGHASH_ALL sighash value of the transaction.
///
/// This can be used to verify a signature.
#[must_use]
#[no_mangle]
pub extern "C" fn BuiltCommitmentTransaction_get_sighash_all(this_arg: &BuiltCommitmentTransaction, mut funding_redeemscript: crate::c_types::u8slice, mut channel_value_satoshis: u64) -> crate::c_types::ThirtyTwoBytes {
let mut ret = unsafe { &*this_arg.inner }.get_sighash_all(&::bitcoin::blockdata::script::Script::from(Vec::from(funding_redeemscript.to_slice())), channel_value_satoshis);
crate::c_types::ThirtyTwoBytes { data: ret.as_ref().clone() }
}
/// Sign a transaction, either because we are counter-signing the counterparty's transaction or
/// because we are about to broadcast a holder transaction.
#[must_use]
#[no_mangle]
pub extern "C" fn BuiltCommitmentTransaction_sign(this_arg: &BuiltCommitmentTransaction, funding_key: *const [u8; 32], mut funding_redeemscript: crate::c_types::u8slice, mut channel_value_satoshis: u64) -> crate::c_types::Signature {
let mut ret = unsafe { &*this_arg.inner }.sign(&::bitcoin::secp256k1::key::SecretKey::from_slice(&unsafe { *funding_key}[..]).unwrap(), &::bitcoin::blockdata::script::Script::from(Vec::from(funding_redeemscript.to_slice())), channel_value_satoshis, &bitcoin::secp256k1::Secp256k1::new());
crate::c_types::Signature::from_rust(&ret)
}
use lightning::ln::chan_utils::CommitmentTransaction as nativeCommitmentTransactionImport;
type nativeCommitmentTransaction = nativeCommitmentTransactionImport;
/// This class tracks the per-transaction information needed to build a commitment transaction and to
/// actually build it and sign. It is used for holder transactions that we sign only when needed
/// and for transactions we sign for the counterparty.
///
/// This class can be used inside a signer implementation to generate a signature given the relevant
/// secret key.
#[must_use]
#[repr(C)]
pub struct CommitmentTransaction {
/// Nearly everywhere, inner must be non-null, however in places where
/// the Rust equivalent takes an Option, it may be set to null to indicate None.
pub inner: *mut nativeCommitmentTransaction,
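	/// When set, the underlying native object is freed when this struct is dropped
	/// (see the Drop implementation below).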
pub is_owned: bool,
}
impl Drop for CommitmentTransaction {
fn drop(&mut self) {
if self.is_owned && !self.inner.is_null() {
let _ = unsafe { Box::from_raw(self.inner) };
}
}
}
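/// Frees any resources used by the CommitmentTransaction, if is_owned is set and inner is non-NULL.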
#[no_mangle]
pub extern "C" fn CommitmentTransaction_free(this_ptr: CommitmentTransaction) { }
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
extern "C" fn CommitmentTransaction_free_void(this_ptr: *mut c_void) {
unsafe { let _ = Box::from_raw(this_ptr as *mut nativeCommitmentTransaction); }
}
#[allow(unused)]
/// When moving out of the pointer, we have to ensure we aren't a reference, this makes that easy
impl CommitmentTransaction {
pub(crate) fn take_inner(mut self) -> *mut nativeCommitmentTransaction {
assert!(self.is_owned);
let ret = self.inner;
self.inner = std::ptr::null_mut();
ret
}
}
impl Clone for CommitmentTransaction {
fn clone(&self) -> Self {
Self {
inner: Box::into_raw(Box::new(unsafe { &*self.inner }.clone())),
is_owned: true,
}
}
}
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
pub(crate) extern "C" fn CommitmentTransaction_clone_void(this_ptr: *const c_void) -> *mut c_void {
Box::into_raw(Box::new(unsafe { (*(this_ptr as *mut nativeCommitmentTransaction)).clone() })) as *mut c_void
}
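/// Creates a copy of the CommitmentTransaction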
#[no_mangle]
pub extern "C" fn CommitmentTransaction_clone(orig: &CommitmentTransaction) -> CommitmentTransaction {
CommitmentTransaction { inner: Box::into_raw(Box::new(unsafe { &*orig.inner }.clone())), is_owned: true }
}
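/// Serializes the CommitmentTransaction object into a byte array which can be read by CommitmentTransaction_read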
#[no_mangle]
pub extern "C" fn CommitmentTransaction_write(obj: &CommitmentTransaction) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &(*(*obj).inner) })
}
#[no_mangle]
pub(crate) extern "C" fn CommitmentTransaction_write_void(obj: *const c_void) -> crate::c_types::derived::CVec_u8Z {
crate::c_types::serialize_obj(unsafe { &*(obj as *const nativeCommitmentTransaction) })
}
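/// Read a CommitmentTransaction from a byte array, created by CommitmentTransaction_write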
#[no_mangle]
pub extern "C" fn CommitmentTransaction_read(ser: crate::c_types::u8slice) -> CommitmentTransaction {
if let Ok(res) = crate::c_types::deserialize_obj(ser) {
CommitmentTransaction { inner: Box::into_raw(Box::new(res)), is_owned: true }
} else {
CommitmentTransaction { inner: std::ptr::null_mut(), is_owned: true }
}
}
/// The backwards-counting commitment number
#[must_use]
#[no_mangle]
pub extern "C" fn CommitmentTransaction_commitment_number(this_arg: &CommitmentTransaction) -> u64 {
let mut ret = unsafe { &*this_arg.inner }.commitment_number();
ret
}
/// The value to be sent to the broadcaster
#[must_use]
#[no_mangle]
pub extern "C" fn CommitmentTransaction_to_broadcaster_value_sat(this_arg: &CommitmentTransaction) -> u64 {
let mut ret = unsafe { &*this_arg.inner }.to_broadcaster_value_sat();
ret
}
/// The value to be sent to the counterparty
#[must_use]
#[no_mangle]
pub extern "C" fn CommitmentTransaction_to_countersignatory_value_sat(this_arg: &CommitmentTransaction) -> u64 {
let mut ret = unsafe { &*this_arg.inner }.to_countersignatory_value_sat();
ret
}
/// The feerate paid per 1000-weight-unit in this commitment transaction.
#[must_use]
#[no_mangle]
pub extern "C" fn CommitmentTransaction_feerate_per_kw(this_arg: &CommitmentTransaction) -> u32 {
let mut ret = unsafe { &*this_arg.inner }.feerate_per_kw();
ret
}
/// Trust our pre-built transaction and derived transaction creation public keys.
///
/// Applies a wrapper which allows access to these fields.
///
/// This should only be used if you fully trust the builder of this object. It should not
/// be used by an external signer - instead use the verify function.
#[must_use]
#[no_mangle]
pub extern "C" fn CommitmentTransaction_trust(this_arg: &CommitmentTransaction) -> crate::ln::chan_utils::TrustedCommitmentTransaction {
let mut ret = unsafe { &*this_arg.inner }.trust();
crate::ln::chan_utils::TrustedCommitmentTransaction { inner: Box::into_raw(Box::new(ret)), is_owned: true }
}
/// Verify our pre-built transaction and derived transaction creation public keys.
///
/// Applies a wrapper which allows access to these fields.
///
/// An external validating signer must call this method before signing
/// or using the built transaction.
#[must_use]
#[no_mangle]
pub extern "C" fn CommitmentTransaction_verify(this_arg: &CommitmentTransaction, channel_parameters: &crate::ln::chan_utils::DirectedChannelTransactionParameters, broadcaster_keys: &crate::ln::chan_utils::ChannelPublicKeys, countersignatory_keys: &crate::ln::chan_utils::ChannelPublicKeys) -> crate::c_types::derived::CResult_TrustedCommitmentTransactionNoneZ {
let mut ret = unsafe { &*this_arg.inner }.verify(unsafe { &*channel_parameters.inner }, unsafe { &*broadcaster_keys.inner }, unsafe { &*countersignatory_keys.inner }, &bitcoin::secp256k1::Secp256k1::new());
let mut local_ret = match ret { Ok(mut o) => crate::c_types::CResultTempl::ok( { crate::ln::chan_utils::TrustedCommitmentTransaction { inner: Box::into_raw(Box::new(o)), is_owned: true } }), Err(mut e) => crate::c_types::CResultTempl::err( { 0u8 /*e*/ }) };
local_ret
}
use lightning::ln::chan_utils::TrustedCommitmentTransaction as nativeTrustedCommitmentTransactionImport;
type nativeTrustedCommitmentTransaction = nativeTrustedCommitmentTransactionImport<'static>;
/// A wrapper on CommitmentTransaction indicating that the derived fields (the built bitcoin
/// transaction and the transaction creation keys) are trusted.
///
/// See trust() and verify() functions on CommitmentTransaction.
///
/// This structure implements Deref.
#[must_use]
#[repr(C)]
pub struct TrustedCommitmentTransaction {
/// Nearly everywhere, inner must be non-null, however in places where
/// the Rust equivalent takes an Option, it may be set to null to indicate None.
pub inner: *mut nativeTrustedCommitmentTransaction,
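	/// When set, the underlying native object is freed when this struct is dropped
	/// (see the Drop implementation below).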
pub is_owned: bool,
}
impl Drop for TrustedCommitmentTransaction {
fn drop(&mut self) {
if self.is_owned && !self.inner.is_null() {
let _ = unsafe { Box::from_raw(self.inner) };
}
}
}
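/// Frees any resources used by the TrustedCommitmentTransaction, if is_owned is set and inner is non-NULL.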
#[no_mangle]
pub extern "C" fn TrustedCommitmentTransaction_free(this_ptr: TrustedCommitmentTransaction) { }
#[allow(unused)]
/// Used only if an object of this type is returned as a trait impl by a method
extern "C" fn TrustedCommitmentTransaction_free_void(this_ptr: *mut c_void) {
unsafe { let _ = Box::from_raw(this_ptr as *mut nativeTrustedCommitmentTransaction); }
}
#[allow(unused)]
/// When moving out of the pointer, we have to ensure we aren't a reference, this makes that easy
impl TrustedCommitmentTransaction {
pub(crate) fn take_inner(mut self) -> *mut nativeTrustedCommitmentTransaction {
assert!(self.is_owned);
let ret = self.inner;
self.inner = std::ptr::null_mut();
ret
}
}
/// The transaction ID of the built Bitcoin transaction
#[must_use]
#[no_mangle]
pub extern "C" fn TrustedCommitmentTransaction_txid(this_arg: &TrustedCommitmentTransaction) -> crate::c_types::ThirtyTwoBytes {
let mut ret = unsafe { &*this_arg.inner }.txid();
crate::c_types::ThirtyTwoBytes { data: ret.into_inner() }
}
/// The pre-built Bitcoin commitment transaction
#[must_use]
#[no_mangle]
pub extern "C" fn TrustedCommitmentTransaction_built_transaction(this_arg: &TrustedCommitmentTransaction) -> crate::ln::chan_utils::BuiltCommitmentTransaction {
let mut ret = unsafe { &*this_arg.inner }.built_transaction();
crate::ln::chan_utils::BuiltCommitmentTransaction { inner: unsafe { ( (&(*ret) as *const _) as *mut _) }, is_owned: false }
}
/// The pre-calculated transaction creation public keys.
#[must_use]
#[no_mangle]
pub extern "C" fn TrustedCommitmentTransaction_keys(this_arg: &TrustedCommitmentTransaction) -> crate::ln::chan_utils::TxCreationKeys {
let mut ret = unsafe { &*this_arg.inner }.keys();
crate::ln::chan_utils::TxCreationKeys { inner: unsafe { ( (&(*ret) as *const _) as *mut _) }, is_owned: false }
}
/// Get a signature for each HTLC which was included in the commitment transaction (ie for
/// which HTLCOutputInCommitment::transaction_output_index.is_some()).
///
/// The returned Vec has one entry for each HTLC, and in the same order.
#[must_use]
#[no_mangle]
pub extern "C" fn TrustedCommitmentTransaction_get_htlc_sigs(this_arg: &TrustedCommitmentTransaction, htlc_base_key: *const [u8; 32], channel_parameters: &crate::ln::chan_utils::DirectedChannelTransactionParameters) -> crate::c_types::derived::CResult_CVec_SignatureZNoneZ {
let mut ret = unsafe { &*this_arg.inner }.get_htlc_sigs(&::bitcoin::secp256k1::key::SecretKey::from_slice(&unsafe { *htlc_base_key}[..]).unwrap(), unsafe { &*channel_parameters.inner }, &bitcoin::secp256k1::Secp256k1::new());
let mut local_ret = match ret { Ok(mut o) => crate::c_types::CResultTempl::ok( { let mut local_ret_0 = Vec::new(); for item in o.drain(..) { local_ret_0.push( { crate::c_types::Signature::from_rust(&item) }); }; local_ret_0.into() }), Err(mut e) => crate::c_types::CResultTempl::err( { 0u8 /*e*/ }) };
local_ret
}
/// Get the transaction number obscure factor
#[no_mangle]
pub extern "C" fn get_commitment_transaction_number_obscure_factor(mut broadcaster_payment_basepoint: crate::c_types::PublicKey, mut countersignatory_payment_basepoint: crate::c_types::PublicKey, mut outbound_from_broadcaster: bool) -> u64 {
let mut ret = lightning::ln::chan_utils::get_commitment_transaction_number_obscure_factor(&broadcaster_payment_basepoint.into_rust(), &countersignatory_payment_basepoint.into_rust(), outbound_from_broadcaster);
ret
}
| 52.085215 | 402 | 0.762812 |
9ba5f0fe9956018b64792b3a43ed4f14494aa813
| 9,773 |
use cannyls::deadline::Deadline;
use fibers::sync::mpsc;
use futures::{Async, Future, Poll};
use raftlog::log::{Log, LogSuffix};
use raftlog::{Error, ErrorKind};
use slog::Logger;
use std::mem;
use std::time::Instant;
use trackable::error::ErrorKindExt;
use super::log_prefix::{LoadLogPrefix, SaveLogPrefix};
use super::log_suffix::{LoadLogSuffix, SaveLogSuffix};
use super::{into_box_future, BoxFuture, Event, Handle, LocalNodeId, StorageMetrics};
/// A `Future` implementation for saving the local Raft log.
// #[derive(Debug)]
pub struct SaveLog {
pub(crate) inner: SaveLogInner,
started_at: Instant,
metrics: StorageMetrics,
}
impl SaveLog {
/// Creates a new `SaveLog` instance.
pub(crate) fn new(inner: SaveLogInner, metrics: StorageMetrics) -> Self {
Self {
inner,
started_at: Instant::now(),
metrics,
}
}
}
impl Future for SaveLog {
type Item = ();
type Error = Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
if let Async::Ready(()) = track!(self.inner.poll())? {
let elapsed = prometrics::timestamp::duration_to_seconds(self.started_at.elapsed());
self.metrics.save_log_duration_seconds.observe(elapsed);
Ok(Async::Ready(()))
} else {
Ok(Async::NotReady)
}
}
}
// #[derive(Debug)]
#[allow(clippy::large_enum_variant)]
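/// The actual work driven by `SaveLog`: saving the log suffix, saving the log
/// prefix, or an operation that has already failed and returns its error when polled.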
pub enum SaveLogInner {
Suffix(SaveLogSuffix),
Prefix(SaveLogPrefix),
Failed(Error),
}
impl Future for SaveLogInner {
type Item = ();
type Error = Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
match *self {
SaveLogInner::Suffix(ref mut f) => track!(f.poll()),
SaveLogInner::Prefix(ref mut f) => track!(f.poll()),
SaveLogInner::Failed(ref mut e) => {
let e = mem::replace(e, ErrorKind::Other.error().into());
Err(track!(e))
}
}
}
}
/// A `Future` implementation for loading the local Raft log.
// #[derive(Debug)]
pub struct LoadLog {
pub(crate) inner: LoadLogInner,
started_at: Instant,
metrics: StorageMetrics,
}
impl LoadLog {
/// Creates a new `LoadLog` instance.
pub(crate) fn new(inner: LoadLogInner, metrics: StorageMetrics) -> Self {
Self {
inner,
started_at: Instant::now(),
metrics,
}
}
}
impl Future for LoadLog {
type Item = Log;
type Error = Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
if let Async::Ready(item) = track!(self.inner.poll())? {
let elapsed = prometrics::timestamp::duration_to_seconds(self.started_at.elapsed());
self.metrics.load_log_duration_seconds.observe(elapsed);
Ok(Async::Ready(item))
} else {
Ok(Async::NotReady)
}
}
}
// #[derive(Debug)]
#[allow(clippy::large_enum_variant)]
pub(crate) enum LoadLogInner {
LoadLogPrefix {
next: Option<LoadLogSuffix>,
event_tx: Option<mpsc::Sender<Event>>,
future: LoadLogPrefix,
},
LoadLogSuffix(LoadLogSuffix),
CopyLogSuffix(LogSuffix),
Failed(Error),
}
impl Future for LoadLogInner {
type Item = Log;
type Error = Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
loop {
let next = match *self {
LoadLogInner::LoadLogSuffix(ref mut f) => {
return Ok(track!(f.poll())?.map(Log::Suffix));
}
LoadLogInner::CopyLogSuffix(ref mut f) => {
let suffix = mem::replace(f, Default::default());
return Ok(Async::Ready(Log::Suffix(suffix)));
}
LoadLogInner::LoadLogPrefix {
ref mut next,
ref mut future,
ref mut event_tx,
} => {
match track!(future.poll())? {
Async::NotReady => return Ok(Async::NotReady),
Async::Ready(None) => {
                            // If the log prefix has not been saved yet, try loading the log suffix instead
let next =
track_assert_some!(next.take(), ErrorKind::InconsistentState);
LoadLogInner::LoadLogSuffix(next)
}
Async::Ready(Some(p)) => {
if let Some(tx) = event_tx.take() {
let _ = tx.send(Event::LogPrefixUpdated { new_head: p.tail });
}
return Ok(Async::Ready(Log::Prefix(p)));
}
}
}
LoadLogInner::Failed(ref mut e) => {
let e = mem::replace(e, ErrorKind::Other.error().into());
return Err(track!(e));
}
};
*self = next;
}
}
}
/// A `Future` implementation for deleting the local Raft log.
pub struct DeleteLog {
logger: Logger,
event_tx: mpsc::Sender<Event>,
future: BoxFuture<()>,
}
impl DeleteLog {
/// Creates a new `DeleteLog` instance.
pub(crate) fn new(handle: &Handle, event_tx: mpsc::Sender<Event>, node: LocalNodeId) -> Self {
let logger = handle.logger.clone();
let future = into_box_future(
handle
.device
.request()
.deadline(Deadline::Infinity)
.wait_for_running()
.delete_range(node.to_available_lump_id_range())
.map(|_| ()),
);
info!(logger, "[START] DeleteLog");
Self {
logger,
event_tx,
future,
}
}
}
impl Future for DeleteLog {
type Item = ();
type Error = Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
if let Async::Ready(()) = track!(self.future.poll())? {
info!(self.logger, "[FINISH] DeleteLog");
let _ = self.event_tx.send(Event::LogSuffixDeleted);
Ok(Async::Ready(()))
} else {
Ok(Async::NotReady)
}
}
}
#[cfg(test)]
mod tests {
use cannyls::lump::LumpData;
use raftlog::cluster::ClusterConfig;
use raftlog::election::Term;
use raftlog::log::{LogEntry, LogIndex, LogPosition, LogPrefix, LogSuffix};
use std::collections::btree_set::BTreeSet;
use trackable::result::TestResult;
use test_util::{run_test_with_storage, wait_for};
use LocalNodeId;
#[test]
fn delete_log_prefix_works() -> TestResult {
let node_id = LocalNodeId::new([0, 11, 222, 3, 44, 5, 66]);
run_test_with_storage(node_id, |(mut storage, device)| {
let term = Term::new(3);
let log_prefix = LogPrefix {
tail: LogPosition {
prev_term: term,
index: LogIndex::new(0),
},
config: ClusterConfig::new(BTreeSet::new()),
snapshot: vec![],
};
wait_for(storage.save_log_prefix(log_prefix))?;
let lump_id = node_id.to_log_prefix_index_lump_id();
let result = wait_for(device.handle().request().head(lump_id))?;
assert!(result.is_some());
wait_for(storage.delete_log())?;
let lump_id = node_id.to_log_prefix_index_lump_id();
let result = wait_for(device.handle().request().head(lump_id))?;
assert!(result.is_none());
Ok(())
})
}
#[test]
fn delete_log_works_without_suffix() -> TestResult {
        // Verify that deleting the log suffix does not fail when no suffix has been saved.
let node_id = LocalNodeId::new([0, 11, 222, 3, 44, 5, 66]);
run_test_with_storage(node_id, |(mut storage, _device)| {
wait_for(storage.delete_log())?;
Ok(())
})
}
#[test]
fn delete_log_suffix_works_with_suffix() -> TestResult {
        // Save the log suffix once, then delete it, and verify that the entire suffix has been removed.
let node_id = LocalNodeId::new([0, 11, 222, 3, 44, 5, 66]);
let next_node_id = LocalNodeId::new([0, 11, 222, 3, 44, 5, 67]);
run_test_with_storage(node_id, |(mut storage, device)| {
let term = Term::new(0);
let log_entries = vec![LogEntry::Noop { term }; 3];
let log_suffix = LogSuffix {
head: LogPosition {
prev_term: term,
index: LogIndex::new(0),
},
entries: log_entries.clone(),
};
let non_deleted_lump_id = next_node_id.to_log_entry_lump_id(LogIndex::new(0));
let lump_data = track!(LumpData::new(vec![]))?;
wait_for(storage.save_log_suffix(&log_suffix))?;
for i in 0..log_entries.len() {
let lump_id = node_id.to_log_entry_lump_id(LogIndex::new(i as u64));
let result = wait_for(device.handle().request().head(lump_id))?;
assert!(result.is_some());
}
let _ = wait_for(
device
.handle()
.request()
.put(non_deleted_lump_id, lump_data),
);
wait_for(storage.delete_log())?;
            // To catch potential off-by-one bugs, also check slightly beyond the saved range and confirm that no lumps remain
for i in 0..(log_entries.len() * 2) {
let lump_id = node_id.to_log_entry_lump_id(LogIndex::new(i as u64));
let result = wait_for(device.handle().request().head(lump_id))?;
assert!(result.is_none());
}
assert!(wait_for(device.handle().request().get(non_deleted_lump_id))
.unwrap()
.is_some());
Ok(())
})
}
}
| 33.128814 | 98 | 0.530646 |
3a3a1c3f0ac1cc070ea52e761909f4c17f6c51cd
| 53,003 |
//! Traits for matrices operations.
//!
//! These traits define operations for structs representing matrices arranged in row-major order.
//!
//! Implementations are provided for
//! - `Matrix`: an owned matrix
//! - `MatrixSlice`: a borrowed immutable block of `Matrix`
//! - `MatrixSliceMut`: a borrowed mutable block of `Matrix`
//!
//! ```
//! use rulinalg::matrix::{Matrix, BaseMatrix};
//!
//! let a = Matrix::new(3,3, (0..9).collect::<Vec<usize>>());
//!
//! // Manually create our slice - [[4,5],[7,8]].
//! let mat_slice = a.sub_slice([0,1], 3, 2);
//!
//! // We can perform arithmetic with mixing owned and borrowed versions
//! let _new_mat = &mat_slice.transpose() * &a;
//! ```
use matrix::{Matrix, MatrixSlice, MatrixSliceMut};
use matrix::{Cols, ColsMut, Row, RowMut, Column, ColumnMut, Rows, RowsMut, Axes};
use matrix::{DiagOffset, Diagonal, DiagonalMut};
use matrix::{back_substitution, forward_substitution};
use matrix::{SliceIter, SliceIterMut};
use norm::{MatrixNorm, MatrixMetric};
use vector::Vector;
use utils;
use libnum::Zero;
use error::Error;
use core::any::Any;
use core::cmp::min;
use core::marker::PhantomData;
use core::mem;
use core::ops::{Add, Mul, Div};
use core::ptr;
use core::slice;
use num_traits::float::FloatCore;
use libm::F32Ext;
use alloc::vec::Vec;
mod impl_base;
/// Trait for immutable matrix structs.
pub trait BaseMatrix<T>: Sized {
/// Rows in the matrix.
fn rows(&self) -> usize;
/// Columns in the matrix.
fn cols(&self) -> usize;
/// Row stride in the matrix.
fn row_stride(&self) -> usize;
    /// Returns true if the matrix contains no elements
fn is_empty(&self) -> bool {
self.rows() == 0 || self.cols() == 0
}
/// Top left index of the matrix.
fn as_ptr(&self) -> *const T;
/// Returns a `MatrixSlice` over the whole matrix.
///
/// # Examples
///
/// ```
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let a = Matrix::new(3, 3, vec![2.0; 9]);
/// let b = a.as_slice();
/// ```
fn as_slice(&self) -> MatrixSlice<T> {
unsafe {
MatrixSlice::from_raw_parts(self.as_ptr(), self.rows(), self.cols(), self.row_stride())
}
}
/// Get a reference to an element in the matrix without bounds checking.
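    ///
    /// The caller must ensure that `index` lies within the matrix dimensions;
    /// otherwise the behaviour is undefined.
    ///
    /// # Examples
    ///
    /// ```
    /// # #[macro_use] extern crate rulinalg; fn main() {
    /// use rulinalg::matrix::{Matrix, BaseMatrix};
    ///
    /// let mat = matrix![0, 1;
    ///                   2, 3];
    ///
    /// let val = unsafe { mat.get_unchecked([1, 0]) };
    /// assert_eq!(*val, 2);
    /// # }
    /// ```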
unsafe fn get_unchecked(&self, index: [usize; 2]) -> &T {
&*(self.as_ptr().offset((index[0] * self.row_stride() + index[1]) as isize))
}
/// Get a reference to an element in the matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let mat = matrix![0, 1;
/// 3, 4;
/// 6, 7];
///
/// assert_eq!(mat.get([0, 2]), None);
/// assert_eq!(mat.get([3, 0]), None);
///
/// assert_eq!( *mat.get([0, 0]).unwrap(), 0)
/// # }
/// ```
fn get(&self, index: [usize; 2]) -> Option<&T> {
let row_ind = index[0];
let col_ind = index[1];
if row_ind >= self.rows() || col_ind >= self.cols() {
None
} else {
unsafe { Some(self.get_unchecked(index)) }
}
}
/// Returns the column of a matrix at the given index.
/// `None` if the index is out of bounds.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let mat = matrix![0, 1, 2;
/// 3, 4, 5;
/// 6, 7, 8];
/// let col = mat.col(1);
/// let expected = matrix![1usize; 4; 7];
/// assert_matrix_eq!(*col, expected);
/// # }
/// ```
///
/// # Panics
///
/// Will panic if the column index is out of bounds.
fn col(&self, index: usize) -> Column<T> {
if index < self.cols() {
unsafe { self.col_unchecked(index) }
} else {
panic!("Column index out of bounds.")
}
}
/// Returns the column of a matrix at the given
/// index without doing a bounds check.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let mat = matrix![0, 1, 2;
/// 3, 4, 5;
/// 6, 7, 8];
/// let col = unsafe { mat.col_unchecked(2) };
/// let expected = matrix![2usize; 5; 8];
/// assert_matrix_eq!(*col, expected);
/// # }
/// ```
unsafe fn col_unchecked(&self, index: usize) -> Column<T> {
let ptr = self.as_ptr().offset(index as isize);
Column { col: MatrixSlice::from_raw_parts(ptr, self.rows(), 1, self.row_stride()) }
}
/// Returns the row of a matrix at the given index.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let mat = matrix![0, 1, 2;
/// 3, 4, 5;
/// 6, 7, 8];
/// let row = mat.row(1);
/// let expected = matrix![3usize, 4, 5];
/// assert_matrix_eq!(*row, expected);
/// # }
/// ```
///
/// # Panics
///
/// Will panic if the row index is out of bounds.
fn row(&self, index: usize) -> Row<T> {
if index < self.rows() {
unsafe { self.row_unchecked(index) }
} else {
panic!("Row index out of bounds.")
}
}
    /// Returns the row of a matrix at the given index without doing a bounds check
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let mat = matrix![0, 1, 2;
/// 3, 4, 5;
/// 6, 7, 8];
/// let row = unsafe { mat.row_unchecked(2) };
/// let expected = matrix![6usize, 7, 8];
/// assert_matrix_eq!(*row, expected);
/// # }
/// ```
unsafe fn row_unchecked(&self, index: usize) -> Row<T> {
let ptr = self.as_ptr().offset((self.row_stride() * index) as isize);
Row { row: MatrixSlice::from_raw_parts(ptr, 1, self.cols(), self.row_stride()) }
}
/// Returns an iterator over the matrix data.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let mat = matrix![0, 1, 2;
/// 3, 4, 5;
/// 6, 7, 8];
/// let slice = mat.sub_slice([1, 1], 2, 2);
///
/// let slice_data = slice.iter().map(|v| *v).collect::<Vec<usize>>();
/// assert_eq!(slice_data, vec![4, 5, 7, 8]);
/// # }
/// ```
fn iter<'a>(&self) -> SliceIter<'a, T>
where T: 'a
{
SliceIter {
slice_start: self.as_ptr(),
row_pos: 0,
col_pos: 0,
slice_rows: self.rows(),
slice_cols: self.cols(),
row_stride: self.row_stride(),
_marker: PhantomData::<&T>,
}
}
/// Iterate over the columns of the matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let a = matrix![0, 1;
/// 2, 3;
/// 4, 5];
///
/// let mut iter = a.col_iter();
///
/// assert_matrix_eq!(*iter.next().unwrap(), matrix![ 0; 2; 4 ]);
/// assert_matrix_eq!(*iter.next().unwrap(), matrix![ 1; 3; 5 ]);
/// assert!(iter.next().is_none());
/// # }
/// ```
fn col_iter(&self) -> Cols<T> {
Cols {
_marker: PhantomData::<&T>,
col_pos: 0,
row_stride: self.row_stride() as isize,
slice_cols: self.cols(),
slice_rows: self.rows(),
slice_start: self.as_ptr(),
}
}
/// Iterate over the rows of the matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
/// let a = matrix![0, 1, 2;
/// 3, 4, 5;
/// 6, 7, 8];
///
/// let mut iter = a.row_iter();
///
/// assert_matrix_eq!(*iter.next().unwrap(), matrix![ 0, 1, 2 ]);
/// assert_matrix_eq!(*iter.next().unwrap(), matrix![ 3, 4, 5 ]);
/// assert_matrix_eq!(*iter.next().unwrap(), matrix![ 6, 7, 8 ]);
/// assert!(iter.next().is_none());
/// # }
/// ```
fn row_iter(&self) -> Rows<T> {
Rows {
slice_start: self.as_ptr(),
row_pos: 0,
slice_rows: self.rows(),
slice_cols: self.cols(),
row_stride: self.row_stride() as isize,
_marker: PhantomData::<&T>,
}
}
/// Iterate over diagonal entries
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg;
///
/// # fn main() {
/// use rulinalg::matrix::{DiagOffset, Matrix, BaseMatrix};
///
/// let a = matrix![0, 1, 2;
/// 3, 4, 5;
/// 6, 7, 8];
/// // Print super diag [1, 5]
/// for d in a.diag_iter(DiagOffset::Above(1)) {
/// println!("{}", d);
/// }
///
/// // Print sub diag [3, 7]
/// // Equivalent to `diag_iter(DiagOffset::Below(1))`
/// for d in a.diag_iter(DiagOffset::from(-1)) {
/// println!("{}", d);
/// }
/// # }
/// ```
///
/// # Panics
///
/// If using an `Above` or `Below` offset which is
/// out-of-bounds this function will panic.
///
/// This function will never panic if the `Main` diagonal
/// offset is used.
fn diag_iter(&self, k: DiagOffset) -> Diagonal<T, Self> {
let (diag_len, diag_start) = match k.into() {
DiagOffset::Main => (min(self.rows(), self.cols()), 0),
DiagOffset::Above(m) => {
assert!(m < self.cols(),
"Offset diagonal is not within matrix dimensions.");
(min(self.rows(), self.cols() - m), m)
}
DiagOffset::Below(m) => {
assert!(m < self.rows(),
"Offset diagonal is not within matrix dimensions.");
(min(self.rows() - m, self.cols()), m * self.row_stride())
}
};
Diagonal {
matrix: self,
diag_pos: diag_start,
diag_end: diag_start + diag_len.saturating_sub(1) * self.row_stride() + diag_len,
_marker: PhantomData::<&T>,
}
}
/// The sum of the rows of the matrix.
///
    /// Returns a Vector equal to the sums of elements over the matrix's rows.
///
/// Note that the resulting vector is identical to the sums of
/// elements along each column of the matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let a = matrix![1.0, 2.0;
/// 3.0, 4.0];
///
/// let c = a.sum_rows();
/// assert_eq!(c, vector![4.0, 6.0]);
/// # }
/// ```
fn sum_rows(&self) -> Vector<T>
where T: Copy + Zero + Add<T, Output = T>
{
let mut sum_rows = vec![T::zero(); self.cols()];
for row in self.row_iter() {
utils::in_place_vec_bin_op(&mut sum_rows, row.raw_slice(), |sum, &r| *sum = *sum + r);
}
Vector::new(sum_rows)
}
/// The sum of the columns of the matrix.
///
    /// Returns a Vector equal to the sums of elements over the matrix's columns.
///
/// Note that the resulting vector is identical to the sums of
/// elements along each row of the matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let a = matrix![1.0, 2.0;
/// 3.0, 4.0];
///
/// let c = a.sum_cols();
/// assert_eq!(c, vector![3.0, 7.0]);
/// # }
/// ```
fn sum_cols(&self) -> Vector<T>
where T: Copy + Zero + Add<T, Output = T>
{
let mut col_sum = Vec::with_capacity(self.rows());
col_sum.extend(self.row_iter().map(|row| utils::unrolled_sum(row.raw_slice())));
Vector::new(col_sum)
}
/// Compute given matrix norm for matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::BaseMatrix;
/// use rulinalg::norm::Euclidean;
///
/// let a = matrix![3.0, 4.0];
/// let c = a.norm(Euclidean);
///
/// assert_eq!(c, 5.0);
/// # }
/// ```
fn norm<N: MatrixNorm<T, Self>>(&self, norm: N) -> T
where T: FloatCore
{
norm.norm(self)
}
/// Compute the metric distance between two matrices.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::BaseMatrix;
/// use rulinalg::norm::Euclidean;
///
/// let a = matrix![3.0, 4.0;
/// 1.0, 2.0];
/// let b = matrix![2.0, 5.0;
/// 0.0, 3.0];
///
/// // Compute the square root of the sum of
/// // elementwise squared-differences
/// let c = a.metric(&b, Euclidean);
///
/// assert_eq!(c, 2.0);
/// # }
/// ```
fn metric<'a, 'b, B, M>(&'a self, mat: &'b B, metric: M) -> T
where B: 'b + BaseMatrix<T>,
M: MatrixMetric<'a, 'b, T, Self, B>
{
metric.metric(self, mat)
}
/// The sum of all elements in the matrix
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::BaseMatrix;
///
/// let a = matrix![1.0, 2.0;
/// 3.0, 4.0];
///
/// let c = a.sum();
/// assert_eq!(c, 10.0);
/// # }
/// ```
fn sum(&self) -> T
where T: Copy + Zero + Add<T, Output = T>
{
self.row_iter()
.fold(T::zero(),
|sum, row| sum + utils::unrolled_sum(row.raw_slice()))
}
/// The min of the specified axis of the matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix, Axes};
///
/// let a = matrix![1.0, 2.0;
/// 3.0, 4.0];
///
/// let cmin = a.min(Axes::Col);
/// assert_eq!(cmin, vector![1.0, 3.0]);
///
/// let rmin = a.min(Axes::Row);
/// assert_eq!(rmin, vector![1.0, 2.0]);
/// # }
/// ```
fn min(&self, axis: Axes) -> Vector<T>
where T: Copy + PartialOrd
{
match axis {
Axes::Col => {
let mut mins: Vec<T> = Vec::with_capacity(self.rows());
for row in self.row_iter() {
let min = row.iter()
.skip(1)
.fold(row[0], |m, &v| if v < m { v } else { m });
mins.push(min);
}
Vector::new(mins)
}
Axes::Row => {
let mut mins: Vec<T> = self.row(0).raw_slice().into();
for row in self.row_iter().skip(1) {
utils::in_place_vec_bin_op(&mut mins, row.raw_slice(), |min, &r| if r < *min {
*min = r;
});
}
Vector::new(mins)
}
}
}
/// The max of the specified axis of the matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{BaseMatrix, Axes};
///
/// let a = matrix![1.0, 2.0;
/// 3.0, 4.0];
///
/// let cmax = a.max(Axes::Col);
/// assert_eq!(cmax, vector![2.0, 4.0]);
///
/// let rmax = a.max(Axes::Row);
/// assert_eq!(rmax, vector![3.0, 4.0]);
/// # }
/// ```
fn max(&self, axis: Axes) -> Vector<T>
where T: Copy + PartialOrd
{
match axis {
Axes::Col => {
let mut maxs: Vec<T> = Vec::with_capacity(self.rows());
for row in self.row_iter() {
let max = row.iter()
.skip(1)
.fold(row[0], |m, &v| if v > m { v } else { m });
maxs.push(max);
}
Vector::new(maxs)
}
Axes::Row => {
let mut maxs: Vec<T> = self.row(0).raw_slice().into();
for row in self.row_iter().skip(1) {
utils::in_place_vec_bin_op(&mut maxs, row.raw_slice(), |max, &r| if r > *max {
*max = r;
});
}
Vector::new(maxs)
}
}
}
/// Convert the matrix struct into a owned Matrix.
fn into_matrix(self) -> Matrix<T>
where T: Copy
{
self.row_iter().collect()
}
/// Select rows from matrix
///
/// # Examples
///
/// ```
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let a = Matrix::<f64>::ones(3,3);
///
/// let b = &a.select_rows(&[2]);
/// assert_eq!(b.rows(), 1);
/// assert_eq!(b.cols(), 3);
///
/// let c = &a.select_rows(&[1,2]);
/// assert_eq!(c.rows(), 2);
/// assert_eq!(c.cols(), 3);
/// ```
///
/// # Panics
///
/// - Panics if row indices exceed the matrix dimensions.
fn select_rows<'a, I>(&self, rows: I) -> Matrix<T>
where T: Copy,
I: IntoIterator<Item = &'a usize>,
I::IntoIter: ExactSizeIterator + Clone
{
let row_iter = rows.into_iter();
let mut mat_vec = Vec::with_capacity(row_iter.len() * self.cols());
for row in row_iter.clone() {
assert!(*row < self.rows(),
"Row index is greater than number of rows.");
}
for row_idx in row_iter.clone() {
unsafe {
let row = self.row_unchecked(*row_idx);
mat_vec.extend_from_slice(row.raw_slice());
}
}
Matrix {
cols: self.cols(),
rows: row_iter.len(),
data: mat_vec,
}
}
/// Select columns from matrix
///
/// # Examples
///
/// ```
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let a = Matrix::<f64>::ones(3,3);
/// let b = &a.select_cols(&[2]);
/// assert_eq!(b.rows(), 3);
/// assert_eq!(b.cols(), 1);
///
/// let c = &a.select_cols(&[1,2]);
/// assert_eq!(c.rows(), 3);
/// assert_eq!(c.cols(), 2);
/// ```
///
/// # Panics
///
/// - Panics if column indices exceed the matrix dimensions.
fn select_cols<'a, I>(&self, cols: I) -> Matrix<T>
where T: Copy,
I: IntoIterator<Item = &'a usize>,
I::IntoIter: ExactSizeIterator + Clone
{
let col_iter = cols.into_iter();
let mut mat_vec = Vec::with_capacity(col_iter.len() * self.rows());
for col in col_iter.clone() {
assert!(*col < self.cols(),
"Column index is greater than number of columns.");
}
unsafe {
for i in 0..self.rows() {
for col in col_iter.clone() {
mat_vec.push(*self.get_unchecked([i, *col]));
}
}
}
Matrix {
cols: col_iter.len(),
rows: self.rows(),
data: mat_vec,
}
}
/// The elementwise product of two matrices.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let a = matrix![1.0, 2.0;
/// 3.0, 4.0];
/// let b = matrix![1.0, 2.0;
/// 3.0, 4.0];
///
/// let c = &a.elemul(&b);
/// assert_matrix_eq!(c, &matrix![1.0, 4.0; 9.0, 16.0]);
    /// # }
/// ```
///
/// # Panics
///
/// - The matrices have different row counts.
/// - The matrices have different column counts.
fn elemul(&self, m: &Self) -> Matrix<T>
where T: Copy + Mul<T, Output = T>
{
assert!(self.rows() == m.rows(), "Matrix row counts not equal.");
assert!(self.cols() == m.cols(), "Matrix column counts not equal.");
let mut data = Vec::with_capacity(self.rows() * self.cols());
for (self_r, m_r) in self.row_iter().zip(m.row_iter()) {
data.extend_from_slice(&utils::vec_bin_op(self_r.raw_slice(), m_r.raw_slice(), T::mul));
}
Matrix::new(self.rows(), self.cols(), data)
}
/// The elementwise division of two matrices.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let a = matrix![1.0, 2.0;
/// 3.0, 4.0];
/// let b = matrix![1.0, 2.0;
/// 3.0, 4.0];
///
/// let c = &a.elediv(&b);
/// assert_matrix_eq!(c, &matrix![1.0, 1.0; 1.0, 1.0]);
/// # }
/// ```
///
/// # Panics
///
/// - The matrices have different row counts.
/// - The matrices have different column counts.
fn elediv(&self, m: &Self) -> Matrix<T>
where T: Copy + Div<T, Output = T>
{
assert!(self.rows() == m.rows(), "Matrix row counts not equal.");
assert!(self.cols() == m.cols(), "Matrix column counts not equal.");
let mut data = Vec::with_capacity(self.rows() * self.cols());
for (self_r, m_r) in self.row_iter().zip(m.row_iter()) {
data.extend_from_slice(&utils::vec_bin_op(self_r.raw_slice(), m_r.raw_slice(), T::div));
}
Matrix::new(self.rows(), self.cols(), data)
}
/// Select block matrix from matrix
///
/// # Examples
///
/// ```
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let a = Matrix::<f64>::identity(3);
/// let b = &a.select(&[0,1], &[1,2]);
///
/// // We get the 2x2 block matrix in the upper right corner.
/// assert_eq!(b.rows(), 2);
/// assert_eq!(b.cols(), 2);
///
/// // Prints [0,0, 1,0]
/// println!("{:?}", b.data());
/// ```
///
/// # Panics
///
/// - Panics if row or column indices exceed the matrix dimensions.
fn select(&self, rows: &[usize], cols: &[usize]) -> Matrix<T>
where T: Copy
{
let mut mat_vec = Vec::with_capacity(cols.len() * rows.len());
for col in cols {
assert!(*col < self.cols(),
"Column index is greater than number of columns.");
}
for row in rows {
assert!(*row < self.rows(),
"Row index is greater than number of columns.");
}
unsafe {
for row in rows {
for col in cols {
mat_vec.push(*self.get_unchecked([*row, *col]));
}
}
}
Matrix {
cols: cols.len(),
rows: rows.len(),
data: mat_vec,
}
}
/// Horizontally concatenates two matrices. With self on the left.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let a = matrix![1.0, 2.0;
/// 3.0, 4.0;
/// 5.0, 6.0];
/// let b = matrix![4.0;
/// 5.0;
/// 6.0];
///
/// let c = &a.hcat(&b);
/// assert_eq!(c.cols(), a.cols() + b.cols());
/// assert_eq!(c[[1, 2]], 5.0);
/// # }
/// ```
///
/// # Panics
///
/// - Self and m have different row counts.
fn hcat<S>(&self, m: &S) -> Matrix<T>
where T: Copy,
S: BaseMatrix<T>
{
assert!(self.rows() == m.rows(), "Matrix row counts are not equal.");
let mut new_data = Vec::with_capacity((self.cols() + m.cols()) * self.rows());
for (self_row, m_row) in self.row_iter().zip(m.row_iter()) {
new_data.extend_from_slice(self_row.raw_slice());
new_data.extend_from_slice(m_row.raw_slice());
}
Matrix {
cols: (self.cols() + m.cols()),
rows: self.rows(),
data: new_data,
}
}
/// Vertically concatenates two matrices. With self on top.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let a = matrix![1.0, 2.0, 3.0;
/// 4.0, 5.0, 6.0];
    /// let b = matrix![4.0, 5.0, 6.0];
///
/// let c = &a.vcat(&b);
/// assert_eq!(c.rows(), a.rows() + b.rows());
/// assert_eq!(c[[2, 2]], 6.0);
/// # }
/// ```
///
/// # Panics
///
/// - Self and m have different column counts.
fn vcat<S>(&self, m: &S) -> Matrix<T>
where T: Copy,
S: BaseMatrix<T>
{
assert!(self.cols() == m.cols(),
"Matrix column counts are not equal.");
let mut new_data = Vec::with_capacity((self.rows() + m.rows()) * self.cols());
for row in self.row_iter().chain(m.row_iter()) {
new_data.extend_from_slice(row.raw_slice());
}
Matrix {
cols: self.cols(),
rows: (self.rows() + m.rows()),
data: new_data,
}
}
/// Extract the diagonal of the matrix
///
/// Examples
///
/// ```
/// # #[macro_use]
/// # extern crate rulinalg;
///
/// use rulinalg::matrix::BaseMatrix;
///
/// # fn main() {
/// let a = matrix![1, 2, 3;
/// 4, 5, 6;
/// 7, 8, 9].diag().cloned().collect::<Vec<_>>();
/// let b = matrix![1, 2;
/// 3, 4;
/// 5, 6].diag().cloned().collect::<Vec<_>>();
///
/// assert_eq!(a, vec![1, 5, 9]);
/// assert_eq!(b, vec![1, 4]);
/// # }
/// ```
fn diag(&self) -> Diagonal<T, Self> {
self.diag_iter(DiagOffset::Main)
}
    /// Transposes the given matrix
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let mat = matrix![1.0, 2.0, 3.0;
/// 4.0, 5.0, 6.0];
///
/// let expected = matrix![1.0, 4.0;
/// 2.0, 5.0;
/// 3.0, 6.0];
/// assert_matrix_eq!(mat.transpose(), expected);
/// # }
/// ```
fn transpose(&self) -> Matrix<T>
where T: Copy
{
let mut new_data = Vec::with_capacity(self.rows() * self.cols());
unsafe {
new_data.set_len(self.rows() * self.cols());
for i in 0..self.cols() {
for j in 0..self.rows() {
*new_data.get_unchecked_mut(i * self.rows() + j) = *self.get_unchecked([j, i]);
}
}
}
Matrix {
cols: self.rows(),
rows: self.cols(),
data: new_data,
}
}
/// Checks if matrix is diagonal.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix};
///
/// let a = matrix![1.0, 0.0;
/// 0.0, 1.0];
/// let a_diag = a.is_diag();
///
/// assert_eq!(a_diag, true);
///
/// let b = matrix![1.0, 0.0;
/// 1.0, 0.0];
/// let b_diag = b.is_diag();
///
/// assert_eq!(b_diag, false);
/// # }
/// ```
fn is_diag(&self) -> bool
where T: Zero + PartialEq
{
let mut next_diag = 0usize;
self.iter().enumerate().all(|(i, data)| if i == next_diag {
next_diag += self.cols() + 1;
true
} else {
data == &T::zero()
})
}
/// Solves an upper triangular linear system.
///
/// Given a matrix `A` and a vector `b`, this function returns the
/// solution of the upper triangular system `Ux = b`, where `U` is
/// the upper triangular part of `A`.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::BaseMatrix;
/// use std::f32;
///
/// let u = matrix![1.0, 2.0;
/// 0.0, 1.0];
/// let y = vector![3.0, 1.0];
///
/// let x = u.solve_u_triangular(y).expect("A solution should exist!");
/// assert!((x[0] - 1.0) < f32::EPSILON);
/// assert!((x[1] - 1.0) < f32::EPSILON);
/// # }
/// ```
///
/// # Panics
///
/// - Vector size and matrix column count are not equal.
///
/// # Failures
///
/// - There is no valid solution to the system (matrix is singular).
/// - The matrix is empty.
fn solve_u_triangular(&self, y: Vector<T>) -> Result<Vector<T>, &'static str>
where T: Any + FloatCore
{
assert!(self.cols() == y.size(), "Vector size != Matrix column count.");
back_substitution(self, y)
}
/// Solves a lower triangular linear system.
///
/// Given a matrix `A` and a vector `b`, this function returns the
/// solution of the lower triangular system `Lx = b`, where `L` is
/// the lower triangular part of `A`.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::BaseMatrix;
/// use std::f32;
///
/// let l = matrix![1.0, 0.0;
/// 2.0, 1.0];
/// let y = vector![1.0, 3.0];
///
/// let x = l.solve_l_triangular(y).expect("A solution should exist!");
/// println!("{:?}", x);
/// assert!((x[0] - 1.0) < f32::EPSILON);
/// assert!((x[1] - 1.0) < f32::EPSILON);
/// # }
/// ```
///
/// # Panics
///
/// - Vector size and matrix column count are not equal.
///
/// # Failures
///
/// - There is no valid solution to the system (matrix is singular).
/// - The matrix is empty.
fn solve_l_triangular(&self, y: Vector<T>) -> Result<Vector<T>, &'static str>
where T: Any + FloatCore
{
assert!(self.cols() == y.size(), "Vector size != Matrix column count.");
forward_substitution(self, y)
}
/// Split the matrix at the specified axis returning two `MatrixSlice`s.
///
/// # Examples
///
/// ```
/// use rulinalg::matrix::{Axes, Matrix, BaseMatrix};
///
/// let a = Matrix::new(3,3, vec![2.0; 9]);
/// let (b,c) = a.split_at(1, Axes::Row);
/// ```
fn split_at(&self, mid: usize, axis: Axes) -> (MatrixSlice<T>, MatrixSlice<T>) {
let slice_1: MatrixSlice<T>;
let slice_2: MatrixSlice<T>;
match axis {
Axes::Row => {
assert!(mid < self.rows());
unsafe {
slice_1 = MatrixSlice::from_raw_parts(self.as_ptr(),
mid,
self.cols(),
self.row_stride());
slice_2 = MatrixSlice::from_raw_parts(self.as_ptr()
.offset((mid * self.row_stride()) as
isize),
self.rows() - mid,
self.cols(),
self.row_stride());
}
}
Axes::Col => {
assert!(mid < self.cols());
unsafe {
slice_1 = MatrixSlice::from_raw_parts(self.as_ptr(),
self.rows(),
mid,
self.row_stride());
slice_2 = MatrixSlice::from_raw_parts(self.as_ptr().offset(mid as isize),
self.rows(),
self.cols() - mid,
self.row_stride());
}
}
}
(slice_1, slice_2)
}
/// Produce a `MatrixSlice` from an existing matrix.
///
/// # Examples
///
/// ```
/// use rulinalg::matrix::{Matrix, BaseMatrix, MatrixSlice};
///
/// let a = Matrix::new(3,3, (0..9).collect::<Vec<usize>>());
/// let slice = MatrixSlice::from_matrix(&a, [1,1], 2, 2);
/// let new_slice = slice.sub_slice([0,0], 1, 1);
/// ```
fn sub_slice<'a>(&self, start: [usize; 2], rows: usize, cols: usize) -> MatrixSlice<'a, T>
where T: 'a
{
assert!(start[0] + rows <= self.rows(),
"View dimensions exceed matrix dimensions.");
assert!(start[1] + cols <= self.cols(),
"View dimensions exceed matrix dimensions.");
unsafe {
MatrixSlice::from_raw_parts(self.as_ptr()
.offset((start[0] * self.row_stride() + start[1]) as
isize),
rows,
cols,
self.row_stride())
}
}
}
/// Trait for mutable matrices.
pub trait BaseMatrixMut<T>: BaseMatrix<T> {
/// Top left index of the slice.
fn as_mut_ptr(&mut self) -> *mut T;
/// Returns a `MatrixSliceMut` over the whole matrix.
///
/// # Examples
///
/// ```
/// use rulinalg::matrix::{Matrix, BaseMatrixMut};
///
/// let mut a = Matrix::new(3, 3, vec![2.0; 9]);
/// let b = a.as_mut_slice();
/// ```
fn as_mut_slice(&mut self) -> MatrixSliceMut<T> {
unsafe {
MatrixSliceMut::from_raw_parts(self.as_mut_ptr(),
self.rows(),
self.cols(),
self.row_stride())
}
}
/// Get a mutable reference to an element in the matrix without bounds checks.
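    ///
    /// The caller must ensure that `index` lies within the matrix dimensions;
    /// otherwise the behaviour is undefined.
    ///
    /// # Examples
    ///
    /// ```
    /// # #[macro_use] extern crate rulinalg; fn main() {
    /// use rulinalg::matrix::{Matrix, BaseMatrixMut};
    ///
    /// let mut mat = matrix![0, 1;
    ///                       2, 3];
    ///
    /// unsafe { *mat.get_unchecked_mut([1, 0]) = 9; }
    /// assert_eq!(mat[[1, 0]], 9);
    /// # }
    /// ```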
unsafe fn get_unchecked_mut(&mut self, index: [usize; 2]) -> &mut T {
&mut *(self.as_mut_ptr().offset((index[0] * self.row_stride() + index[1]) as isize))
}
/// Get a mutable reference to an element in the matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrix, BaseMatrixMut};
///
/// let mut mat = matrix![0, 1;
/// 3, 4;
/// 6, 7];
///
/// assert_eq!(mat.get_mut([0, 2]), None);
/// assert_eq!(mat.get_mut([3, 0]), None);
///
/// assert_eq!(*mat.get_mut([0, 0]).unwrap(), 0);
/// *mat.get_mut([0,0]).unwrap() = 2;
/// assert_eq!(*mat.get_mut([0, 0]).unwrap(), 2);
/// # }
/// ```
fn get_mut(&mut self, index: [usize; 2]) -> Option<&mut T> {
let row_ind = index[0];
let col_ind = index[1];
if row_ind >= self.rows() || col_ind >= self.cols() {
None
} else {
unsafe { Some(self.get_unchecked_mut(index)) }
}
}
/// Returns a mutable iterator over the matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrixMut};
///
/// let mut a = Matrix::new(3,3, (0..9).collect::<Vec<usize>>());
///
/// {
/// let mut slice = a.sub_slice_mut([1,1], 2, 2);
///
/// for d in slice.iter_mut() {
/// *d = *d + 2;
/// }
/// }
///
/// // Only the matrix slice is updated.
/// assert_matrix_eq!(a, matrix![0, 1, 2; 3, 6, 7; 6, 9, 10]);
/// # }
/// ```
fn iter_mut<'a>(&mut self) -> SliceIterMut<'a, T>
where T: 'a
{
SliceIterMut {
slice_start: self.as_mut_ptr(),
row_pos: 0,
col_pos: 0,
slice_rows: self.rows(),
slice_cols: self.cols(),
row_stride: self.row_stride(),
_marker: PhantomData::<&mut T>,
}
}
/// Returns a mutable reference to the column of a matrix at the given index.
/// `None` if the index is out of bounds.
///
/// # Examples
///
/// ```
/// # #[macro_use]
/// # extern crate rulinalg;
///
/// # fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrixMut};
///
/// let mut mat = matrix![0, 1, 2;
/// 3, 4, 5;
/// 6, 7, 8];
/// let mut slice = mat.sub_slice_mut([1,1], 2, 2);
/// {
/// let col = slice.col_mut(1);
/// let mut expected = matrix![5usize; 8];
/// assert_matrix_eq!(*col, expected);
/// }
/// # }
/// ```
///
/// # Panics
///
/// Will panic if the column index is out of bounds.
fn col_mut(&mut self, index: usize) -> ColumnMut<T> {
if index < self.cols() {
unsafe { self.col_unchecked_mut(index) }
} else {
panic!("Column index out of bounds.")
}
}
/// Returns a mutable reference to the column of a matrix at the given index
/// without doing a bounds check.
///
/// # Examples
///
/// ```
/// # #[macro_use]
/// # extern crate rulinalg;
///
/// # fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrixMut};
///
/// let mut mat = matrix![0, 1, 2;
/// 3, 4, 5;
/// 6, 7, 8];
/// let mut slice = mat.sub_slice_mut([1,1], 2, 2);
/// let col = unsafe { slice.col_unchecked_mut(1) };
/// let mut expected = matrix![5usize; 8];
/// assert_matrix_eq!(*col, expected);
/// # }
/// ```
unsafe fn col_unchecked_mut(&mut self, index: usize) -> ColumnMut<T> {
let ptr = self.as_mut_ptr().offset(index as isize);
ColumnMut { col: MatrixSliceMut::from_raw_parts(ptr, self.rows(), 1, self.row_stride()) }
}
/// Returns a mutable reference to the row of a matrix at the given index.
/// `None` if the index is out of bounds.
///
/// # Examples
///
/// ```
/// # #[macro_use]
/// # extern crate rulinalg;
///
/// # fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrixMut};
///
/// let mut mat = matrix![0, 1, 2;
/// 3, 4, 5;
/// 6, 7, 8];
/// let mut slice = mat.sub_slice_mut([1,1], 2, 2);
/// {
/// let row = slice.row_mut(1);
/// let mut expected = matrix![7usize, 8];
/// assert_matrix_eq!(*row, expected);
/// }
/// # }
/// ```
///
/// # Panics
///
/// Will panic if the row index is out of bounds.
fn row_mut(&mut self, index: usize) -> RowMut<T> {
if index < self.rows() {
unsafe { self.row_unchecked_mut(index) }
} else {
panic!("Row index out of bounds.")
}
}
/// Returns a mutable reference to the row of a matrix at the given index
/// without doing a bounds check.
///
/// # Examples
///
/// ```
/// # #[macro_use]
/// # extern crate rulinalg;
///
/// # fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrixMut};
///
/// let mut mat = matrix![0, 1, 2;
/// 3, 4, 5;
/// 6, 7, 8];
/// let mut slice = mat.sub_slice_mut([1,1], 2, 2);
/// let row = unsafe { slice.row_unchecked_mut(1) };
/// let mut expected = matrix![7usize, 8];
/// assert_matrix_eq!(*row, expected);
/// # }
/// ```
unsafe fn row_unchecked_mut(&mut self, index: usize) -> RowMut<T> {
let ptr = self.as_mut_ptr().offset((self.row_stride() * index) as isize);
RowMut { row: MatrixSliceMut::from_raw_parts(ptr, 1, self.cols(), self.row_stride()) }
}
/// Swaps two rows in a matrix.
///
/// If `a == b`, this method does nothing.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg;
/// # fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrixMut};
///
/// let mut x = matrix![0, 1;
/// 2, 3;
/// 4, 5;
/// 6, 7];
///
/// x.swap_rows(1, 3);
/// let expected = matrix![0, 1;
/// 6, 7;
/// 4, 5;
/// 2, 3];
///
/// assert_matrix_eq!(x, expected);
/// # }
/// ```
///
/// # Panics
///
/// Panics if `a` or `b` are out of bounds.
fn swap_rows(&mut self, a: usize, b: usize) {
assert!(a < self.rows(), "Row index larger than row count (a).");
assert!(b < self.rows(), "Row index larger than row count (b).");
if a != b {
unsafe {
let row_a = slice::from_raw_parts_mut(self.as_mut_ptr()
.offset((self.row_stride() * a) as
isize),
self.cols());
let row_b = slice::from_raw_parts_mut(self.as_mut_ptr()
.offset((self.row_stride() * b) as
isize),
self.cols());
for (x, y) in row_a.into_iter().zip(row_b.into_iter()) {
mem::swap(x, y);
}
}
}
}
/// Swaps two columns in a matrix.
///
/// If `a == b`, this method does nothing.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg;
/// # fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrixMut};
///
/// let mut x = matrix![0, 1;
/// 2, 3;
/// 4, 5];
///
/// x.swap_cols(0, 1);
/// let expected = matrix![1, 0;
/// 3, 2;
/// 5, 4];
///
/// assert_matrix_eq!(x, expected);
/// # }
/// ```
///
/// # Panics
///
/// Panics if `a` or `b` are out of bounds.
fn swap_cols(&mut self, a: usize, b: usize) {
        assert!(a < self.cols(),
                "Column index larger than column count (a).");
        assert!(b < self.cols(),
                "Column index larger than column count (b).");
if a != b {
unsafe {
for i in 0..self.rows() {
let a_ptr: *mut T = self.get_unchecked_mut([i, a]);
let b_ptr: *mut T = self.get_unchecked_mut([i, b]);
ptr::swap(a_ptr, b_ptr);
}
}
}
}
/// Iterate over the mutable columns of the matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrixMut};
///
/// let mut a = matrix![0, 1;
/// 2, 3;
/// 4, 5];
///
/// for mut col in a.col_iter_mut() {
/// *col += 1;
/// }
///
/// // Now contains the range 1..7
/// println!("{}", a);
/// # }
/// ```
fn col_iter_mut(&mut self) -> ColsMut<T> {
ColsMut {
_marker: PhantomData::<&mut T>,
col_pos: 0,
row_stride: self.row_stride() as isize,
slice_cols: self.cols(),
slice_rows: self.rows(),
slice_start: self.as_mut_ptr(),
}
}
/// Iterate over the mutable rows of the matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrixMut};
///
/// let mut a = matrix![0, 1;
/// 2, 3;
/// 4, 5];
///
/// for mut row in a.row_iter_mut() {
/// *row += 1;
/// }
///
/// // Now contains the range 1..7
/// println!("{}", a);
/// # }
/// ```
fn row_iter_mut(&mut self) -> RowsMut<T> {
RowsMut {
slice_start: self.as_mut_ptr(),
row_pos: 0,
slice_rows: self.rows(),
slice_cols: self.cols(),
row_stride: self.row_stride() as isize,
_marker: PhantomData::<&mut T>,
}
}
/// Iterate over diagonal entries mutably
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg;
///
/// # fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrixMut, DiagOffset};
///
/// let mut a = matrix![0, 1, 2;
/// 3, 4, 5;
/// 6, 7, 8];
///
/// // Increment super diag
/// for d in a.diag_iter_mut(DiagOffset::Above(1)) {
/// *d = *d + 1;
/// }
///
/// // Zero the sub-diagonal (sets 3 and 7 to 0)
/// // Equivalent to `diag_iter(DiagOffset::Below(1))`
/// for sub_d in a.diag_iter_mut(DiagOffset::from(-1)) {
/// *sub_d = 0;
/// }
///
/// println!("{}", a);
/// # }
/// ```
///
/// # Panics
///
/// If using an `Above` or `Below` offset which is
/// out-of-bounds this function will panic.
///
/// This function will never panic if the `Main` diagonal
/// offset is used.
fn diag_iter_mut(&mut self, k: DiagOffset) -> DiagonalMut<T, Self> {
let (diag_len, diag_start) = match k.into() {
DiagOffset::Main => (min(self.rows(), self.cols()), 0),
DiagOffset::Above(m) => {
assert!(m < self.cols(),
"Offset diagonal is not within matrix dimensions.");
(min(self.rows(), self.cols() - m), m)
}
DiagOffset::Below(m) => {
assert!(m < self.rows(),
"Offset diagonal is not within matrix dimensions.");
(min(self.rows() - m, self.cols()), m * self.row_stride())
}
};
let diag_end = diag_start + (diag_len - 1) * self.row_stride() + diag_len;
DiagonalMut {
matrix: self,
diag_pos: diag_start,
diag_end: diag_end,
_marker: PhantomData::<&mut T>,
}
}
/// Sets the underlying matrix data to the target data.
///
/// # Examples
///
/// ```
/// use rulinalg::matrix::{Matrix, BaseMatrixMut};
///
/// let mut mat = Matrix::<f32>::zeros(4,4);
/// let one_block = Matrix::<f32>::ones(2,2);
///
/// // Get a mutable slice of the upper left 2x2 block.
/// let mat_block = mat.sub_slice_mut([0,0], 2, 2);
///
/// // Set the upper left 2x2 block to be ones.
/// mat_block.set_to(one_block);
/// ```
///
/// # Panics
///
/// Panics if the dimensions of `self` and `target` are not the same.
fn set_to<M: BaseMatrix<T>>(mut self, target: M)
where T: Copy
{
assert!(self.rows() == target.rows(),
"Target has different row count to self.");
assert!(self.cols() == target.cols(),
"Target has different column count to self.");
for (mut s, t) in self.row_iter_mut().zip(target.row_iter()) {
// Vectorized assignment per row.
utils::in_place_vec_bin_op(s.raw_slice_mut(), t.raw_slice(), |x, &y| *x = y);
}
}
/// Applies a function to each element in the matrix.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate rulinalg; fn main() {
/// use rulinalg::matrix::{Matrix, BaseMatrixMut};
///
/// fn add_two(a: f64) -> f64 {
/// a + 2f64
/// }
///
/// let a = Matrix::new(2, 2, vec![0.;4]);
///
/// let b = a.apply(&add_two);
///
/// assert_eq!(b, matrix![2.0, 2.0; 2.0, 2.0]);
/// # }
/// ```
fn apply(mut self, f: &Fn(T) -> T) -> Self
where T: Copy
{
for val in self.iter_mut() {
*val = f(*val);
}
self
}
/// Split the matrix at the specified axis returning two `MatrixSliceMut`s.
///
/// # Examples
///
/// ```
/// use rulinalg::matrix::{Axes, Matrix, BaseMatrixMut};
///
/// let mut a = Matrix::new(3,3, vec![2.0; 9]);
/// let (b, c) = a.split_at_mut(1, Axes::Col);
/// ```
fn split_at_mut(&mut self, mid: usize, axis: Axes) -> (MatrixSliceMut<T>, MatrixSliceMut<T>) {
let slice_1: MatrixSliceMut<T>;
let slice_2: MatrixSliceMut<T>;
match axis {
Axes::Row => {
assert!(mid < self.rows());
unsafe {
slice_1 = MatrixSliceMut::from_raw_parts(self.as_mut_ptr(),
mid,
self.cols(),
self.row_stride());
slice_2 = MatrixSliceMut::from_raw_parts(self.as_mut_ptr()
.offset((mid *
self.row_stride()) as
isize),
self.rows() - mid,
self.cols(),
self.row_stride());
}
}
Axes::Col => {
assert!(mid < self.cols());
unsafe {
slice_1 = MatrixSliceMut::from_raw_parts(self.as_mut_ptr(),
self.rows(),
mid,
self.row_stride());
slice_2 = MatrixSliceMut::from_raw_parts(self.as_mut_ptr()
.offset(mid as isize),
self.rows(),
self.cols() - mid,
self.row_stride());
}
}
}
(slice_1, slice_2)
}
/// Produce a `MatrixSliceMut` from an existing matrix.
///
/// # Examples
///
/// ```
/// use rulinalg::matrix::{Matrix, MatrixSliceMut, BaseMatrixMut};
///
/// let mut a = Matrix::new(3,3, (0..9).collect::<Vec<usize>>());
/// let mut slice = MatrixSliceMut::from_matrix(&mut a, [1,1], 2, 2);
/// let new_slice = slice.sub_slice_mut([0,0], 1, 1);
/// ```
fn sub_slice_mut<'a>(&mut self,
start: [usize; 2],
rows: usize,
cols: usize)
-> MatrixSliceMut<'a, T>
where T: 'a
{
assert!(start[0] + rows <= self.rows(),
"View dimensions exceed matrix dimensions.");
assert!(start[1] + cols <= self.cols(),
"View dimensions exceed matrix dimensions.");
unsafe {
MatrixSliceMut::from_raw_parts(self.as_mut_ptr()
.offset((start[0] * self.row_stride() + start[1]) as
isize),
rows,
cols,
self.row_stride())
}
}
}
| 30.673032 | 100 | 0.442428 |
8afa184acafc1ea8864c386579caef90cf0e9c84
| 3,683 |
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::registry::base::{Command, Context, Notifier, SettingHandler, State},
crate::registry::device_storage::{DeviceStorageCompatible, DeviceStorageFactory},
crate::switchboard::base::*,
fuchsia_async as fasync,
futures::future::BoxFuture,
futures::StreamExt,
parking_lot::RwLock,
std::sync::Arc,
};
impl DeviceStorageCompatible for SystemInfo {
const KEY: &'static str = "system_info";
fn default_value() -> Self {
SystemInfo { login_override_mode: SystemLoginOverrideMode::None }
}
}
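// Spawns the task that owns the persisted SystemInfo: Listen/EndListen swap the
// notifier in and out, SetLoginOverrideMode writes the new value to storage,
// acks the responder and pings any listener, and Get replies with the cached value.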
pub fn spawn_system_controller<T: DeviceStorageFactory + Send + Sync + 'static>(
context: Context<T>,
) -> BoxFuture<'static, SettingHandler> {
let storage_handle = context.environment.storage_factory_handle.clone();
let (system_handler_tx, mut system_handler_rx) = futures::channel::mpsc::unbounded::<Command>();
let notifier_lock = Arc::<RwLock<Option<Notifier>>>::new(RwLock::new(None));
fasync::spawn(async move {
let storage = storage_handle.lock().await.get_store::<SystemInfo>();
let mut stored_value: SystemInfo;
{
let mut storage_lock = storage.lock().await;
stored_value = storage_lock.get().await;
}
while let Some(command) = system_handler_rx.next().await {
match command {
Command::ChangeState(state) => match state {
State::Listen(notifier) => {
*notifier_lock.write() = Some(notifier);
}
State::EndListen => {
*notifier_lock.write() = None;
}
},
Command::HandleRequest(request, responder) =>
{
#[allow(unreachable_patterns)]
match request {
SettingRequest::SetLoginOverrideMode(mode) => {
stored_value.login_override_mode = SystemLoginOverrideMode::from(mode);
let storage_clone = storage.clone();
let notifier_clone = notifier_lock.clone();
fasync::spawn(async move {
{
let mut storage_lock = storage_clone.lock().await;
storage_lock.write(&stored_value, true).await.unwrap();
}
responder.send(Ok(None)).ok();
if let Some(notifier) = &*notifier_clone.read() {
notifier.unbounded_send(SettingType::System).unwrap();
}
});
}
SettingRequest::Get => {
responder
.send(Ok(Some(SettingResponse::System(stored_value))))
.unwrap();
}
_ => {
responder
.send(Err(SwitchboardError::UnimplementedRequest {
setting_type: SettingType::System,
request: request,
}))
.ok();
}
}
}
}
}
});
Box::pin(async move { system_handler_tx })
}
| 40.032609 | 100 | 0.485745 |
8a940435eb095dc4d8359a0b560794cf31d90c9e
| 7,301 |
use crate::field::*;
use crate::fin_field::*;
use crate::matrix::*;
use std::convert::TryInto;
/*
 * vandermonde(size={height: m, width: n}, v=[a, b, c, ..., x]), where |v| = m,
 * is
 * (1 a a^2 a^3 a^4 ... a^(n-1))
 * (1 b b^2 b^3 b^4 ... b^(n-1))
 * (1 c c^2 c^3 c^4 ... c^(n-1))
 * ...
 * (1 x x^2 x^3 x^4 ... x^(n-1))
 *
 * Note: n == m is not required
 */
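// A tiny concrete illustration (a sketch, independent of the field used): for
// v = [a, b] and width = 3 the function below fills
//
//   (1 a a^2)
//   (1 b b^2)
//
// i.e. m[i][j] = v[i]^j, exactly what the nested loop computes.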
pub fn vandermonde<F: Field>(size: MatrixSize, v: &[F]) -> Option<Matrix<F>> {
let mut m = Matrix::new(size);
if size.height != v.len() {
return None;
}
for i in 0..v.len() {
if v[i] == F::ONE {
return None;
}
for j in (i + 1)..v.len() {
if v[i] == v[j] {
return None;
}
}
}
for i in 0..size.height {
for j in 0..size.width {
let e: u32 = j.try_into().unwrap();
m[i][j] = v[i].exp(e);
}
}
Some(m)
}
pub fn systematic_vandermonde<F: Field>(size: MatrixSize, v: &[F]) -> Option<Matrix<F>> {
let m = vandermonde(size, v);
if let Some(m) = m {
let mut sub = m.clone();
sub.drop_rows((size.width..size.height).collect());
let inv = sub.inverse().unwrap();
Some(&m * &inv)
} else {
None
}
}
// systematic & the topmost parity row is 111...1
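// Roughly, the intended layout (a sketch; k = width):
//
//   ( I_k )        <- identity block: the code is systematic
//   ( 1 1 ... 1 )  <- first parity row, normalized to all ones column by column
//   ( ... )        <- remaining parity rows, each rescaled so its first entry is 1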
pub fn modified_systematic_vandermonde<F: Field>(size: MatrixSize, v: &[F]) -> Option<Matrix<F>> {
let m = vandermonde(size, v);
if let Some(m) = m {
let mut sub = m.clone();
sub.drop_rows((size.width..size.height).collect());
let inv = sub.inverse().unwrap();
let mut m = &m * &inv;
for i in 0..size.width {
let f = m[size.width][i];
if f != F::ONE {
for j in size.width..size.height {
m[j][i] = m[j][i] * F::mul_inv(&f);
}
}
}
for i in size.width + 1..size.height {
let f = m[i][0];
if f != F::ONE {
for j in 0..size.width {
m[i][j] = m[i][j] * F::mul_inv(&f);
}
}
}
Some(m)
} else {
None
}
}
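// Builds a (data_fragments + parity_fragments) x data_fragments Reed-Solomon
// style encoding matrix over GF(2^8), using consecutive powers of the primitive
// element as the Vandermonde evaluation points and putting the result into the
// modified systematic form above.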
pub fn rsv(data_fragments: usize, parity_fragments: usize) -> Matrix<GF_2_8> {
let height = data_fragments + parity_fragments;
let velems: Vec<GF_2_8> = (1..=height)
.map(|i| GF_2_8::PRIMITIVE_ELEMENT.exp(i as u32))
.collect();
let m: Matrix<GF_2_8> = modified_systematic_vandermonde(
MatrixSize {
height,
width: data_fragments,
},
&velems,
)
.unwrap();
m
}
pub fn nonsystematic_rsv(data_fragments: usize, parity_fragments: usize) -> Matrix<GF_2_8> {
let height = data_fragments + parity_fragments;
let velems: Vec<GF_2_8> = (1..=height)
.map(|i| GF_2_8::PRIMITIVE_ELEMENT.exp(i as u32))
.collect();
let m: Matrix<GF_2_8> = vandermonde(
MatrixSize {
height,
width: data_fragments,
},
&velems,
)
.unwrap();
m
}
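// Presumably mirrors the parity layout of ISA-L's gf_gen_rs_matrix (inferred
// from the name): an identity block for the k data rows, then parity row r
// (0-based) holding the powers gen_r^0, gen_r^1, ..., gen_r^(k-1), where
// gen_0 = 1 and gen_{r+1} = 2 * gen_r in GF(2^8).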
pub fn isa_rsv(data: usize, parity: usize) -> Matrix<GF_2_8> {
let m = data + parity;
let k = data;
let mut a = Matrix::new(MatrixSize {
height: m,
width: k,
});
let mut gen = GF_2_8::ONE;
for i in 0..k {
a[i][i] = GF_2_8::ONE;
}
for i in k..m {
let mut p = GF_2_8::ONE;
for j in 0..k {
a[i][j] = p;
p = p * gen;
}
gen = gen * GF_2_8::from(2);
}
a
}
#[cfg(test)]
mod tests {
use super::*;
use itertools::Itertools;
use rand::prelude::*;
#[test]
fn test_rsv1() {
let m = rsv(10, 4);
let remove_pattern = (0..14).into_iter().combinations(4);
for remove in remove_pattern.into_iter() {
let mut tmp = m.clone();
tmp.drop_rows(remove);
let mut inv = tmp.clone();
let inv = inv.inverse().unwrap();
assert!(Matrix::identity(10) == &tmp * &inv);
}
}
#[test]
fn test_isa_rsv1() {
let m = isa_rsv(10, 4);
let remove_pattern = (0..14).into_iter().combinations(4);
for remove in remove_pattern.into_iter() {
let mut tmp = m.clone();
tmp.drop_rows(remove);
let mut inv = tmp.clone();
let inv = inv.inverse().unwrap();
assert!(Matrix::identity(10) == &tmp * &inv);
}
}
#[test]
fn test_isa_rsv2() {
let mut m = isa_rsv(10, 4);
m.drop_rows(vec![2, 4, 5, 9]); // (10, 10)
let mut a = Matrix::new(MatrixSize {
height: 10,
width: 20,
});
for i in 0..a.height() {
for j in 0..a.width() {
a[i][j] = GF_2_8::from(rand::random::<u8>());
}
}
let mut result = &m * &a; // (10, 20)
result.drop_rows(vec![9]); // (9, 20)
let mut inv = m.clone(); // (10, 10);
let mut inv = inv.inverse().unwrap(); // (9, 9)
{
inv.drop_rows(vec![9]); // (9, 10);
println!("{}", inv.dump());
inv.drop_col(9);
println!("{}", inv.dump());
}
let b = &inv * &result; // (9, 20)
println!("{}", a.dump());
println!("{}", b.dump());
}
#[test]
fn test_inverse_vandermonde() {
let r = GF_2_8::PRIMITIVE_ELEMENT;
let v1 = vandermonde(
MatrixSize {
height: 4,
width: 4,
},
&vec![r.exp(1), r.exp(2), r.exp(3), r.exp(4)],
)
.unwrap();
let v1_inv = v1.clone().inverse().unwrap();
assert_eq!(&v1 * &v1_inv, Matrix::identity(4));
let v2 = vandermonde(
MatrixSize {
height: 4,
width: 4,
},
&vec![r.exp(2), r.exp(2), r.exp(3), r.exp(4)],
);
assert!(v2.is_none());
let v3 = vandermonde(
MatrixSize {
height: 5,
width: 4,
},
&vec![r.exp(1), r.exp(2), r.exp(3), r.exp(4), r.exp(5)],
)
.unwrap();
for i in 0..5 {
let mut v = v3.clone();
v.drop_rows(vec![i]);
let v_inv = v.clone().inverse().unwrap();
assert_eq!(&v * &v_inv, Matrix::identity(4));
}
}
#[test]
fn systematic_vandermonde_test() {
let r = GF_2_8::PRIMITIVE_ELEMENT;
let mut sv = systematic_vandermonde(
MatrixSize {
height: 5,
width: 4,
},
&vec![r.exp(1), r.exp(2), r.exp(3), r.exp(4), r.exp(5)],
)
.unwrap();
sv.drop_rows(vec![4]);
assert_eq!(sv, Matrix::identity(4));
}
#[test]
fn modified_systematic_vandermonde_test() {
let r = GF_2_8::PRIMITIVE_ELEMENT;
let mut sv = modified_systematic_vandermonde(
MatrixSize {
height: 5,
width: 4,
},
&vec![r.exp(1), r.exp(2), r.exp(3), r.exp(4), r.exp(5)],
)
.unwrap();
sv.drop_rows(vec![4]);
assert_eq!(sv, Matrix::identity(4));
}
}
| 23.251592 | 98 | 0.451171 |
ed399cc446bd22c827c30669694c05dca6a01695
| 1,636 |
#[rustfmt::skip]
mod fast_input {
#[macro_export] macro_rules! input{(sc=$sc:expr,$($r:tt)*)=>{input_inner!{$sc,$($r)*}};($($r:tt)*)=>{let mut sc=fast_input::Scanner::new(std::io::stdin().lock(),4096);input_inner!{sc,$($r)*}};}
#[macro_export] macro_rules! input_inner{($sc:expr)=>{};($sc:expr,)=>{};($sc:expr,$var:ident:$t:tt$($r:tt)*)=>{let $var=read_value!($sc,$t);input_inner!{$sc $($r)*}};}
#[macro_export] macro_rules! read_value{($sc:expr,($($t:tt),*))=>{($(read_value!($sc,$t)),*)};($sc:expr,[$t:tt;$len:expr])=>{(0..$len).map(|_|read_value!($sc,$t)).collect::<Vec<_>>()};($sc:expr,Chars)=>{read_value!($sc,String).chars().collect::<Vec<char>>()};($sc:expr,Usize1)=>{read_value!($sc,usize)-1};($sc:expr,$t:ty)=>{$sc.next::<$t>()};}
pub struct Scanner {buf:Vec<u8>,pos: usize,}
impl Scanner {
pub fn new<R: std::io::Read>(mut reader: R, estimated: usize) -> Self {
let mut buf = Vec::with_capacity(estimated);let _=std::io::copy(&mut reader,&mut buf).unwrap();if buf.last()!=Some(&b'\n'){panic!("{}", 0);}
Scanner { buf, pos: 0 }
}
#[inline]
pub fn next<T: std::str::FromStr>(&mut self) -> T where T::Err: std::fmt::Debug,{
let mut start=None;loop{match(self.buf[self.pos],start.is_some()){(b' ',true)|(b'\n', true)|(b'\r', true)=>break,(_, true)|(b' ', false)|(b'\n',false)|(b'\r', false)=>self.pos+=1,(_, false)=>start=Some(self.pos),}}let target=&self.buf[start.unwrap()..self.pos];
unsafe { std::str::from_utf8_unchecked(target) }.parse().unwrap()
}
}
}
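// Usage sketch for the macros above (the names here are illustrative): besides
// a single value as in `main`, vectors, tuples, 1-based indices and char
// vectors are supported, e.g.
//
//     input!(n: usize, a: [(Usize1, i64); n], s: Chars);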
fn main() {
input!(n: usize);
println!("{}", n);
}
| 71.130435 | 347 | 0.561736 |
7a61552c5423bf079de2e12f3abbcd1854e21bb5
| 654 |
//! Tests auto-converted from "sass-spec/spec/libsass-closed-issues/issue_592.hrx"
#[allow(unused)]
fn runner() -> crate::TestRunner {
super::runner()
}
#[test]
#[ignore] // wrong result
fn test() {
assert_eq!(
runner().ok("%a::-webkit-scrollbar {\
\n color: green;\
\n}\n\
\n.a {\
\n .b {\
\n @extend %a;\
\n }\n\
\n .c .b {\
\n @extend %a;\
\n }\
\n}\n"),
".a .c .b::-webkit-scrollbar, .a .b::-webkit-scrollbar {\
\n color: green;\
\n}\n"
);
}
| 23.357143 | 82 | 0.396024 |
1dce19b49584b92434aa9817720e3f57d61b6545
| 26,312 |
use bindings::{
Windows::Win32::Graphics::Direct3D11::*, Windows::Win32::Graphics::Direct3D12::*,
Windows::Win32::Graphics::DirectComposition::*, Windows::Win32::Graphics::Dxgi::*,
Windows::Win32::Graphics::Gdi::*, Windows::Win32::Graphics::Hlsl::*,
Windows::Win32::System::SystemServices::*, Windows::Win32::System::Threading::*,
Windows::Win32::UI::DisplayDevices::*, Windows::Win32::UI::MenusAndResources::*,
Windows::Win32::UI::WindowsAndMessaging::*,
};
use dx12_common::{
cd3dx12_blend_desc_default, cd3dx12_rasterizer_desc_default,
cd3dx12_resource_barrier_transition, create_default_buffer,
};
use std::ptr::null_mut;
use std::{convert::TryInto, ffi::CString};
use windows::Interface;
const NUM_OF_FRAMES: usize = 2;
#[derive(Debug, PartialEq)]
#[repr(C)]
struct Vertex {
position: [f32; 3],
color: [f32; 4],
}
impl Vertex {
const fn new(position: [f32; 3], color: [f32; 4]) -> Self {
Self { position, color }
}
}
const RED: [f32; 4] = [1.0, 0.0, 0.0, 1.0];
const GREEN: [f32; 4] = [0.0, 1.0, 0.0, 1.0];
const BLUE_TRANSPARENT: [f32; 4] = [0.0, 0.0, 1.0, 0.5];
const MAGENTA: [f32; 4] = [1.0, 0.0, 1.0, 1.0];
#[allow(dead_code)]
struct Window {
hwnd: HWND,
factory: IDXGIFactory4,
adapter: IDXGIAdapter1,
device: ID3D12Device,
queue: ID3D12CommandQueue,
allocators: [ID3D12CommandAllocator; NUM_OF_FRAMES],
comp_device: IDCompositionDevice,
swap_chain: IDXGISwapChain3,
current_frame: usize,
comp_target: IDCompositionTarget,
comp_visual: IDCompositionVisual,
rtv_desc_heap: ID3D12DescriptorHeap,
rtv_desc_size: usize,
back_buffers: [ID3D12Resource; NUM_OF_FRAMES],
root_signature: ID3D12RootSignature,
list: ID3D12GraphicsCommandList,
vertex_shader: ID3DBlob,
pixel_shader: ID3DBlob,
pipeline_state: ID3D12PipelineState,
viewport: D3D12_VIEWPORT,
scissor: RECT,
// Synchronization
fence: ID3D12Fence,
fence_event: HANDLE,
fence_values: [u64; NUM_OF_FRAMES],
// Resources
vertex_buffer: ID3D12Resource,
vertex_buffer_view: D3D12_VERTEX_BUFFER_VIEW,
indices_buffer: ID3D12Resource,
indices_buffer_view: D3D12_INDEX_BUFFER_VIEW,
}
impl Window {
pub fn new(hwnd: HWND) -> windows::Result<Self> {
        // Start "DebugView" to listen for errors
// https://docs.microsoft.com/en-us/sysinternals/downloads/debugview
let debug = unsafe { D3D12GetDebugInterface::<ID3D12Debug1>() }
.expect("Unable to create debug layer");
unsafe {
debug.EnableDebugLayer();
debug.SetEnableGPUBasedValidation(true);
debug.SetEnableSynchronizedCommandQueueValidation(true);
}
let factory = unsafe { CreateDXGIFactory2::<IDXGIFactory4>(0) }?;
let adapter = (0..99)
.into_iter()
.find_map(|i| unsafe {
let mut ptr: Option<IDXGIAdapter1> = None;
factory.EnumAdapters1(i, &mut ptr).and_some(ptr).ok()
})
.expect("Could not find d3d adapter");
let device: ID3D12Device = unsafe {
D3D12CreateDevice(
&adapter, // None for default adapter
D3D_FEATURE_LEVEL::D3D_FEATURE_LEVEL_11_0,
)
}?;
let queue = unsafe {
let desc = D3D12_COMMAND_QUEUE_DESC {
Type: D3D12_COMMAND_LIST_TYPE::D3D12_COMMAND_LIST_TYPE_DIRECT,
Priority: D3D12_COMMAND_QUEUE_PRIORITY::D3D12_COMMAND_QUEUE_PRIORITY_HIGH.0,
Flags: D3D12_COMMAND_QUEUE_FLAGS::D3D12_COMMAND_QUEUE_FLAG_NONE,
NodeMask: 0,
};
device.CreateCommandQueue::<ID3D12CommandQueue>(&desc)
}?;
let allocators: [ID3D12CommandAllocator; NUM_OF_FRAMES] = (0..NUM_OF_FRAMES)
.map(|_| unsafe {
device
.CreateCommandAllocator::<ID3D12CommandAllocator>(
D3D12_COMMAND_LIST_TYPE::D3D12_COMMAND_LIST_TYPE_DIRECT,
)
.expect("Unable to create allocator")
})
.collect::<Vec<_>>()
.try_into()
.expect("Unable to create allocators");
// Composition device
let comp_device: IDCompositionDevice = unsafe { DCompositionCreateDevice(None) }?;
// Create swap chain for composition
let swap_chain = unsafe {
let desc = DXGI_SWAP_CHAIN_DESC1 {
AlphaMode: DXGI_ALPHA_MODE::DXGI_ALPHA_MODE_PREMULTIPLIED,
BufferCount: NUM_OF_FRAMES as _,
Width: 1024,
Height: 1024,
Format: DXGI_FORMAT::DXGI_FORMAT_B8G8R8A8_UNORM,
Flags: 0,
BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT,
SampleDesc: DXGI_SAMPLE_DESC {
Count: 1,
Quality: 0,
},
Scaling: DXGI_SCALING::DXGI_SCALING_STRETCH,
Stereo: BOOL(0),
SwapEffect: DXGI_SWAP_EFFECT::DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL,
};
let mut ptr: Option<IDXGISwapChain1> = None;
factory
.CreateSwapChainForComposition(&queue, &desc, None, &mut ptr)
.and_some(ptr)
}?
.cast::<IDXGISwapChain3>()?;
// Current frame index
let current_frame = unsafe { swap_chain.GetCurrentBackBufferIndex() as usize };
// Create IDCompositionTarget for the window
let comp_target = unsafe {
let mut ptr = None;
comp_device
.CreateTargetForHwnd(hwnd, BOOL(1), &mut ptr)
.and_some(ptr)
}?;
// Create IDCompositionVisual for the window
let comp_visual = unsafe {
let mut ptr = None;
comp_device.CreateVisual(&mut ptr).and_some(ptr)
}?;
// Set swap_chain and the root visual and commit
unsafe {
comp_visual.SetContent(&swap_chain).ok()?;
comp_target.SetRoot(&comp_visual).ok()?;
comp_device.Commit().ok()?;
}
// Create descriptor heap for render target views
let rtv_desc_heap = unsafe {
let desc = D3D12_DESCRIPTOR_HEAP_DESC {
Type: D3D12_DESCRIPTOR_HEAP_TYPE::D3D12_DESCRIPTOR_HEAP_TYPE_RTV,
NumDescriptors: NUM_OF_FRAMES as _,
Flags: D3D12_DESCRIPTOR_HEAP_FLAGS::D3D12_DESCRIPTOR_HEAP_FLAG_NONE,
NodeMask: 0,
};
device.CreateDescriptorHeap::<ID3D12DescriptorHeap>(&desc)
}?;
// Create resource per frame
let mut descriptor = unsafe { rtv_desc_heap.GetCPUDescriptorHandleForHeapStart() };
let rtv_desc_size = unsafe {
device.GetDescriptorHandleIncrementSize(
D3D12_DESCRIPTOR_HEAP_TYPE::D3D12_DESCRIPTOR_HEAP_TYPE_RTV,
) as usize
};
let back_buffers = (0..NUM_OF_FRAMES)
.map(|i| {
let resource = unsafe { swap_chain.GetBuffer::<ID3D12Resource>(i as _) }?;
unsafe {
// let desc = D3D12_TEX2D_RTV {
// Format: DXGI_FORMAT_R8G8B8A8_UNORM,
// u: D3D12_RTV_DIMENSION_UNKNOWN as _,
// ViewDimension: 0,
// };
device.CreateRenderTargetView(&resource, 0 as _, &descriptor);
descriptor.ptr += rtv_desc_size;
}
Ok(resource)
})
.collect::<Result<Vec<_>, windows::Error>>()?
.try_into()
.expect("Unable to create resources");
// Create root signature
let root_signature = unsafe {
let root = {
let mut blob: Option<ID3DBlob> = None;
let mut error: Option<ID3DBlob> = None;
let desc = D3D12_ROOT_SIGNATURE_DESC {
NumParameters: 0,
pParameters: null_mut() as _,
NumStaticSamplers: 0,
pStaticSamplers: null_mut() as _,
Flags: D3D12_ROOT_SIGNATURE_FLAGS::D3D12_ROOT_SIGNATURE_FLAG_ALLOW_INPUT_ASSEMBLER_INPUT_LAYOUT,
};
D3D12SerializeRootSignature(
&desc,
D3D_ROOT_SIGNATURE_VERSION::D3D_ROOT_SIGNATURE_VERSION_1_0,
&mut blob as _,
&mut error as _,
)
.and_then(|| {
if error.is_none() {
blob.unwrap()
} else {
panic!("Root signature failed, error blob contains the error")
}
})
}?;
device.CreateRootSignature::<ID3D12RootSignature>(
0,
root.GetBufferPointer(),
root.GetBufferSize(),
)
}?;
let vertex_shader = unsafe {
let data = include_bytes!("./01-triangle.hlsl");
let mut err: Option<ID3DBlob> = None;
let mut ptr: Option<ID3DBlob> = None;
D3DCompile(
data.as_ptr() as *mut _,
data.len(),
PSTR("01-triangle.hlsl\0".as_ptr() as _),
null_mut(),
None,
PSTR("VSMain\0".as_ptr() as _),
PSTR("vs_5_0\0".as_ptr() as _),
0,
0,
&mut ptr,
&mut err,
)
.ok()?;
match ptr {
Some(v) => v,
None => {
panic!(
"Shader creation failed with error {}",
CString::from_raw(err.unwrap().GetBufferPointer() as _).to_string_lossy()
)
}
}
};
let pixel_shader = unsafe {
let data = include_bytes!("./01-triangle.hlsl");
let mut err: Option<ID3DBlob> = None;
let mut ptr: Option<ID3DBlob> = None;
D3DCompile(
data.as_ptr() as *mut _,
data.len(),
PSTR("01-triangle.hlsl\0".as_ptr() as _),
null_mut(),
None,
PSTR("PSMain\0".as_ptr() as _),
PSTR("ps_5_0\0".as_ptr() as _),
0,
0,
&mut ptr,
&mut err,
)
.ok()?;
match ptr {
Some(v) => v,
None => {
panic!(
"Shader creation failed with error {}",
CString::from_raw(err.unwrap().GetBufferPointer() as _).to_string_lossy()
)
}
}
};
let mut els = [
D3D12_INPUT_ELEMENT_DESC {
SemanticName: PSTR("POSITION\0".as_ptr() as _),
SemanticIndex: 0,
Format: DXGI_FORMAT::DXGI_FORMAT_R32G32B32_FLOAT,
InputSlot: 0,
InstanceDataStepRate: 0,
InputSlotClass:
D3D12_INPUT_CLASSIFICATION::D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA,
AlignedByteOffset: 0,
},
D3D12_INPUT_ELEMENT_DESC {
SemanticName: PSTR("COLOR\0".as_ptr() as _),
SemanticIndex: 0,
Format: DXGI_FORMAT::DXGI_FORMAT_R32G32B32A32_FLOAT,
InputSlot: 0,
InstanceDataStepRate: 0,
InputSlotClass:
D3D12_INPUT_CLASSIFICATION::D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA,
AlignedByteOffset: 12,
},
];
let pso_desc = D3D12_GRAPHICS_PIPELINE_STATE_DESC {
// TODO: Can I get rid of this clone? Or do I even have to?
pRootSignature: Some(root_signature.clone()),
// unsafe { std::mem::transmute(root_signature.abi()) },
InputLayout: D3D12_INPUT_LAYOUT_DESC {
NumElements: els.len() as u32,
pInputElementDescs: els.as_mut_ptr(),
},
VS: D3D12_SHADER_BYTECODE {
BytecodeLength: unsafe { vertex_shader.GetBufferSize() },
pShaderBytecode: unsafe { vertex_shader.GetBufferPointer() },
},
PS: D3D12_SHADER_BYTECODE {
BytecodeLength: unsafe { pixel_shader.GetBufferSize() },
pShaderBytecode: unsafe { pixel_shader.GetBufferPointer() },
},
RasterizerState: cd3dx12_rasterizer_desc_default(),
BlendState: cd3dx12_blend_desc_default(),
SampleMask: 0xffffffff,
PrimitiveTopologyType:
D3D12_PRIMITIVE_TOPOLOGY_TYPE::D3D12_PRIMITIVE_TOPOLOGY_TYPE_TRIANGLE,
NumRenderTargets: 1,
RTVFormats: (0..D3D12_SIMULTANEOUS_RENDER_TARGET_COUNT)
.map(|i| {
if i == 0 {
DXGI_FORMAT::DXGI_FORMAT_B8G8R8A8_UNORM
} else {
DXGI_FORMAT::DXGI_FORMAT_UNKNOWN
}
})
.collect::<Vec<_>>()
.try_into()
.unwrap(),
SampleDesc: DXGI_SAMPLE_DESC {
Count: 1,
Quality: 0,
},
..D3D12_GRAPHICS_PIPELINE_STATE_DESC::default()
};
let pipeline_state =
unsafe { device.CreateGraphicsPipelineState::<ID3D12PipelineState>(&pso_desc) }
.expect("Unable to create pipeline state");
// Create direct command list
let list: ID3D12GraphicsCommandList = unsafe {
device.CreateCommandList(
0,
D3D12_COMMAND_LIST_TYPE::D3D12_COMMAND_LIST_TYPE_DIRECT,
&allocators[current_frame],
&pipeline_state,
)
}?;
unsafe {
list.Close().ok()?;
}
// Create fence
let (fence, fence_values, fence_event) = unsafe {
let fence =
device.CreateFence::<ID3D12Fence>(0, D3D12_FENCE_FLAGS::D3D12_FENCE_FLAG_NONE)?;
let fence_event = CreateEventA(null_mut(), false, false, PSTR(null_mut()));
if fence_event.0 == 0 {
panic!("Unable to create fence event");
}
(fence, [1; NUM_OF_FRAMES], fence_event)
};
let viewport = D3D12_VIEWPORT {
Width: 1024.0,
Height: 1024.0,
MaxDepth: D3D12_MAX_DEPTH,
MinDepth: D3D12_MIN_DEPTH,
TopLeftX: 0.0,
TopLeftY: 0.0,
};
let scissor = RECT {
top: 0,
left: 0,
bottom: 1024,
right: 1024,
};
// Resource initialization ------------------------------------------
unsafe {
// allocators[current_frame].Reset().ok()?;
list.Reset(&allocators[current_frame], &pipeline_state)
.ok()?;
}
let (vertex_buffer, vertex_buffer_view, _vertex_buffer_upload) = unsafe {
            // Coordinate space again, as a refresher:
//
// x, y
// -1.0, +1.0 +1.0, +1.0
// 0──────────┬──────────1 ◄─── vertex index
// │ │ │
// │ │ │
// │ │ │
// │ │ │
// │ 0,│0 │
// ├──────────┼──────────┤
// │ │ │
// │ │ │
// │ │ │
// │ │ │
// │ │ │
// 3──────────┴──────────2
// -1.0, -1.0 +1.0, -1.0
            // In order to create a quad (that is, a square), we form two
            // triangles from the vertices:
            //
            // Indices 0, 1, 2 form the first triangle, and
            // indices 0, 2, 3 form the second triangle.
            // Vertices (the triangles are formed by the indices below, not by the vertex order)
let vertices: [Vertex; 4] = [
Vertex::new([-1.0, 1.0, 0.0], RED),
Vertex::new([1.0, 1.0, 0.0], GREEN),
Vertex::new([1.0, -1.0, 0.0], BLUE_TRANSPARENT),
Vertex::new([-1.0, -1.0, 0.0], MAGENTA),
];
let vertices_as_bytes = std::slice::from_raw_parts(
(&vertices as *const _) as *const u8,
std::mem::size_of_val(&vertices),
);
let vertex_buffers = create_default_buffer(&device, &list, vertices_as_bytes)?;
let vertex_buffer_view = D3D12_VERTEX_BUFFER_VIEW {
BufferLocation: vertex_buffers.gpu_buffer.GetGPUVirtualAddress(),
StrideInBytes: std::mem::size_of::<Vertex>() as _,
SizeInBytes: vertices_as_bytes.len() as _,
};
(
vertex_buffers.gpu_buffer,
vertex_buffer_view,
vertex_buffers.upload_buffer,
)
};
let (indices_buffer, indices_buffer_view, _indicies_upload_buffer) = unsafe {
            // Vertex indices which form the two triangles:
let indices: [u32; 6] = [
0, 1, 2, // Upper right triangle
0, 2, 3, // Bottom left triangle
];
let indicies_as_bytes = std::slice::from_raw_parts(
(&indices as *const _) as *const u8,
std::mem::size_of_val(&indices),
);
let buffers = create_default_buffer(&device, &list, indicies_as_bytes)?;
let view = D3D12_INDEX_BUFFER_VIEW {
BufferLocation: buffers.gpu_buffer.GetGPUVirtualAddress(),
SizeInBytes: indicies_as_bytes.len() as _,
Format: DXGI_FORMAT::DXGI_FORMAT_R32_UINT,
};
(buffers.gpu_buffer, view, buffers.upload_buffer)
};
unsafe {
list.Close().ok()?;
let mut lists = [Some(list.cast::<ID3D12CommandList>()?)];
queue.ExecuteCommandLists(lists.len() as _, lists.as_mut_ptr());
}
let mut win = Window {
hwnd,
factory,
adapter,
device,
queue,
allocators,
comp_device,
swap_chain,
current_frame,
comp_target,
comp_visual,
rtv_desc_heap,
rtv_desc_size,
back_buffers,
root_signature,
list,
pipeline_state,
vertex_shader,
pixel_shader,
viewport,
scissor,
fence,
fence_event,
fence_values,
vertex_buffer,
vertex_buffer_view,
indices_buffer,
indices_buffer_view,
};
win.wait_for_gpu()?;
// Temporary upload buffers _indicies_upload_buffer, and
// _vertex_buffer_upload can now be destroyed.
// End of resource initialization -------------------------------
Ok(win)
}
fn populate_command_list(&mut self) -> ::windows::Result<()> {
unsafe {
// Get the current backbuffer on which to draw
let current_frame = self.swap_chain.GetCurrentBackBufferIndex() as usize;
let current_back_buffer = &self.back_buffers[current_frame];
let rtv = {
let mut ptr = self.rtv_desc_heap.GetCPUDescriptorHandleForHeapStart();
ptr.ptr += self.rtv_desc_size * current_frame;
ptr
};
// Reset allocator
self.allocators[current_frame].Reset().ok()?;
// Reset list
self.list
.Reset(&self.allocators[current_frame], &self.pipeline_state)
.ok()?;
// Set root signature, viewport and scissor rect
self.list.SetGraphicsRootSignature(&self.root_signature);
self.list.RSSetViewports(1, &self.viewport);
self.list.RSSetScissorRects(1, &self.scissor);
// Direct the draw commands to the render target resource
self.list.ResourceBarrier(
1,
&cd3dx12_resource_barrier_transition(
current_back_buffer,
D3D12_RESOURCE_STATES::D3D12_RESOURCE_STATE_PRESENT,
D3D12_RESOURCE_STATES::D3D12_RESOURCE_STATE_RENDER_TARGET,
None,
None,
),
);
self.list.OMSetRenderTargets(1, &rtv, false, null_mut());
self.list
.ClearRenderTargetView(rtv, [1.0f32, 0.2, 0.4, 0.5].as_ptr(), 0, null_mut());
self.list.IASetPrimitiveTopology(
D3D_PRIMITIVE_TOPOLOGY::D3D_PRIMITIVE_TOPOLOGY_TRIANGLELIST,
);
self.list.IASetIndexBuffer(&self.indices_buffer_view);
self.list.IASetVertexBuffers(0, 1, &self.vertex_buffer_view);
self.list.DrawIndexedInstanced(6, 1, 0, 0, 0);
// Set render target to be presentable
self.list.ResourceBarrier(
1,
&cd3dx12_resource_barrier_transition(
current_back_buffer,
D3D12_RESOURCE_STATES::D3D12_RESOURCE_STATE_RENDER_TARGET,
D3D12_RESOURCE_STATES::D3D12_RESOURCE_STATE_PRESENT,
None,
None,
),
);
// Close list
self.list.Close().ok()?;
Ok(())
}
}
pub fn wait_for_gpu(&mut self) -> windows::Result<()> {
unsafe {
let fence_value = self.fence_values[self.current_frame];
self.queue.Signal(&self.fence, fence_value).ok()?;
self.fence
.SetEventOnCompletion(fence_value, self.fence_event)
.ok()?;
WaitForSingleObjectEx(self.fence_event, 0xFFFFFFFF, false);
self.fence_values[self.current_frame] += 1;
Ok(())
}
}
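    /// Signals the fence for the frame that was just submitted, then blocks
    /// only if the backbuffer we are about to reuse has not yet been released
    /// by the GPU (the per-frame `fence_values` track this).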
pub fn move_to_next_frame(&mut self) -> windows::Result<()> {
unsafe {
let current_fence_value = self.fence_values[self.current_frame];
self.queue.Signal(&self.fence, current_fence_value).ok()?;
// Update current frame
self.current_frame = self.swap_chain.GetCurrentBackBufferIndex() as usize;
let wait_fence_value = self.fence_values[self.current_frame];
// If the next frame is not ready to be rendered yet, wait until it is ready.
if self.fence.GetCompletedValue() < wait_fence_value {
self.fence
.SetEventOnCompletion(wait_fence_value, self.fence_event)
.ok()?;
WaitForSingleObjectEx(self.fence_event, 0xFFFFFFFF, false);
}
// Update the fence value
self.fence_values[self.current_frame] = current_fence_value + 1;
Ok(())
}
}
pub fn render(&mut self) -> windows::Result<()> {
self.populate_command_list()?;
unsafe {
let mut lists = [Some(self.list.cast::<ID3D12CommandList>()?)];
self.queue
.ExecuteCommandLists(lists.len() as _, lists.as_mut_ptr());
self.swap_chain.Present(1, 0).ok()?;
}
self.move_to_next_frame()?;
Ok(())
}
}
/// Main message loop for the window
extern "system" fn wndproc(hwnd: HWND, msg: u32, wparam: WPARAM, lparam: LPARAM) -> LRESULT {
unsafe {
static mut WINDOW: Option<Window> = None;
match msg {
WM_CREATE => {
let win = Window::new(hwnd).unwrap();
WINDOW = Some(win);
DefWindowProcA(hwnd, msg, wparam, lparam)
}
WM_PAINT => {
if let Some(window) = WINDOW.as_mut() {
window.render().unwrap();
}
ValidateRect(hwnd, std::ptr::null());
LRESULT(0)
}
WM_DESTROY => {
WINDOW = None;
PostQuitMessage(0);
LRESULT(0)
}
_ => DefWindowProcA(hwnd, msg, wparam, lparam),
}
}
}
fn main() {
unsafe {
let instance = GetModuleHandleA(None);
let cursor = LoadCursorW(HINSTANCE(0), IDC_ARROW);
let cls = WNDCLASSA {
style: WNDCLASS_STYLES::CS_HREDRAW | WNDCLASS_STYLES::CS_VREDRAW,
lpfnWndProc: Some(wndproc),
hInstance: instance,
lpszClassName: PSTR(b"Dx12LearningCls\0".as_ptr() as _),
cbClsExtra: 0,
cbWndExtra: 0,
hIcon: HICON(0),
hCursor: cursor,
hbrBackground: HBRUSH(0),
lpszMenuName: PSTR(null_mut()),
};
RegisterClassA(&cls);
let hwnd = CreateWindowExA(
WINDOW_EX_STYLE::WS_EX_NOREDIRECTIONBITMAP as _,
PSTR(b"Dx12LearningCls\0".as_ptr() as _),
PSTR(b"Index buffers example\0".as_ptr() as _),
WINDOW_STYLE::WS_OVERLAPPEDWINDOW | WINDOW_STYLE::WS_VISIBLE,
-2147483648 as _, // Where is CW_USEDEFAULT? I just hardcoded the value
-2147483648 as _,
-2147483648 as _,
-2147483648 as _,
HWND(0),
HMENU(0),
instance,
0 as _,
);
if hwnd == HWND(0) {
panic!("Failed to create window");
}
let mut message = MSG::default();
while GetMessageA(&mut message, HWND(0), 0, 0).into() {
TranslateMessage(&mut message);
DispatchMessageA(&mut message);
}
}
}
| 35.701493 | 116 | 0.517292 |
8f79e85823892ea123014687a117e5b965ada725
| 4,475 |
use std::env;
use std::ffi::OsString;
use std::io;
use std::path::PathBuf;
use std::string::FromUtf8Error;
use crate::fetch::ToolchainSpecifier;
use crate::manifest::bare_version::NoVersionMatchesManifestMsrvError;
use crate::subcommands::verify_msrv;
pub type TResult<T> = Result<T, CargoMSRVError>;
#[derive(Debug, thiserror::Error)]
pub enum CargoMSRVError {
#[error("Unable to parse minimum rust version: {0}")]
BareVersionParse(#[from] crate::manifest::bare_version::Error),
#[error(transparent)]
CargoMetadata(#[from] cargo_metadata::Error),
#[error("The default host triple (target) could not be found.")]
DefaultHostTripleNotFound,
#[error(transparent)]
Env(#[from] env::VarError),
#[error("{0}")]
GenericMessage(String),
#[error("IO error: '{error}'. caused by: '{source}'.")]
Io {
error: io::Error,
source: IoErrorSource,
},
#[error("{0}")]
InvalidConfig(String),
#[error(transparent)]
InvalidRustVersionNumber(#[from] std::num::ParseIntError),
#[error(transparent)]
InvalidUTF8(#[from] FromUtf8Error),
#[error("No crate root found for given crate")]
NoCrateRootFound,
#[error(transparent)]
NoVersionMatchesManifestMSRV(#[from] NoVersionMatchesManifestMsrvError),
#[error("Unable to find key 'package.rust-version' (or 'package.metadata.msrv') in '{0}'")]
NoMSRVKeyInCargoToml(PathBuf),
#[error("Unable to parse Cargo.toml: {0}")]
ParseToml(#[from] toml_edit::TomlError),
#[error(transparent)]
RustReleasesSource(#[from] rust_releases::RustChangelogError),
#[error(transparent)]
RustReleasesRustDistSource(#[from] rust_releases::RustDistError),
#[error("Unable to parse rust-releases source from '{0}'")]
RustReleasesSourceParseError(String),
#[error("Unable to install toolchain with `rustup install {0}`.")]
RustupInstallFailed(ToolchainSpecifier),
#[error("Check toolchain (with `rustup run <toolchain> <command>`) failed.")]
RustupRunWithCommandFailed,
#[error(transparent)]
SemverError(#[from] rust_releases::semver::Error),
#[error(transparent)]
SubCommandVerify(#[from] verify_msrv::Error),
#[error(transparent)]
SystemTime(#[from] std::time::SystemTimeError),
#[error("The given toolchain could not be found. Run `rustup toolchain list` for an overview of installed toolchains.")]
ToolchainNotInstalled,
    #[error("The given target could not be found. Run `rustup target list` for an overview of available targets.")]
UnknownTarget,
#[error("Unable to access log folder, run with --no-log to try again without logging.")]
UnableToAccessLogFolder,
#[error("Unable to get or store the channel manifest on disk.")]
UnableToCacheChannelManifest,
#[error(
r#"Unable to find a Minimum Supported Rust Version (MSRV).
If you think this result is erroneous, please run: `{command}` manually.
If the above does succeed, or you think cargo-msrv errored in another way, please feel free to
report the issue at: https://github.com/foresterre/cargo-msrv/issues
Thank you in advance!"#
)]
UnableToFindAnyGoodVersion { command: String },
#[error("Unable to init logger, run with --no-log to try again without logging.")]
UnableToInitTracing,
#[error("Unable to parse the CLI arguments. Use `cargo msrv help` for more info.")]
UnableToParseCliArgs,
#[error("The Rust stable version could not be parsed from the stable channel manifest.")]
UnableToParseRustVersion,
#[error("Unable to run the checking command. If --check <cmd> is specified, you could try to verify if you can run the cmd manually.")]
UnableToRunCheck,
}
impl From<String> for CargoMSRVError {
fn from(s: String) -> Self {
Self::GenericMessage(s)
}
}
#[derive(Debug, thiserror::Error)]
pub enum IoErrorSource {
#[error("Unable to determine current working directory")]
CurrentDir,
#[error("Unable to read file '{0}'")]
ReadFile(PathBuf),
#[error("Unable to write file '{0}'")]
WriteFile(PathBuf),
#[error("Unable to remove file '{0}'")]
RemoveFile(PathBuf),
#[error("Unable to rename file '{0}'")]
RenameFile(PathBuf),
#[error("Unable to spawn process '{0:?}'")]
SpawnProcess(OsString),
#[error("Unable to collect output from '{0:?}', or process did not terminate properly")]
WaitForProcessAndCollectOutput(OsString),
}
| 30.442177 | 139 | 0.68581 |
612a4ee79be5426a022a12755420ac450e84e55d
| 4,439 |
mod compress;
mod nonzero;
mod pad;
mod slice;
use tract_hir::internal::*;
use tract_hir::ops::array;
use crate::model::{OnnxOpRegister, ParsingContext};
use crate::pb::*;
use tract_num_traits::AsPrimitive;
pub fn register_all_ops(reg: &mut OnnxOpRegister) {
reg.insert("Compress", compress::compress);
reg.insert("Concat", concat);
reg.insert("ConstantLike", constant_like);
reg.insert("ConstantOfShape", constant_of_shape);
reg.insert("Expand", |_, _| Ok((expand(array::MultiBroadcastTo::default()), vec![])));
reg.insert("EyeLike", eye_like);
reg.insert("Flatten", flatten);
reg.insert("Gather", gather);
reg.insert("NonZero", |_, _| Ok((Box::new(nonzero::NonZero), vec![])));
reg.insert("Pad", pad::pad);
reg.insert("Reshape", |_, _| Ok((expand(array::Reshape::default()), vec![])));
reg.insert("Shape", |_, _| Ok((expand(array::Shape::new(DatumType::I64)), vec![])));
reg.insert("Size", |_, _| Ok((expand(array::Size::new(DatumType::I64)), vec![])));
reg.insert("Transpose", transpose);
reg.insert("Tile", |_, _| Ok((expand(array::Tile::default()), vec![])));
reg.insert("Slice", slice::slice);
reg.insert("Split", split);
reg.insert("Squeeze", squeeze);
reg.insert("Unsqueeze", unsqueeze);
}
pub fn concat(
_ctx: &ParsingContext,
node: &NodeProto,
) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {
let axis = node.get_attr("axis")?;
Ok((expand(array::Concat::new(axis)), vec![]))
}
pub fn make_const<T>(shape: &[usize], v: f32) -> TractResult<Arc<Tensor>>
where
T: Copy + Datum,
f32: AsPrimitive<T>,
{
Ok(tract_ndarray::Array::<T, _>::from_elem(shape, v.as_()).into_arc_tensor())
}
pub fn constant_like(
_ctx: &ParsingContext,
node: &NodeProto,
) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {
let value = node.get_attr_opt("value")?.unwrap_or(0.);
if node.input.len() == 0 {
let dt = node.get_attr_opt("dtype")?.unwrap_or(f32::datum_type());
let shape: Vec<usize> = node.get_attr_vec("shape")?;
let tensor = dispatch_numbers!(self::make_const(dt)(&shape, value))?;
Ok((Box::new(tract_hir::ops::konst::Const::new(tensor)), vec![]))
} else {
Ok((Box::new(array::ConstantLike::new(value)), vec![]))
}
}
pub fn constant_of_shape(
_ctx: &ParsingContext,
node: &NodeProto,
) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {
let value = match node.get_attr_opt::<Tensor>("value")? {
Some(val) => val.into_arc_tensor(),
None => make_const::<f32>(&vec![1], 0.0 as f32)?,
};
Ok((expand(array::ConstantOfShape::new(value)), vec![]))
}
pub fn eye_like(
_ctx: &ParsingContext,
node: &NodeProto,
) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {
let dt = node.get_attr_opt("dtype")?;
let k = node.get_attr_opt("k")?.unwrap_or(0);
Ok((Box::new(array::EyeLike::new(dt, k)), vec![]))
}
pub fn flatten(
_ctx: &ParsingContext,
node: &NodeProto,
) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {
let axis = node.get_attr_opt("axis")?.unwrap_or(1);
Ok((expand(array::Flatten::new(axis)), vec![]))
}
pub fn gather(
_ctx: &ParsingContext,
node: &NodeProto,
) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {
let axis = node.get_attr_opt("axis")?.unwrap_or(0);
Ok((Box::new(array::Gather::new(axis)), vec![]))
}
pub fn split(
_ctx: &ParsingContext,
node: &NodeProto,
) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {
let axis = node.get_attr_opt("axis")?.unwrap_or(0);
let split = node.get_attr_opt_vec("split")?;
Ok((expand(array::Split::new(axis, node.output.len(), split)), vec![]))
}
pub fn squeeze(
_ctx: &ParsingContext,
node: &NodeProto,
) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {
let axes = node.get_attr_opt_vec("axes")?;
Ok((expand(array::Squeeze::new(axes)), vec![]))
}
pub fn transpose(
_ctx: &ParsingContext,
node: &NodeProto,
) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {
let perm = node.get_attr_opt_vec("perm")?;
Ok((expand(array::PermuteAxes::new(perm.map(|t| t.into()))), vec![]))
}
pub fn unsqueeze(
_ctx: &ParsingContext,
node: &NodeProto,
) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {
let axes = node.get_attr_vec::<i64>("axes")?.into_iter().map(|x| x as isize).collect();
Ok((expand(array::AddDims::new(axes)), vec![]))
}
| 33.126866 | 91 | 0.628295 |
89525b27ed36af7d5f50d4e1b1b6c152a3fd7c2b
| 19,045 |
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use rustc::session::Session;
use generated_code;
use std::cell::Cell;
use std::env;
use std::path::Path;
use syntax::ast;
use syntax::parse::filemap_to_tts;
use syntax::parse::lexer::{self, StringReader};
use syntax::parse::token::{self, Token};
use syntax::symbol::keywords;
use syntax::tokenstream::TokenTree;
use syntax_pos::*;
#[derive(Clone)]
pub struct SpanUtils<'a> {
pub sess: &'a Session,
// FIXME given that we clone SpanUtils all over the place, this err_count is
// probably useless and any logic relying on it is bogus.
pub err_count: Cell<isize>,
}
impl<'a> SpanUtils<'a> {
pub fn new(sess: &'a Session) -> SpanUtils<'a> {
SpanUtils {
sess: sess,
err_count: Cell::new(0),
}
}
pub fn make_path_string(file_name: &str) -> String {
let path = Path::new(file_name);
if path.is_absolute() {
path.clone().display().to_string()
} else {
env::current_dir().unwrap().join(&path).display().to_string()
}
}
// sub_span starts at span.lo, so we need to adjust the positions etc.
// If sub_span is None, we don't need to adjust.
pub fn make_sub_span(&self, span: Span, sub_span: Option<Span>) -> Option<Span> {
match sub_span {
None => None,
Some(sub) => {
let FileMapAndBytePos {fm, pos} = self.sess.codemap().lookup_byte_offset(span.lo);
let base = pos + fm.start_pos;
Some(Span {
lo: base + self.sess.codemap().lookup_byte_offset(sub.lo).pos,
hi: base + self.sess.codemap().lookup_byte_offset(sub.hi).pos,
expn_id: span.expn_id,
})
}
}
}
pub fn snippet(&self, span: Span) -> String {
match self.sess.codemap().span_to_snippet(span) {
Ok(s) => s,
Err(_) => String::new(),
}
}
pub fn retokenise_span(&self, span: Span) -> StringReader<'a> {
// sadness - we don't have spans for sub-expressions nor access to the tokens
// so in order to get extents for the function name itself (which dxr expects)
// we need to re-tokenise the fn definition
// Note: this is a bit awful - it adds the contents of span to the end of
// the codemap as a new filemap. This is mostly OK, but means we should
// not iterate over the codemap. Also, any spans over the new filemap
// are incompatible with spans over other filemaps.
let filemap = self.sess
.codemap()
.new_filemap(String::from("<anon-dxr>"), None, self.snippet(span));
lexer::StringReader::new(&self.sess.parse_sess, filemap)
}
fn span_to_tts(&self, span: Span) -> Vec<TokenTree> {
let filename = String::from("<anon-dxr>");
let filemap = self.sess.codemap().new_filemap(filename, None, self.snippet(span));
filemap_to_tts(&self.sess.parse_sess, filemap)
}
// Re-parses a path and returns the span for the last identifier in the path
pub fn span_for_last_ident(&self, span: Span) -> Option<Span> {
let mut result = None;
let mut toks = self.retokenise_span(span);
let mut bracket_count = 0;
loop {
let ts = toks.real_token();
if ts.tok == token::Eof {
return self.make_sub_span(span, result)
}
if bracket_count == 0 && (ts.tok.is_ident() || ts.tok.is_keyword(keywords::SelfValue)) {
result = Some(ts.sp);
}
bracket_count += match ts.tok {
token::Lt => 1,
token::Gt => -1,
token::BinOp(token::Shr) => -2,
_ => 0,
}
}
}
// Return the span for the first identifier in the path.
pub fn span_for_first_ident(&self, span: Span) -> Option<Span> {
let mut toks = self.retokenise_span(span);
let mut bracket_count = 0;
loop {
let ts = toks.real_token();
if ts.tok == token::Eof {
return None;
}
if bracket_count == 0 && (ts.tok.is_ident() || ts.tok.is_keyword(keywords::SelfValue)) {
return self.make_sub_span(span, Some(ts.sp));
}
bracket_count += match ts.tok {
token::Lt => 1,
token::Gt => -1,
token::BinOp(token::Shr) => -2,
_ => 0,
}
}
}
    // Return the span for the last ident before a `(` or `<` or '::<', outside
    // any brackets, or the last span.
pub fn sub_span_for_meth_name(&self, span: Span) -> Option<Span> {
let mut toks = self.retokenise_span(span);
let mut prev = toks.real_token();
let mut result = None;
let mut bracket_count = 0;
let mut prev_span = None;
while prev.tok != token::Eof {
prev_span = None;
let mut next = toks.real_token();
if (next.tok == token::OpenDelim(token::Paren) || next.tok == token::Lt) &&
bracket_count == 0 && prev.tok.is_ident() {
result = Some(prev.sp);
}
if bracket_count == 0 && next.tok == token::ModSep {
let old = prev;
prev = next;
next = toks.real_token();
if next.tok == token::Lt && old.tok.is_ident() {
result = Some(old.sp);
}
}
bracket_count += match prev.tok {
token::OpenDelim(token::Paren) | token::Lt => 1,
token::CloseDelim(token::Paren) | token::Gt => -1,
token::BinOp(token::Shr) => -2,
_ => 0,
};
if prev.tok.is_ident() && bracket_count == 0 {
prev_span = Some(prev.sp);
}
prev = next;
}
if result.is_none() && prev_span.is_some() {
return self.make_sub_span(span, prev_span);
}
return self.make_sub_span(span, result);
}
// Return the span for the last ident before a `<` and outside any
// angle brackets, or the last span.
pub fn sub_span_for_type_name(&self, span: Span) -> Option<Span> {
let mut toks = self.retokenise_span(span);
let mut prev = toks.real_token();
let mut result = None;
// We keep track of the following two counts - the depth of nesting of
// angle brackets, and the depth of nesting of square brackets. For the
// angle bracket count, we only count tokens which occur outside of any
// square brackets (i.e. bracket_count == 0). The intutition here is
// that we want to count angle brackets in the type, but not any which
// could be in expression context (because these could mean 'less than',
// etc.).
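        // E.g. while walking `Foo<[T; LEN]>`, everything between `[` and `]` is
        // ignored for angle counting, so a `<` appearing in that expression
        // position would not be mistaken for the start of a type argument list.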
let mut angle_count = 0;
let mut bracket_count = 0;
loop {
let next = toks.real_token();
if (next.tok == token::Lt || next.tok == token::Colon) &&
angle_count == 0 &&
bracket_count == 0 &&
prev.tok.is_ident() {
result = Some(prev.sp);
}
if bracket_count == 0 {
angle_count += match prev.tok {
token::Lt => 1,
token::Gt => -1,
token::BinOp(token::Shl) => 2,
token::BinOp(token::Shr) => -2,
_ => 0,
};
}
bracket_count += match prev.tok {
token::OpenDelim(token::Bracket) => 1,
token::CloseDelim(token::Bracket) => -1,
_ => 0,
};
if next.tok == token::Eof {
break;
}
prev = next;
}
if angle_count != 0 || bracket_count != 0 {
let loc = self.sess.codemap().lookup_char_pos(span.lo);
span_bug!(span,
"Mis-counted brackets when breaking path? Parsing '{}' \
in {}, line {}",
self.snippet(span),
loc.file.name,
loc.line);
}
if result.is_none() && prev.tok.is_ident() && angle_count == 0 {
return self.make_sub_span(span, Some(prev.sp));
}
self.make_sub_span(span, result)
}
// Reparse span and return an owned vector of sub spans of the first limit
// identifier tokens in the given nesting level.
// example with Foo<Bar<T,V>, Bar<T,V>>
// Nesting = 0: all idents outside of angle brackets: [Foo]
// Nesting = 1: idents within one level of angle brackets: [Bar, Bar]
pub fn spans_with_brackets(&self, span: Span, nesting: isize, limit: isize) -> Vec<Span> {
let mut result: Vec<Span> = vec![];
let mut toks = self.retokenise_span(span);
// We keep track of how many brackets we're nested in
let mut angle_count: isize = 0;
let mut bracket_count: isize = 0;
let mut found_ufcs_sep = false;
loop {
let ts = toks.real_token();
if ts.tok == token::Eof {
if angle_count != 0 || bracket_count != 0 {
if generated_code(span) {
return vec![];
}
let loc = self.sess.codemap().lookup_char_pos(span.lo);
span_bug!(span,
"Mis-counted brackets when breaking path? \
Parsing '{}' in {}, line {}",
self.snippet(span),
loc.file.name,
loc.line);
}
return result
}
if (result.len() as isize) == limit {
return result;
}
bracket_count += match ts.tok {
token::OpenDelim(token::Bracket) => 1,
token::CloseDelim(token::Bracket) => -1,
_ => 0,
};
if bracket_count > 0 {
continue;
}
angle_count += match ts.tok {
token::Lt => 1,
token::Gt => -1,
token::BinOp(token::Shl) => 2,
token::BinOp(token::Shr) => -2,
_ => 0,
};
// Ignore the `>::` in `<Type as Trait>::AssocTy`.
// The root cause of this hack is that the AST representation of
// qpaths is horrible. It treats <A as B>::C as a path with two
// segments, B and C and notes that there is also a self type A at
// position 0. Because we don't have spans for individual idents,
// only the whole path, we have to iterate over the tokens in the
// path, trying to pull out the non-nested idents (e.g., avoiding 'a
// in `<A as B<'a>>::C`). So we end up with a span for `B>::C` from
// the start of the first ident to the end of the path.
if !found_ufcs_sep && angle_count == -1 {
found_ufcs_sep = true;
angle_count += 1;
}
if ts.tok.is_ident() && angle_count == nesting {
result.push(self.make_sub_span(span, Some(ts.sp)).unwrap());
}
}
}
/// `span` must be the span for an item such as a function or struct. This
/// function returns the program text from the start of the span until the
/// end of the 'signature' part, that is up to, but not including an opening
/// brace or semicolon.
pub fn signature_string_for_span(&self, span: Span) -> String {
let mut toks = self.span_to_tts(span).into_iter();
let mut prev = toks.next().unwrap();
let first_span = prev.get_span();
let mut angle_count = 0;
for tok in toks {
if let TokenTree::Token(_, ref tok) = prev {
angle_count += match *tok {
token::Eof => { break; }
token::Lt => 1,
token::Gt => -1,
token::BinOp(token::Shl) => 2,
token::BinOp(token::Shr) => -2,
_ => 0,
};
}
if angle_count > 0 {
prev = tok;
continue;
}
if let TokenTree::Token(_, token::Semi) = tok {
return self.snippet(mk_sp(first_span.lo, prev.get_span().hi));
} else if let TokenTree::Delimited(_, ref d) = tok {
if d.delim == token::Brace {
return self.snippet(mk_sp(first_span.lo, prev.get_span().hi));
}
}
prev = tok;
}
self.snippet(span)
}
pub fn sub_span_before_token(&self, span: Span, tok: Token) -> Option<Span> {
let mut toks = self.retokenise_span(span);
let mut prev = toks.real_token();
loop {
if prev.tok == token::Eof {
return None;
}
let next = toks.real_token();
if next.tok == tok {
return self.make_sub_span(span, Some(prev.sp));
}
prev = next;
}
}
pub fn sub_span_of_token(&self, span: Span, tok: Token) -> Option<Span> {
let mut toks = self.retokenise_span(span);
loop {
let next = toks.real_token();
if next.tok == token::Eof {
return None;
}
if next.tok == tok {
return self.make_sub_span(span, Some(next.sp));
}
}
}
pub fn sub_span_after_keyword(&self, span: Span, keyword: keywords::Keyword) -> Option<Span> {
self.sub_span_after(span, |t| t.is_keyword(keyword))
}
pub fn sub_span_after_token(&self, span: Span, tok: Token) -> Option<Span> {
self.sub_span_after(span, |t| t == tok)
}
fn sub_span_after<F: Fn(Token) -> bool>(&self, span: Span, f: F) -> Option<Span> {
let mut toks = self.retokenise_span(span);
loop {
let ts = toks.real_token();
if ts.tok == token::Eof {
return None;
}
if f(ts.tok) {
let ts = toks.real_token();
if ts.tok == token::Eof {
return None
} else {
return self.make_sub_span(span, Some(ts.sp));
}
}
}
}
// Returns a list of the spans of idents in a path.
// E.g., For foo::bar<x,t>::baz, we return [foo, bar, baz] (well, their spans)
pub fn spans_for_path_segments(&self, path: &ast::Path) -> Vec<Span> {
self.spans_with_brackets(path.span, 0, -1)
}
// Return an owned vector of the subspans of the param identifier
// tokens found in span.
pub fn spans_for_ty_params(&self, span: Span, number: isize) -> Vec<Span> {
// Type params are nested within one level of brackets:
// i.e. we want Vec<A, B> from Foo<A, B<T,U>>
self.spans_with_brackets(span, 1, number)
}
pub fn report_span_err(&self, kind: &str, span: Span) {
let loc = self.sess.codemap().lookup_char_pos(span.lo);
info!("({}) Could not find sub_span in `{}` in {}, line {}",
kind,
self.snippet(span),
loc.file.name,
loc.line);
self.err_count.set(self.err_count.get() + 1);
if self.err_count.get() > 1000 {
bug!("span errors reached 1000, giving up");
}
}
// Return the name for a macro definition (identifier after first `!`)
pub fn span_for_macro_def_name(&self, span: Span) -> Option<Span> {
let mut toks = self.retokenise_span(span);
loop {
let ts = toks.real_token();
if ts.tok == token::Eof {
return None;
}
if ts.tok == token::Not {
let ts = toks.real_token();
if ts.tok.is_ident() {
return self.make_sub_span(span, Some(ts.sp));
} else {
return None;
}
}
}
}
// Return the name for a macro use (identifier before first `!`).
pub fn span_for_macro_use_name(&self, span:Span) -> Option<Span> {
let mut toks = self.retokenise_span(span);
let mut prev = toks.real_token();
loop {
if prev.tok == token::Eof {
return None;
}
let ts = toks.real_token();
if ts.tok == token::Not {
if prev.tok.is_ident() {
return self.make_sub_span(span, Some(prev.sp));
} else {
return None;
}
}
prev = ts;
}
}
/// Return true if the span is generated code, and
/// it is not a subspan of the root callsite.
///
/// Used to filter out spans of minimal value,
/// such as references to macro internal variables.
pub fn filter_generated(&self, sub_span: Option<Span>, parent: Span) -> bool {
if !generated_code(parent) {
if sub_span.is_none() {
// Edge case - this occurs on generated code with incorrect expansion info.
return true;
}
return false;
}
// If sub_span is none, filter out generated code.
if sub_span.is_none() {
return true;
}
        // If the span comes from a fake filemap, filter it.
if !self.sess.codemap().lookup_char_pos(parent.lo).file.is_real_file() {
return true;
}
// Otherwise, a generated span is deemed invalid if it is not a sub-span of the root
// callsite. This filters out macro internal variables and most malformed spans.
let span = self.sess.codemap().source_callsite(parent);
!(span.contains(parent))
}
}
macro_rules! filter {
($util: expr, $span: ident, $parent: expr, None) => {
if $util.filter_generated($span, $parent) {
return None;
}
};
($util: expr, $span: ident, $parent: expr) => {
if $util.filter_generated($span, $parent) {
return;
}
};
}
| 36.837524 | 100 | 0.513101 |
9cc4f15dbcc36f6e0abe72a5274b864c68f3ae50
| 10,782 |
use crate::prelude::*;
use nu_engine::whole_stream_command;
use std::error::Error;
pub fn create_default_context(interactive: bool) -> Result<EvaluationContext, Box<dyn Error>> {
let context = EvaluationContext::basic();
{
use crate::commands::*;
context.add_commands(vec![
// Fundamentals
whole_stream_command(NuPlugin),
whole_stream_command(Let),
whole_stream_command(LetEnv),
whole_stream_command(Def),
whole_stream_command(Source),
// System/file operations
whole_stream_command(Exec),
whole_stream_command(Pwd),
whole_stream_command(Ls),
whole_stream_command(Du),
whole_stream_command(Cd),
whole_stream_command(Remove),
whole_stream_command(Open),
whole_stream_command(Config),
whole_stream_command(ConfigGet),
whole_stream_command(ConfigSet),
whole_stream_command(ConfigSetInto),
whole_stream_command(ConfigClear),
whole_stream_command(ConfigRemove),
whole_stream_command(ConfigPath),
whole_stream_command(Help),
whole_stream_command(History),
whole_stream_command(Save),
whole_stream_command(Touch),
whole_stream_command(Cpy),
whole_stream_command(Date),
whole_stream_command(DateListTimeZone),
whole_stream_command(DateNow),
whole_stream_command(DateToTable),
whole_stream_command(DateToTimeZone),
whole_stream_command(DateFormat),
whole_stream_command(Cal),
whole_stream_command(Mkdir),
whole_stream_command(Mv),
whole_stream_command(Kill),
whole_stream_command(Version),
whole_stream_command(Clear),
whole_stream_command(Describe),
whole_stream_command(Which),
whole_stream_command(Debug),
whole_stream_command(WithEnv),
whole_stream_command(Do),
whole_stream_command(Sleep),
// Statistics
whole_stream_command(Size),
whole_stream_command(Length),
whole_stream_command(Benchmark),
// Metadata
whole_stream_command(Tags),
// Shells
whole_stream_command(Next),
whole_stream_command(Previous),
whole_stream_command(Shells),
whole_stream_command(Enter),
whole_stream_command(Exit),
// Viz
whole_stream_command(Chart),
// Viewers
whole_stream_command(Autoview),
whole_stream_command(Table),
// Text manipulation
whole_stream_command(Hash),
whole_stream_command(HashBase64),
whole_stream_command(HashMd5),
whole_stream_command(Split),
whole_stream_command(SplitColumn),
whole_stream_command(SplitRow),
whole_stream_command(SplitChars),
whole_stream_command(Lines),
whole_stream_command(Echo),
whole_stream_command(Parse),
whole_stream_command(Str),
whole_stream_command(StrToDecimal),
whole_stream_command(StrToInteger),
whole_stream_command(StrDowncase),
whole_stream_command(StrUpcase),
whole_stream_command(StrCapitalize),
whole_stream_command(StrFindReplace),
whole_stream_command(StrFrom),
whole_stream_command(StrSubstring),
whole_stream_command(StrToDatetime),
whole_stream_command(StrContains),
whole_stream_command(StrIndexOf),
whole_stream_command(StrTrim),
whole_stream_command(StrTrimLeft),
whole_stream_command(StrTrimRight),
whole_stream_command(StrStartsWith),
whole_stream_command(StrEndsWith),
whole_stream_command(StrCollect),
whole_stream_command(StrLength),
whole_stream_command(StrLPad),
whole_stream_command(StrReverse),
whole_stream_command(StrRPad),
whole_stream_command(StrCamelCase),
whole_stream_command(StrPascalCase),
whole_stream_command(StrKebabCase),
whole_stream_command(StrSnakeCase),
whole_stream_command(StrScreamingSnakeCase),
whole_stream_command(BuildString),
whole_stream_command(Ansi),
whole_stream_command(AnsiStrip),
whole_stream_command(Char),
// Column manipulation
whole_stream_command(DropColumn),
whole_stream_command(Move),
whole_stream_command(Reject),
whole_stream_command(Select),
whole_stream_command(Get),
whole_stream_command(Update),
whole_stream_command(Insert),
whole_stream_command(Into),
whole_stream_command(IntoBinary),
whole_stream_command(IntoInt),
whole_stream_command(SplitBy),
// Row manipulation
whole_stream_command(All),
whole_stream_command(Any),
whole_stream_command(Reverse),
whole_stream_command(Append),
whole_stream_command(Prepend),
whole_stream_command(SortBy),
whole_stream_command(GroupBy),
whole_stream_command(GroupByDate),
whole_stream_command(First),
whole_stream_command(Last),
whole_stream_command(Every),
whole_stream_command(Nth),
whole_stream_command(Drop),
whole_stream_command(Format),
whole_stream_command(FileSize),
whole_stream_command(Where),
whole_stream_command(If),
whole_stream_command(Compact),
whole_stream_command(Default),
whole_stream_command(Skip),
whole_stream_command(SkipUntil),
whole_stream_command(SkipWhile),
whole_stream_command(Keep),
whole_stream_command(KeepUntil),
whole_stream_command(KeepWhile),
whole_stream_command(Range),
whole_stream_command(Rename),
whole_stream_command(Uniq),
whole_stream_command(Each),
whole_stream_command(EachGroup),
whole_stream_command(EachWindow),
whole_stream_command(Empty),
// Table manipulation
whole_stream_command(Flatten),
whole_stream_command(Move),
whole_stream_command(Merge),
whole_stream_command(Shuffle),
whole_stream_command(Wrap),
whole_stream_command(Pivot),
whole_stream_command(Headers),
whole_stream_command(Reduce),
whole_stream_command(Roll),
whole_stream_command(RollColumn),
whole_stream_command(RollUp),
whole_stream_command(Rotate),
whole_stream_command(RotateCounterClockwise),
// Data processing
whole_stream_command(Histogram),
whole_stream_command(Autoenv),
whole_stream_command(AutoenvTrust),
whole_stream_command(AutoenvUnTrust),
whole_stream_command(Math),
whole_stream_command(MathAbs),
whole_stream_command(MathAverage),
whole_stream_command(MathEval),
whole_stream_command(MathMedian),
whole_stream_command(MathMinimum),
whole_stream_command(MathMode),
whole_stream_command(MathMaximum),
whole_stream_command(MathStddev),
whole_stream_command(MathSummation),
whole_stream_command(MathVariance),
whole_stream_command(MathProduct),
whole_stream_command(MathRound),
whole_stream_command(MathFloor),
whole_stream_command(MathCeil),
whole_stream_command(MathSqrt),
// File format output
whole_stream_command(To),
whole_stream_command(ToCsv),
whole_stream_command(ToHtml),
whole_stream_command(ToJson),
whole_stream_command(ToMarkdown),
whole_stream_command(ToToml),
whole_stream_command(ToTsv),
whole_stream_command(ToUrl),
whole_stream_command(ToYaml),
whole_stream_command(ToXml),
// File format input
whole_stream_command(From),
whole_stream_command(FromCsv),
whole_stream_command(FromEml),
whole_stream_command(FromTsv),
whole_stream_command(FromSsv),
whole_stream_command(FromIni),
whole_stream_command(FromJson),
whole_stream_command(FromOds),
whole_stream_command(FromToml),
whole_stream_command(FromUrl),
whole_stream_command(FromXlsx),
whole_stream_command(FromXml),
whole_stream_command(FromYaml),
whole_stream_command(FromYml),
whole_stream_command(FromIcs),
whole_stream_command(FromVcf),
// "Private" commands (not intended to be accessed directly)
whole_stream_command(RunExternalCommand { interactive }),
// Random value generation
whole_stream_command(Random),
whole_stream_command(RandomBool),
whole_stream_command(RandomDice),
#[cfg(feature = "uuid_crate")]
whole_stream_command(RandomUUID),
whole_stream_command(RandomInteger),
whole_stream_command(RandomDecimal),
whole_stream_command(RandomChars),
// Path
whole_stream_command(PathBasename),
whole_stream_command(PathCommand),
whole_stream_command(PathDirname),
whole_stream_command(PathExists),
whole_stream_command(PathExpand),
whole_stream_command(PathJoin),
whole_stream_command(PathParse),
whole_stream_command(PathSplit),
whole_stream_command(PathType),
// Url
whole_stream_command(UrlCommand),
whole_stream_command(UrlScheme),
whole_stream_command(UrlPath),
whole_stream_command(UrlHost),
whole_stream_command(UrlQuery),
whole_stream_command(Seq),
whole_stream_command(SeqDates),
whole_stream_command(TermSize),
]);
#[cfg(feature = "clipboard-cli")]
{
context.add_commands(vec![whole_stream_command(crate::commands::clip::Clip)]);
}
}
Ok(context)
}
| 40.996198 | 95 | 0.621499 |
62ab08cfd8e1569922a638b7aa76e723835a9b78
| 3,991 |
// Copyright 2021 Parity Technologies (UK) Ltd.
// This file is part of Polkadot.
// Polkadot is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Polkadot is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Polkadot. If not, see <http://www.gnu.org/licenses/>.
//! Tests for the Elexeum Runtime Configuration
use crate::*;
use frame_support::weights::{GetDispatchInfo, WeightToFeePolynomial};
use keyring::Sr25519Keyring::Charlie;
use pallet_transaction_payment::Multiplier;
use parity_scale_codec::Encode;
use separator::Separatable;
use sp_runtime::FixedPointNumber;
#[test]
fn payout_weight_portion() {
use pallet_staking::WeightInfo;
let payout_weight = <Runtime as pallet_staking::Config>::WeightInfo::payout_stakers_alive_staked(
MaxNominatorRewardedPerValidator::get(),
) as f64;
let block_weight = BlockWeights::get().max_block as f64;
println!(
"a full payout takes {:.2} of the block weight [{} / {}]",
payout_weight / block_weight,
payout_weight,
block_weight
);
assert!(payout_weight * 2f64 < block_weight);
}
#[test]
#[ignore]
fn block_cost() {
let max_block_weight = BlockWeights::get().max_block;
let raw_fee = WeightToFee::calc(&max_block_weight);
println!(
"Full Block weight == {} // WeightToFee(full_block) == {} plank",
max_block_weight,
raw_fee.separated_string(),
);
}
#[test]
#[ignore]
fn transfer_cost_min_multiplier() {
let min_multiplier = runtime_common::MinimumMultiplier::get();
let call = pallet_balances::Call::<Runtime>::transfer_keep_alive {
dest: Charlie.to_account_id().into(),
value: Default::default(),
};
let info = call.get_dispatch_info();
// convert to outer call.
let call = Call::Balances(call);
let len = call.using_encoded(|e| e.len()) as u32;
let mut ext = sp_io::TestExternalities::new_empty();
let mut test_with_multiplier = |m| {
ext.execute_with(|| {
pallet_transaction_payment::NextFeeMultiplier::<Runtime>::put(m);
let fee = TransactionPayment::compute_fee(len, &info, 0);
println!(
"weight = {:?} // multiplier = {:?} // full transfer fee = {:?}",
info.weight.separated_string(),
pallet_transaction_payment::NextFeeMultiplier::<Runtime>::get(),
fee.separated_string(),
);
});
};
test_with_multiplier(min_multiplier);
test_with_multiplier(Multiplier::saturating_from_rational(1, 1u128));
test_with_multiplier(Multiplier::saturating_from_rational(1, 1_000u128));
test_with_multiplier(Multiplier::saturating_from_rational(1, 1_000_000u128));
test_with_multiplier(Multiplier::saturating_from_rational(1, 1_000_000_000u128));
}
#[test]
fn nominator_limit() {
use pallet_election_provider_multi_phase::WeightInfo;
// starting point of the nominators.
let all_voters: u32 = 10_000;
// assuming we want around 5k candidates and 1k active validators.
let all_targets: u32 = 5_000;
let desired: u32 = 1_000;
let weight_with = |active| {
<Runtime as pallet_election_provider_multi_phase::Config>::WeightInfo::submit_unsigned(
all_voters.max(active),
all_targets,
active,
desired,
)
};
let mut active = 1;
while weight_with(active) <= OffchainSolutionWeightLimit::get() || active == all_voters {
active += 1;
}
println!("can support {} nominators to yield a weight of {}", active, weight_with(active));
}
#[test]
fn call_size() {
assert!(
core::mem::size_of::<Call>() <= 230,
"size of Call is more than 230 bytes: some calls have too big arguments, use Box to reduce \
the size of Call.
        If the limit is too strong, maybe consider increasing the limit to 300.",
);
}
| 31.674603 | 98 | 0.731646 |
01acc697b416db38f7be324ff0e5e4e15ec2da3a
| 19,753 |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
//! Utilities for providing the completion language feature
use crate::lsp::Position;
use common::{SourceLocationKey, Span};
use graphql_syntax::{parse_executable, ExecutableDocument, GraphQLSource};
use interner::StringKey;
use log::info;
use relay_compiler::Programs;
use crate::lsp::{
CompletionItem, CompletionParams, CompletionResponse, Connection, Message, ServerRequestId,
ServerResponse, TextDocumentPositionParams, Url,
};
use schema::{
Directive as SchemaDirective, DirectiveLocation, Schema, Type, TypeReference, TypeWithFields,
};
use graphql_syntax::{
Directive, ExecutableDefinition, FragmentSpread, InlineFragment, LinkedField, List,
OperationDefinition, OperationKind, ScalarField, Selection,
};
pub type GraphQLSourceCache = std::collections::HashMap<Url, Vec<GraphQLSource>>;
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum CompletionKind {
FieldName,
FragmentSpread,
DirectiveName { location: DirectiveLocation },
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct CompletionRequest {
/// The type of the completion request we're responding to
kind: CompletionKind,
/// A list of type metadata that we can use to resolve the leaf
/// type the request is being made against
type_path: Vec<TypePathItem>,
}
impl Default for CompletionRequest {
fn default() -> Self {
CompletionRequest {
kind: CompletionKind::FieldName,
type_path: vec![],
}
}
}
impl CompletionRequest {
fn add_type(&mut self, type_path_item: TypePathItem) {
self.type_path.push(type_path_item)
}
/// Returns the leaf type, which is the type that the completion request is being made against.
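    /// For example, a path of `[Operation(Query), LinkedField { name: "viewer" },
    /// LinkedField { name: "friends" }]` (hypothetical field names) resolves the query root
    /// type, then the type of `viewer`, then the inner type of `friends`; that final type is
    /// what completion items are generated against.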
fn resolve_leaf_type(self, schema: &Schema) -> Option<Type> {
let mut type_path = self.type_path;
type_path.reverse();
let mut type_ =
resolve_root_type(type_path.pop().expect("path must be non-empty"), schema)?;
while let Some(path_item) = type_path.pop() {
type_ = resolve_relative_type(type_, path_item, schema)?;
}
Some(type_)
}
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub enum TypePathItem {
Operation(OperationKind),
FragmentDefinition { type_name: StringKey },
InlineFragment { type_name: StringKey },
LinkedField { name: StringKey },
ScalarField { name: StringKey },
}
pub fn create_completion_request(
document: ExecutableDocument,
position_span: Span,
) -> Option<CompletionRequest> {
info!("Building completion path for {:#?}", document);
let mut completion_request = CompletionRequest::default();
for definition in document.definitions {
match &definition {
ExecutableDefinition::Operation(operation) => {
if operation.location.contains(position_span) {
let (_, kind) = operation.operation.clone()?;
completion_request.add_type(TypePathItem::Operation(kind));
info!(
"Completion request is within operation: {:?}",
operation.name
);
let OperationDefinition {
selections,
directives,
..
} = operation;
let directive_location = match kind {
OperationKind::Query => DirectiveLocation::Query,
OperationKind::Mutation => DirectiveLocation::Mutation,
OperationKind::Subscription => DirectiveLocation::Subscription,
};
build_request_from_selection_or_directives(
selections,
directives,
directive_location,
position_span,
&mut completion_request,
);
}
// Check if the position span is within this operation's span
}
ExecutableDefinition::Fragment(fragment) => {
if fragment.location.contains(position_span) {
let type_name = fragment.type_condition.type_.value;
completion_request.add_type(TypePathItem::FragmentDefinition { type_name });
build_request_from_selection_or_directives(
&fragment.selections,
&fragment.directives,
DirectiveLocation::FragmentDefinition,
position_span,
&mut completion_request,
);
}
}
}
}
Some(completion_request)
}
/// Resolves the root type of this completion path.
fn resolve_root_type(root_path_item: TypePathItem, schema: &Schema) -> Option<Type> {
match root_path_item {
TypePathItem::Operation(kind) => match kind {
OperationKind::Query => schema.query_type(),
OperationKind::Mutation => schema.mutation_type(),
OperationKind::Subscription => schema.subscription_type(),
},
TypePathItem::FragmentDefinition { type_name } => schema.get_type(type_name),
_ => {
// TODO(brandondail) log here
None
}
}
}
fn resolve_relative_type(
parent_type: Type,
path_item: TypePathItem,
schema: &Schema,
) -> Option<Type> {
match path_item {
TypePathItem::Operation(_) => {
// TODO(brandondail) log here
None
}
TypePathItem::FragmentDefinition { .. } => {
// TODO(brandondail) log here
None
}
TypePathItem::LinkedField { name } => {
let field_id = schema.named_field(parent_type, name)?;
let field = schema.field(field_id);
info!("resolved type for {:?} : {:?}", field.name, field.type_);
Some(field.type_.inner())
}
TypePathItem::ScalarField { .. } => Some(parent_type),
TypePathItem::InlineFragment { type_name } => schema.get_type(type_name),
}
}
fn resolve_completion_items_from_fields<T: TypeWithFields>(
type_: &T,
schema: &Schema,
) -> Vec<CompletionItem> {
type_
.fields()
.iter()
.map(|field_id| {
let field = schema.field(*field_id);
let name = field.name.to_string();
CompletionItem::new_simple(name, String::from(""))
})
.collect()
}
/// Finds all the valid fragment names for a given type. Used to complete fragment spreads
fn get_valid_fragments_for_type(type_: Type, programs: &Programs) -> Vec<StringKey> {
let mut valid_fragment_names = vec![];
for fragment in programs.source.fragments() {
if fragment.type_condition == type_ {
valid_fragment_names.push(fragment.name.item);
}
}
info!("get_valid_fragments_for_type {:#?}", valid_fragment_names);
valid_fragment_names
}
fn resolve_completion_items_for_fragment_spread(
type_: Type,
programs: &Programs,
) -> Vec<CompletionItem> {
get_valid_fragments_for_type(type_, programs)
.iter()
.map(|fragment_name| {
CompletionItem::new_simple(fragment_name.to_string(), String::from(""))
})
.collect()
}
pub fn completion_items_for_request(
request: CompletionRequest,
schema: &Schema,
programs: Option<&Programs>,
) -> Option<Vec<CompletionItem>> {
let kind = request.kind;
let leaf_type = request.resolve_leaf_type(schema)?;
info!("completion_items_for_request: {:?} - {:?}", leaf_type, kind);
match kind {
CompletionKind::FragmentSpread => {
if let Some(programs) = programs {
let items = resolve_completion_items_for_fragment_spread(leaf_type, programs);
Some(items)
} else {
None
}
}
CompletionKind::FieldName => match leaf_type {
Type::Interface(interface_id) => {
let interface = schema.interface(interface_id);
let items = resolve_completion_items_from_fields(interface, schema);
Some(items)
}
Type::Object(object_id) => {
let object = schema.object(object_id);
let items = resolve_completion_items_from_fields(object, schema);
Some(items)
}
Type::Enum(_) | Type::InputObject(_) | Type::Scalar(_) | Type::Union(_) => None,
},
CompletionKind::DirectiveName { location } => {
let directives = schema.directives_for_location(location);
let items = directives
.iter()
.map(|directive| completion_item_from_directive(directive, schema))
.collect();
Some(items)
}
}
}
fn build_request_from_selections(
selections: &List<Selection>,
position_span: Span,
completion_request: &mut CompletionRequest,
) {
for item in &selections.items {
if item.span().contains(position_span) {
match item {
Selection::LinkedField(node) => {
completion_request.kind = CompletionKind::FieldName;
let LinkedField {
name,
selections,
directives,
..
} = node;
completion_request.add_type(TypePathItem::LinkedField { name: name.value });
build_request_from_selection_or_directives(
selections,
directives,
DirectiveLocation::Field,
position_span,
completion_request,
);
}
Selection::FragmentSpread(spread) => {
let FragmentSpread {
name, directives, ..
} = spread;
if name.span.contains(position_span) {
completion_request.kind = CompletionKind::FragmentSpread;
} else {
build_request_from_directives(
directives,
DirectiveLocation::FragmentSpread,
position_span,
completion_request,
);
}
}
Selection::InlineFragment(node) => {
let InlineFragment {
selections,
directives,
type_condition,
..
} = node;
if let Some(type_condition) = type_condition {
let type_name = type_condition.type_.value;
completion_request.add_type(TypePathItem::InlineFragment { type_name });
build_request_from_selection_or_directives(
selections,
directives,
DirectiveLocation::InlineFragment,
position_span,
completion_request,
)
}
}
Selection::ScalarField(node) => {
let ScalarField {
directives, name, ..
} = node;
completion_request.add_type(TypePathItem::ScalarField { name: name.value });
build_request_from_directives(
directives,
DirectiveLocation::Scalar,
position_span,
completion_request,
);
}
}
}
}
}
fn build_request_from_directives(
directives: &[Directive],
location: DirectiveLocation,
position_span: Span,
completion_request: &mut CompletionRequest,
) {
for Directive { span, .. } in directives {
if span.contains(position_span) {
completion_request.kind = CompletionKind::DirectiveName { location };
break;
}
}
}
fn build_request_from_selection_or_directives(
selections: &List<Selection>,
directives: &[Directive],
directive_location: DirectiveLocation,
position_span: Span,
completion_request: &mut CompletionRequest,
) {
if selections.span.contains(position_span) {
// TODO(brandondail) handle when the completion occurs at/within the start token
build_request_from_selections(selections, position_span, completion_request);
} else {
build_request_from_directives(
directives,
directive_location,
position_span,
completion_request,
)
}
}
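/// Builds a `CompletionItem` for a schema directive. For a directive with required (non-null)
/// arguments, e.g. `@include(if: Boolean!)`, the insert text is a snippet such as
/// `include(if : $1)` so the editor places the cursor on the first required argument value;
/// directives whose arguments are all optional (or absent) fall back to a plain-text label.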
fn completion_item_from_directive(directive: &SchemaDirective, schema: &Schema) -> CompletionItem {
let SchemaDirective {
name, arguments, ..
} = directive;
use crate::lsp::InsertTextFormat;
// Always use the name of the directive as the label
let label = name.to_string();
// We can return a snippet with the expected arguments of the directive
let (insert_text, insert_text_format) = if arguments.is_empty() {
(label.clone(), InsertTextFormat::PlainText)
} else {
let mut cursor_location = 1;
let mut args = vec![];
for arg in arguments.iter() {
if let TypeReference::NonNull(type_) = &arg.type_ {
let value_snippet = match type_ {
t if t.is_list() => format!("[${}]", cursor_location),
t if schema.is_string(t.inner()) => format!("\"${}\"", cursor_location),
_ => format!("${}", cursor_location),
};
let str = format!("{} : {}", arg.name, value_snippet);
args.push(str);
cursor_location += 1;
}
}
if args.is_empty() {
(label.clone(), InsertTextFormat::PlainText)
} else {
let insert_text = format!("{}({})", label, args.join(", "));
(insert_text, InsertTextFormat::Snippet)
}
};
CompletionItem {
label,
kind: None,
detail: None,
documentation: None,
deprecated: None,
preselect: None,
sort_text: None,
filter_text: None,
insert_text: Some(insert_text),
insert_text_format: Some(insert_text_format),
text_edit: None,
additional_text_edits: None,
command: None,
data: None,
tags: None,
}
}
/// Maps the LSP `Position` type back to a relative span, so we can find out which syntax node(s)
/// this completion request came from
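/// The returned span is zero-length (`start == end`) because completion only needs the cursor
/// offset, not a range; the offset is computed from the index of the most recent line
/// terminator in `source.text` plus the requested character column, and line counting starts
/// at `source.line_index` since the GraphQL source is embedded within a host file.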
pub fn position_to_span(position: Position, source: &GraphQLSource) -> Option<Span> {
let mut index_of_last_line = 0;
let mut line_index = source.line_index as u64;
let mut chars = source.text.chars().enumerate().peekable();
while let Some((index, chr)) = chars.next() {
let is_newline = match chr {
// Line terminators: https://www.ecma-international.org/ecma-262/#sec-line-terminators
'\u{000A}' | '\u{000D}' | '\u{2028}' | '\u{2029}' => match (chr, chars.peek()) {
                // <CRLF>
                ('\u{000D}', Some((_, '\u{000A}'))) => false,
_ => true,
},
_ => false,
};
if is_newline {
line_index += 1;
index_of_last_line = index as u64;
}
if line_index == position.line {
let start_offset = (index_of_last_line + position.character) as u32;
return Some(Span::new(start_offset, start_offset));
}
}
None
}
pub fn send_completion_response(
items: Vec<CompletionItem>,
request_id: ServerRequestId,
connection: &Connection,
) {
// If there are no items, don't send any response
if items.is_empty() {
return;
}
let completion_response = CompletionResponse::Array(items);
let result = serde_json::to_value(&completion_response).ok();
let response = ServerResponse {
id: request_id,
error: None,
result,
};
connection.sender.send(Message::Response(response)).ok();
}
/// Return a `CompletionRequest` for this request, only if the completion request occurs
/// within a GraphQL document. Otherwise return `None`.
pub fn get_completion_request(
params: CompletionParams,
graphql_source_cache: &GraphQLSourceCache,
) -> Option<CompletionRequest> {
let CompletionParams {
text_document_position,
..
} = params;
let TextDocumentPositionParams {
text_document,
position,
} = text_document_position;
let url = text_document.uri;
let graphql_sources = match graphql_source_cache.get(&url) {
Some(sources) => sources,
// If we have no sources for this file, do nothing
None => return None,
};
info!(
"Got completion request for file with sources: {:#?}",
*graphql_sources
);
info!("position: {:?}", position);
// We have GraphQL documents, now check if the completion request
// falls within the range of one of these documents.
let mut target_graphql_source: Option<&GraphQLSource> = None;
for graphql_source in &*graphql_sources {
let range = graphql_source.to_range();
if position >= range.start && position <= range.end {
target_graphql_source = Some(graphql_source);
break;
}
}
let graphql_source = match target_graphql_source {
Some(source) => source,
// Exit early if this completion request didn't fall within
// the range of one of our GraphQL documents
None => return None,
};
match parse_executable(
&graphql_source.text,
SourceLocationKey::standalone(&url.to_string()),
) {
Ok(document) => {
// Now we need to take the `Position` and map that to an offset relative
// to this GraphQL document, as the `Span`s in the document are relative.
info!("Successfully parsed the definitions for a target GraphQL source");
// Map the position to a zero-length span, relative to this GraphQL source.
let position_span = match position_to_span(position, &graphql_source) {
Some(span) => span,
// Exit early if we can't map the position for some reason
None => return None,
};
// Now we need to walk the Document, tracking our path along the way, until
// we find the position within the document. Note that the GraphQLSource will
// already be updated *with the characters that triggered the completion request*
// since the change event fires before completion.
info!("position_span: {:?}", position_span);
let completion_request = create_completion_request(document, position_span);
info!("Completion path: {:#?}", completion_request);
completion_request
}
Err(err) => {
info!("Failed to parse this target!");
info!("{:?}", err);
None
}
}
}
| 35.463196 | 99 | 0.569736 |
f8fb96668861ba3baaf429761cd129d75a8102e5
| 11,304 |
#![cfg(feature = "test-bpf")]
use solana_program_test::*;
use solana_sdk::{
instruction::InstructionError,
pubkey::Pubkey,
signature::{Keypair, Signer},
system_instruction::create_account,
transaction::{Transaction, TransactionError},
};
use helpers::*;
use spl_token::instruction::approve;
use spl_token_lending::{
error::LendingError,
instruction::withdraw_obligation_collateral,
math::Decimal,
processor::process_instruction,
state::{INITIAL_COLLATERAL_RATIO, SLOTS_PER_YEAR},
};
mod helpers;
const LAMPORTS_TO_SOL: u64 = 1_000_000_000;
const FRACTIONAL_TO_USDC: u64 = 1_000_000;
#[tokio::test]
async fn test_success() {
let mut test = ProgramTest::new(
"spl_token_lending",
spl_token_lending::id(),
processor!(process_instruction),
);
// limit to track compute unit increase
test.set_bpf_compute_max_units(190_000);
const INITIAL_SOL_RESERVE_SUPPLY_LAMPORTS: u64 = 100 * LAMPORTS_TO_SOL;
const INITIAL_USDC_RESERVE_SUPPLY_FRACTIONAL: u64 = 100 * FRACTIONAL_TO_USDC;
const OBLIGATION_LOAN: u64 = 10 * FRACTIONAL_TO_USDC;
const OBLIGATION_COLLATERAL: u64 = 10 * LAMPORTS_TO_SOL * INITIAL_COLLATERAL_RATIO;
// from Reserve::required_collateral_for_borrow
const REQUIRED_COLLATERAL: u64 = 7_220_474_693;
const WITHDRAW_COLLATERAL: u64 = OBLIGATION_COLLATERAL - REQUIRED_COLLATERAL;
let user_accounts_owner = Keypair::new();
let memory_keypair = Keypair::new();
let user_transfer_authority = Keypair::new();
let sol_usdc_dex_market = TestDexMarket::setup(&mut test, TestDexMarketPair::SOL_USDC);
let usdc_mint = add_usdc_mint(&mut test);
let lending_market = add_lending_market(&mut test, usdc_mint.pubkey);
let sol_reserve = add_reserve(
&mut test,
&user_accounts_owner,
&lending_market,
AddReserveArgs {
slots_elapsed: SLOTS_PER_YEAR,
liquidity_amount: INITIAL_SOL_RESERVE_SUPPLY_LAMPORTS,
liquidity_mint_decimals: 9,
liquidity_mint_pubkey: spl_token::native_mint::id(),
dex_market_pubkey: Some(sol_usdc_dex_market.pubkey),
collateral_amount: OBLIGATION_COLLATERAL,
config: TEST_RESERVE_CONFIG,
..AddReserveArgs::default()
},
);
let usdc_reserve = add_reserve(
&mut test,
&user_accounts_owner,
&lending_market,
AddReserveArgs {
initial_borrow_rate: 1,
liquidity_amount: INITIAL_USDC_RESERVE_SUPPLY_FRACTIONAL,
liquidity_mint_pubkey: usdc_mint.pubkey,
liquidity_mint_decimals: usdc_mint.decimals,
borrow_amount: OBLIGATION_LOAN * 101 / 100,
user_liquidity_amount: OBLIGATION_LOAN,
config: TEST_RESERVE_CONFIG,
..AddReserveArgs::default()
},
);
let obligation = add_obligation(
&mut test,
&user_accounts_owner,
&lending_market,
AddObligationArgs {
borrow_reserve: &usdc_reserve,
collateral_reserve: &sol_reserve,
collateral_amount: OBLIGATION_COLLATERAL,
borrowed_liquidity_wads: Decimal::from(OBLIGATION_LOAN),
},
);
let (mut banks_client, payer, recent_blockhash) = test.start().await;
let initial_collateral_supply_balance =
get_token_balance(&mut banks_client, sol_reserve.collateral_supply).await;
let initial_user_collateral_balance =
get_token_balance(&mut banks_client, sol_reserve.user_collateral_account).await;
let initial_obligation_token_balance =
get_token_balance(&mut banks_client, obligation.token_account).await;
let mut transaction = Transaction::new_with_payer(
&[
create_account(
&payer.pubkey(),
&memory_keypair.pubkey(),
0,
65548,
&spl_token_lending::id(),
),
approve(
&spl_token::id(),
&sol_reserve.user_collateral_account,
&user_transfer_authority.pubkey(),
&user_accounts_owner.pubkey(),
&[],
OBLIGATION_LOAN,
)
.unwrap(),
approve(
&spl_token::id(),
&obligation.token_account,
&user_transfer_authority.pubkey(),
&user_accounts_owner.pubkey(),
&[],
OBLIGATION_COLLATERAL,
)
.unwrap(),
withdraw_obligation_collateral(
spl_token_lending::id(),
WITHDRAW_COLLATERAL,
sol_reserve.collateral_supply,
sol_reserve.user_collateral_account,
sol_reserve.pubkey,
usdc_reserve.pubkey,
obligation.pubkey,
obligation.token_mint,
obligation.token_account,
lending_market.pubkey,
lending_market.authority,
user_transfer_authority.pubkey(),
sol_usdc_dex_market.pubkey,
sol_usdc_dex_market.bids_pubkey,
memory_keypair.pubkey(),
),
],
Some(&payer.pubkey()),
);
transaction.sign(
&[
&payer,
&memory_keypair,
&user_accounts_owner,
&user_transfer_authority,
],
recent_blockhash,
);
assert!(banks_client.process_transaction(transaction).await.is_ok());
// check that collateral tokens were transferred
let collateral_supply_balance =
get_token_balance(&mut banks_client, sol_reserve.collateral_supply).await;
assert_eq!(
collateral_supply_balance,
initial_collateral_supply_balance - WITHDRAW_COLLATERAL
);
let user_collateral_balance =
get_token_balance(&mut banks_client, sol_reserve.user_collateral_account).await;
assert_eq!(
user_collateral_balance,
initial_user_collateral_balance + WITHDRAW_COLLATERAL
);
// check that obligation tokens were burned
let obligation_token_balance =
get_token_balance(&mut banks_client, obligation.token_account).await;
assert_eq!(
obligation_token_balance,
initial_obligation_token_balance - WITHDRAW_COLLATERAL
);
}
#[tokio::test]
async fn test_withdraw_below_required() {
let mut test = ProgramTest::new(
"spl_token_lending",
spl_token_lending::id(),
processor!(process_instruction),
);
// limit to track compute unit increase
test.set_bpf_compute_max_units(180_000);
const INITIAL_SOL_RESERVE_SUPPLY_LAMPORTS: u64 = 100 * LAMPORTS_TO_SOL;
const INITIAL_USDC_RESERVE_SUPPLY_FRACTIONAL: u64 = 100 * FRACTIONAL_TO_USDC;
const OBLIGATION_LOAN: u64 = 10 * FRACTIONAL_TO_USDC;
const OBLIGATION_COLLATERAL: u64 = 10 * LAMPORTS_TO_SOL * INITIAL_COLLATERAL_RATIO;
// from Reserve::required_collateral_for_borrow
const REQUIRED_COLLATERAL: u64 = 7_220_474_693;
const WITHDRAW_COLLATERAL: u64 = OBLIGATION_COLLATERAL - REQUIRED_COLLATERAL + 1;
let user_accounts_owner = Keypair::new();
let memory_keypair = Keypair::new();
let user_transfer_authority = Keypair::new();
let sol_usdc_dex_market = TestDexMarket::setup(&mut test, TestDexMarketPair::SOL_USDC);
let usdc_mint = add_usdc_mint(&mut test);
let lending_market = add_lending_market(&mut test, usdc_mint.pubkey);
let sol_reserve = add_reserve(
&mut test,
&user_accounts_owner,
&lending_market,
AddReserveArgs {
slots_elapsed: SLOTS_PER_YEAR,
liquidity_amount: INITIAL_SOL_RESERVE_SUPPLY_LAMPORTS,
liquidity_mint_decimals: 9,
liquidity_mint_pubkey: spl_token::native_mint::id(),
dex_market_pubkey: Some(sol_usdc_dex_market.pubkey),
collateral_amount: OBLIGATION_COLLATERAL,
config: TEST_RESERVE_CONFIG,
..AddReserveArgs::default()
},
);
let usdc_reserve = add_reserve(
&mut test,
&user_accounts_owner,
&lending_market,
AddReserveArgs {
initial_borrow_rate: 1,
liquidity_amount: INITIAL_USDC_RESERVE_SUPPLY_FRACTIONAL,
liquidity_mint_pubkey: usdc_mint.pubkey,
liquidity_mint_decimals: usdc_mint.decimals,
borrow_amount: OBLIGATION_LOAN * 101 / 100,
user_liquidity_amount: OBLIGATION_LOAN,
config: TEST_RESERVE_CONFIG,
..AddReserveArgs::default()
},
);
let obligation = add_obligation(
&mut test,
&user_accounts_owner,
&lending_market,
AddObligationArgs {
borrow_reserve: &usdc_reserve,
collateral_reserve: &sol_reserve,
collateral_amount: OBLIGATION_COLLATERAL,
borrowed_liquidity_wads: Decimal::from(OBLIGATION_LOAN),
},
);
let (mut banks_client, payer, recent_blockhash) = test.start().await;
let mut transaction = Transaction::new_with_payer(
&[
create_account(
&payer.pubkey(),
&memory_keypair.pubkey(),
0,
65548,
&spl_token_lending::id(),
),
approve(
&spl_token::id(),
&sol_reserve.user_collateral_account,
&user_transfer_authority.pubkey(),
&user_accounts_owner.pubkey(),
&[],
OBLIGATION_LOAN,
)
.unwrap(),
approve(
&spl_token::id(),
&obligation.token_account,
&user_transfer_authority.pubkey(),
&user_accounts_owner.pubkey(),
&[],
OBLIGATION_COLLATERAL,
)
.unwrap(),
withdraw_obligation_collateral(
spl_token_lending::id(),
WITHDRAW_COLLATERAL,
sol_reserve.collateral_supply,
sol_reserve.user_collateral_account,
sol_reserve.pubkey,
usdc_reserve.pubkey,
obligation.pubkey,
obligation.token_mint,
obligation.token_account,
lending_market.pubkey,
lending_market.authority,
user_transfer_authority.pubkey(),
sol_usdc_dex_market.pubkey,
sol_usdc_dex_market.bids_pubkey,
memory_keypair.pubkey(),
),
],
Some(&payer.pubkey()),
);
transaction.sign(
&[
&payer,
&memory_keypair,
&user_accounts_owner,
&user_transfer_authority,
],
recent_blockhash,
);
// check that transaction fails
assert_eq!(
banks_client
.process_transaction(transaction)
.await
.unwrap_err()
.unwrap(),
TransactionError::InstructionError(
3,
InstructionError::Custom(
LendingError::ObligationCollateralWithdrawBelowRequired as u32
)
)
);
}
| 33.543027 | 91 | 0.614473 |
2977ff74e2a3d571e8e13081c8935285bf905c51
| 89,733 |
use std::{
ops::Deref,
path::{Path, PathBuf},
};
use base_db::{CrateGraph, FileId};
use cfg::{CfgAtom, CfgDiff};
use expect_test::{expect, Expect};
use paths::{AbsPath, AbsPathBuf};
use serde::de::DeserializeOwned;
use crate::{
CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot,
WorkspaceBuildScripts,
};
fn load_cargo(file: &str) -> CrateGraph {
load_cargo_with_overrides(file, CfgOverrides::default())
}
fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGraph {
let meta = get_test_json_file(file);
let cargo_workspace = CargoWorkspace::new(meta);
let project_workspace = ProjectWorkspace::Cargo {
cargo: cargo_workspace,
build_scripts: WorkspaceBuildScripts::default(),
sysroot: None,
rustc: None,
rustc_cfg: Vec::new(),
cfg_overrides,
};
to_crate_graph(project_workspace)
}
fn load_rust_project(file: &str) -> CrateGraph {
let data = get_test_json_file(file);
let project = rooted_project_json(data);
let sysroot = Some(get_fake_sysroot());
let project_workspace = ProjectWorkspace::Json { project, sysroot, rustc_cfg: Vec::new() };
to_crate_graph(project_workspace)
}
fn get_test_json_file<T: DeserializeOwned>(file: &str) -> T {
let file = get_test_path(file);
let data = std::fs::read_to_string(file).unwrap();
let mut json = data.parse::<serde_json::Value>().unwrap();
fixup_paths(&mut json);
return serde_json::from_value(json).unwrap();
fn fixup_paths(val: &mut serde_json::Value) {
match val {
serde_json::Value::String(s) => replace_root(s, true),
serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths),
serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths),
serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => {
}
}
}
}
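/// Rewrites test-fixture paths: with `direction == true` the `$ROOT$` placeholder becomes a
/// real, platform-specific absolute root so fixtures can be loaded; with `direction == false`
/// the root is mapped back to `$ROOT$`, which is how `check_crate_graph` normalizes the
/// rendered crate graph before comparing it against the snapshots.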
fn replace_root(s: &mut String, direction: bool) {
if direction {
let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" };
*s = s.replace("$ROOT$", root)
} else {
let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" };
*s = s.replace(root, "$ROOT$")
}
}
fn get_test_path(file: &str) -> PathBuf {
let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
base.join("test_data").join(file)
}
fn get_fake_sysroot() -> Sysroot {
let sysroot_path = get_test_path("fake-sysroot");
let sysroot_src_dir = AbsPathBuf::assert(sysroot_path);
Sysroot::load(sysroot_src_dir).unwrap()
}
fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
let mut root = "$ROOT$".to_string();
replace_root(&mut root, true);
let path = Path::new(&root);
let base = AbsPath::assert(path);
ProjectJson::new(base, data)
}
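/// Lowers the workspace to a `CrateGraph`; the file-loading closure below hands out a fresh
/// sequential `FileId`, starting at 1, for every path it is asked to load, which is where the
/// `FileId(1)`, `FileId(2)`, ... values in the expect-test snapshots come from.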
fn to_crate_graph(project_workspace: ProjectWorkspace) -> CrateGraph {
project_workspace.to_crate_graph(&Default::default(), &mut |_, _| Vec::new(), &mut {
let mut counter = 0;
move |_path| {
counter += 1;
Some(FileId(counter))
}
})
}
fn check_crate_graph(crate_graph: CrateGraph, expect: Expect) {
let mut crate_graph = format!("{:#?}", crate_graph);
replace_root(&mut crate_graph, false);
expect.assert_eq(&crate_graph);
}
#[test]
fn cargo_hello_world_project_model_with_wildcard_overrides() {
let cfg_overrides = CfgOverrides::Wildcard(
CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(),
);
let crate_graph = load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides);
check_crate_graph(
crate_graph,
expect![[r#"
CrateGraph {
arena: {
CrateId(
0,
): CrateData {
root_file_id: FileId(
1,
),
edition: Edition2018,
version: Some(
"0.1.0",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"hello_world",
),
canonical_name: "hello-world",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
"CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "hello-world",
"CARGO_PKG_VERSION_PATCH": "0",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "1",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
CrateId(
5,
): CrateData {
root_file_id: FileId(
6,
),
edition: Edition2015,
version: Some(
"0.2.98",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"const_fn",
),
canonical_name: "const_fn",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"feature=default",
"feature=std",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"feature=align",
"feature=const-extern-fn",
"feature=default",
"feature=extra_traits",
"feature=rustc-dep-of-std",
"feature=std",
"feature=use_std",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
"CARGO_PKG_VERSION": "0.2.98",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "libc",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "libc",
"CARGO_PKG_VERSION_PATCH": "98",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "2",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: Some(
"https://github.com/rust-lang/libc",
),
},
},
CrateId(
2,
): CrateData {
root_file_id: FileId(
3,
),
edition: Edition2018,
version: Some(
"0.1.0",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"an_example",
),
canonical_name: "an-example",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
"CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "hello-world",
"CARGO_PKG_VERSION_PATCH": "0",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "1",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
0,
),
name: CrateName(
"hello_world",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
CrateId(
4,
): CrateData {
root_file_id: FileId(
5,
),
edition: Edition2015,
version: Some(
"0.2.98",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"libc",
),
canonical_name: "libc",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"feature=default",
"feature=std",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"feature=align",
"feature=const-extern-fn",
"feature=default",
"feature=extra_traits",
"feature=rustc-dep-of-std",
"feature=std",
"feature=use_std",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
"CARGO_PKG_VERSION": "0.2.98",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "libc",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "libc",
"CARGO_PKG_VERSION_PATCH": "98",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "2",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [],
proc_macro: [],
origin: CratesIo {
repo: Some(
"https://github.com/rust-lang/libc",
),
},
},
CrateId(
1,
): CrateData {
root_file_id: FileId(
2,
),
edition: Edition2018,
version: Some(
"0.1.0",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"hello_world",
),
canonical_name: "hello-world",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
"CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "hello-world",
"CARGO_PKG_VERSION_PATCH": "0",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "1",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
0,
),
name: CrateName(
"hello_world",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
CrateId(
6,
): CrateData {
root_file_id: FileId(
7,
),
edition: Edition2015,
version: Some(
"0.2.98",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"build_script_build",
),
canonical_name: "build-script-build",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"feature=default",
"feature=std",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"feature=align",
"feature=const-extern-fn",
"feature=default",
"feature=extra_traits",
"feature=rustc-dep-of-std",
"feature=std",
"feature=use_std",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
"CARGO_PKG_VERSION": "0.2.98",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "libc",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "libc",
"CARGO_PKG_VERSION_PATCH": "98",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "2",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [],
proc_macro: [],
origin: CratesIo {
repo: Some(
"https://github.com/rust-lang/libc",
),
},
},
CrateId(
3,
): CrateData {
root_file_id: FileId(
4,
),
edition: Edition2018,
version: Some(
"0.1.0",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"it",
),
canonical_name: "it",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
"CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "hello-world",
"CARGO_PKG_VERSION_PATCH": "0",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "1",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
0,
),
name: CrateName(
"hello_world",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
},
}"#]],
)
}
#[test]
fn cargo_hello_world_project_model_with_selective_overrides() {
let cfg_overrides = {
CfgOverrides::Selective(
std::iter::once((
"libc".to_owned(),
CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(),
))
.collect(),
)
};
let crate_graph = load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides);
check_crate_graph(
crate_graph,
expect![[r#"
CrateGraph {
arena: {
CrateId(
0,
): CrateData {
root_file_id: FileId(
1,
),
edition: Edition2018,
version: Some(
"0.1.0",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"hello_world",
),
canonical_name: "hello-world",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
"CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "hello-world",
"CARGO_PKG_VERSION_PATCH": "0",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "1",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
CrateId(
5,
): CrateData {
root_file_id: FileId(
6,
),
edition: Edition2015,
version: Some(
"0.2.98",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"const_fn",
),
canonical_name: "const_fn",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"feature=default",
"feature=std",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"feature=align",
"feature=const-extern-fn",
"feature=default",
"feature=extra_traits",
"feature=rustc-dep-of-std",
"feature=std",
"feature=use_std",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
"CARGO_PKG_VERSION": "0.2.98",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "libc",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "libc",
"CARGO_PKG_VERSION_PATCH": "98",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "2",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: Some(
"https://github.com/rust-lang/libc",
),
},
},
CrateId(
2,
): CrateData {
root_file_id: FileId(
3,
),
edition: Edition2018,
version: Some(
"0.1.0",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"an_example",
),
canonical_name: "an-example",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
"CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "hello-world",
"CARGO_PKG_VERSION_PATCH": "0",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "1",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
0,
),
name: CrateName(
"hello_world",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
CrateId(
4,
): CrateData {
root_file_id: FileId(
5,
),
edition: Edition2015,
version: Some(
"0.2.98",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"libc",
),
canonical_name: "libc",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"feature=default",
"feature=std",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"feature=align",
"feature=const-extern-fn",
"feature=default",
"feature=extra_traits",
"feature=rustc-dep-of-std",
"feature=std",
"feature=use_std",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
"CARGO_PKG_VERSION": "0.2.98",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "libc",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "libc",
"CARGO_PKG_VERSION_PATCH": "98",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "2",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [],
proc_macro: [],
origin: CratesIo {
repo: Some(
"https://github.com/rust-lang/libc",
),
},
},
CrateId(
1,
): CrateData {
root_file_id: FileId(
2,
),
edition: Edition2018,
version: Some(
"0.1.0",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"hello_world",
),
canonical_name: "hello-world",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
"CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "hello-world",
"CARGO_PKG_VERSION_PATCH": "0",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "1",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
0,
),
name: CrateName(
"hello_world",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
CrateId(
6,
): CrateData {
root_file_id: FileId(
7,
),
edition: Edition2015,
version: Some(
"0.2.98",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"build_script_build",
),
canonical_name: "build-script-build",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"feature=default",
"feature=std",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"feature=align",
"feature=const-extern-fn",
"feature=default",
"feature=extra_traits",
"feature=rustc-dep-of-std",
"feature=std",
"feature=use_std",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
"CARGO_PKG_VERSION": "0.2.98",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "libc",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "libc",
"CARGO_PKG_VERSION_PATCH": "98",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "2",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [],
proc_macro: [],
origin: CratesIo {
repo: Some(
"https://github.com/rust-lang/libc",
),
},
},
CrateId(
3,
): CrateData {
root_file_id: FileId(
4,
),
edition: Edition2018,
version: Some(
"0.1.0",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"it",
),
canonical_name: "it",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
"CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "hello-world",
"CARGO_PKG_VERSION_PATCH": "0",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "1",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
0,
),
name: CrateName(
"hello_world",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
},
}"#]],
)
}
#[test]
fn cargo_hello_world_project_model() {
let crate_graph = load_cargo("hello-world-metadata.json");
check_crate_graph(
crate_graph,
expect![[r#"
CrateGraph {
arena: {
CrateId(
0,
): CrateData {
root_file_id: FileId(
1,
),
edition: Edition2018,
version: Some(
"0.1.0",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"hello_world",
),
canonical_name: "hello-world",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
"CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "hello-world",
"CARGO_PKG_VERSION_PATCH": "0",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "1",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
CrateId(
5,
): CrateData {
root_file_id: FileId(
6,
),
edition: Edition2015,
version: Some(
"0.2.98",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"const_fn",
),
canonical_name: "const_fn",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"feature=default",
"feature=std",
"test",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"feature=align",
"feature=const-extern-fn",
"feature=default",
"feature=extra_traits",
"feature=rustc-dep-of-std",
"feature=std",
"feature=use_std",
"test",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
"CARGO_PKG_VERSION": "0.2.98",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "libc",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "libc",
"CARGO_PKG_VERSION_PATCH": "98",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "2",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: Some(
"https://github.com/rust-lang/libc",
),
},
},
CrateId(
2,
): CrateData {
root_file_id: FileId(
3,
),
edition: Edition2018,
version: Some(
"0.1.0",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"an_example",
),
canonical_name: "an-example",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
"CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "hello-world",
"CARGO_PKG_VERSION_PATCH": "0",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "1",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
0,
),
name: CrateName(
"hello_world",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
CrateId(
4,
): CrateData {
root_file_id: FileId(
5,
),
edition: Edition2015,
version: Some(
"0.2.98",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"libc",
),
canonical_name: "libc",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"feature=default",
"feature=std",
"test",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"feature=align",
"feature=const-extern-fn",
"feature=default",
"feature=extra_traits",
"feature=rustc-dep-of-std",
"feature=std",
"feature=use_std",
"test",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
"CARGO_PKG_VERSION": "0.2.98",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "libc",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "libc",
"CARGO_PKG_VERSION_PATCH": "98",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "2",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [],
proc_macro: [],
origin: CratesIo {
repo: Some(
"https://github.com/rust-lang/libc",
),
},
},
CrateId(
1,
): CrateData {
root_file_id: FileId(
2,
),
edition: Edition2018,
version: Some(
"0.1.0",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"hello_world",
),
canonical_name: "hello-world",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
"CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "hello-world",
"CARGO_PKG_VERSION_PATCH": "0",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "1",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
0,
),
name: CrateName(
"hello_world",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
CrateId(
6,
): CrateData {
root_file_id: FileId(
7,
),
edition: Edition2015,
version: Some(
"0.2.98",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"build_script_build",
),
canonical_name: "build-script-build",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"feature=default",
"feature=std",
"test",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"feature=align",
"feature=const-extern-fn",
"feature=default",
"feature=extra_traits",
"feature=rustc-dep-of-std",
"feature=std",
"feature=use_std",
"test",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
"CARGO_PKG_VERSION": "0.2.98",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "libc",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "libc",
"CARGO_PKG_VERSION_PATCH": "98",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "2",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [],
proc_macro: [],
origin: CratesIo {
repo: Some(
"https://github.com/rust-lang/libc",
),
},
},
CrateId(
3,
): CrateData {
root_file_id: FileId(
4,
),
edition: Edition2018,
version: Some(
"0.1.0",
),
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"it",
),
canonical_name: "it",
},
),
cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
potential_cfg_options: CfgOptions(
[
"debug_assertions",
"test",
],
),
env: Env {
entries: {
"CARGO_PKG_LICENSE": "",
"CARGO_PKG_VERSION_MAJOR": "0",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
"CARGO_PKG_VERSION": "0.1.0",
"CARGO_PKG_AUTHORS": "",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_PKG_LICENSE_FILE": "",
"CARGO_PKG_HOMEPAGE": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_NAME": "hello-world",
"CARGO_PKG_VERSION_PATCH": "0",
"CARGO": "cargo",
"CARGO_PKG_REPOSITORY": "",
"CARGO_PKG_VERSION_MINOR": "1",
"CARGO_PKG_VERSION_PRE": "",
},
},
dependencies: [
Dependency {
crate_id: CrateId(
0,
),
name: CrateName(
"hello_world",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
4,
),
name: CrateName(
"libc",
),
prelude: true,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
},
}"#]],
)
}
#[test]
fn rust_project_hello_world_project_model() {
let crate_graph = load_rust_project("hello-world-project.json");
check_crate_graph(
crate_graph,
expect![[r#"
CrateGraph {
arena: {
CrateId(
0,
): CrateData {
root_file_id: FileId(
1,
),
edition: Edition2018,
version: None,
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"alloc",
),
canonical_name: "alloc",
},
),
cfg_options: CfgOptions(
[],
),
potential_cfg_options: CfgOptions(
[],
),
env: Env {
entries: {},
},
dependencies: [
Dependency {
crate_id: CrateId(
1,
),
name: CrateName(
"core",
),
prelude: true,
},
],
proc_macro: [],
origin: Lang,
},
CrateId(
10,
): CrateData {
root_file_id: FileId(
11,
),
edition: Edition2018,
version: None,
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"unwind",
),
canonical_name: "unwind",
},
),
cfg_options: CfgOptions(
[],
),
potential_cfg_options: CfgOptions(
[],
),
env: Env {
entries: {},
},
dependencies: [],
proc_macro: [],
origin: Lang,
},
CrateId(
7,
): CrateData {
root_file_id: FileId(
8,
),
edition: Edition2018,
version: None,
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"std_detect",
),
canonical_name: "std_detect",
},
),
cfg_options: CfgOptions(
[],
),
potential_cfg_options: CfgOptions(
[],
),
env: Env {
entries: {},
},
dependencies: [],
proc_macro: [],
origin: Lang,
},
CrateId(
4,
): CrateData {
root_file_id: FileId(
5,
),
edition: Edition2018,
version: None,
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"proc_macro",
),
canonical_name: "proc_macro",
},
),
cfg_options: CfgOptions(
[],
),
potential_cfg_options: CfgOptions(
[],
),
env: Env {
entries: {},
},
dependencies: [
Dependency {
crate_id: CrateId(
6,
),
name: CrateName(
"std",
),
prelude: true,
},
],
proc_macro: [],
origin: Lang,
},
CrateId(
1,
): CrateData {
root_file_id: FileId(
2,
),
edition: Edition2018,
version: None,
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"core",
),
canonical_name: "core",
},
),
cfg_options: CfgOptions(
[],
),
potential_cfg_options: CfgOptions(
[],
),
env: Env {
entries: {},
},
dependencies: [],
proc_macro: [],
origin: Lang,
},
CrateId(
11,
): CrateData {
root_file_id: FileId(
12,
),
edition: Edition2018,
version: None,
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"hello_world",
),
canonical_name: "hello_world",
},
),
cfg_options: CfgOptions(
[],
),
potential_cfg_options: CfgOptions(
[],
),
env: Env {
entries: {},
},
dependencies: [
Dependency {
crate_id: CrateId(
1,
),
name: CrateName(
"core",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
0,
),
name: CrateName(
"alloc",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
6,
),
name: CrateName(
"std",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
9,
),
name: CrateName(
"test",
),
prelude: false,
},
],
proc_macro: [],
origin: CratesIo {
repo: None,
},
},
CrateId(
8,
): CrateData {
root_file_id: FileId(
9,
),
edition: Edition2018,
version: None,
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"term",
),
canonical_name: "term",
},
),
cfg_options: CfgOptions(
[],
),
potential_cfg_options: CfgOptions(
[],
),
env: Env {
entries: {},
},
dependencies: [],
proc_macro: [],
origin: Lang,
},
CrateId(
5,
): CrateData {
root_file_id: FileId(
6,
),
edition: Edition2018,
version: None,
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"profiler_builtins",
),
canonical_name: "profiler_builtins",
},
),
cfg_options: CfgOptions(
[],
),
potential_cfg_options: CfgOptions(
[],
),
env: Env {
entries: {},
},
dependencies: [],
proc_macro: [],
origin: Lang,
},
CrateId(
2,
): CrateData {
root_file_id: FileId(
3,
),
edition: Edition2018,
version: None,
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"panic_abort",
),
canonical_name: "panic_abort",
},
),
cfg_options: CfgOptions(
[],
),
potential_cfg_options: CfgOptions(
[],
),
env: Env {
entries: {},
},
dependencies: [],
proc_macro: [],
origin: Lang,
},
CrateId(
9,
): CrateData {
root_file_id: FileId(
10,
),
edition: Edition2018,
version: None,
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"test",
),
canonical_name: "test",
},
),
cfg_options: CfgOptions(
[],
),
potential_cfg_options: CfgOptions(
[],
),
env: Env {
entries: {},
},
dependencies: [],
proc_macro: [],
origin: Lang,
},
CrateId(
6,
): CrateData {
root_file_id: FileId(
7,
),
edition: Edition2018,
version: None,
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"std",
),
canonical_name: "std",
},
),
cfg_options: CfgOptions(
[],
),
potential_cfg_options: CfgOptions(
[],
),
env: Env {
entries: {},
},
dependencies: [
Dependency {
crate_id: CrateId(
0,
),
name: CrateName(
"alloc",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
1,
),
name: CrateName(
"core",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
2,
),
name: CrateName(
"panic_abort",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
3,
),
name: CrateName(
"panic_unwind",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
5,
),
name: CrateName(
"profiler_builtins",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
7,
),
name: CrateName(
"std_detect",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
8,
),
name: CrateName(
"term",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
9,
),
name: CrateName(
"test",
),
prelude: true,
},
Dependency {
crate_id: CrateId(
10,
),
name: CrateName(
"unwind",
),
prelude: true,
},
],
proc_macro: [],
origin: Lang,
},
CrateId(
3,
): CrateData {
root_file_id: FileId(
4,
),
edition: Edition2018,
version: None,
display_name: Some(
CrateDisplayName {
crate_name: CrateName(
"panic_unwind",
),
canonical_name: "panic_unwind",
},
),
cfg_options: CfgOptions(
[],
),
potential_cfg_options: CfgOptions(
[],
),
env: Env {
entries: {},
},
dependencies: [],
proc_macro: [],
origin: Lang,
},
},
}"#]],
);
}
#[test]
fn rust_project_is_proc_macro_has_proc_macro_dep() {
let crate_graph = load_rust_project("is-proc-macro-project.json");
// Since the project only defines one crate (outside the sysroot crates),
// it should be the one with the biggest Id.
let crate_id = crate_graph.iter().max().unwrap();
let crate_data = &crate_graph[crate_id];
// Assert that the project crate with `is_proc_macro` has a dependency
// on the proc_macro sysroot crate.
crate_data.dependencies.iter().find(|&dep| dep.name.deref() == "proc_macro").unwrap();
}
| 42.029508 | 122 | 0.258801 |
ab4b9a03970970275ea5c5b6d4a9b6abd12a29cf
| 25,835 |
use habitat_common::{error::{Error,
Result},
liveliness_checker,
outputln,
templating::package::Pkg};
use habitat_core::{env as henv,
os::process::windows_child::Child};
use mio::{Events,
Poll,
PollOpt,
Ready,
Token};
use mio_named_pipes::NamedPipe;
use std::{self,
env,
ffi::OsStr,
fs::{File,
OpenOptions},
io::{self,
prelude::*,
BufRead,
BufReader,
Read},
iter::once,
os::windows::{ffi::OsStrExt,
fs::*,
io::*},
path::PathBuf,
process,
thread,
time::{Duration,
Instant}};
use uuid::Uuid;
use winapi::um::{namedpipeapi,
processthreadsapi,
winbase};
static LOGKEY: &str = "HK";
const PIPE_SERVICE_ENVVAR: &str = "HAB_HOOK_PIPE_SCRIPT";
const SIGNAL_EXEC_HOOK: [u8; 1] = [1];
const SIGNAL_QUIT: [u8; 1] = [0];
habitat_core::env_config_duration!(PipeStartTimeout,
HAB_START_PIPE_TIMEOUT_SECS => from_secs,
Duration::from_secs(5));
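/// Client for a PowerShell-hosted named pipe server that runs a single hook on
/// demand. `exec_hook` lazily starts the server, signals it to execute the hook,
/// and reads the hook's exit code back over the pipe.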
#[derive(Debug)]
pub struct PipeHookClient {
hook_name: String,
hook_path: PathBuf,
pipe_name: String,
stdout_log_file: PathBuf,
stderr_log_file: PathBuf,
}
impl PipeHookClient {
pub fn new(hook_name: String,
hook_path: PathBuf,
stdout_log_file: PathBuf,
stderr_log_file: PathBuf)
-> Self {
let pipe_name = format!("{}-hook-{}", hook_name, Uuid::new_v4());
Self { hook_name,
hook_path,
pipe_name,
stdout_log_file,
stderr_log_file }
}
pub fn exec_hook<T>(&self,
service_group: &str,
pkg: &Pkg,
svc_encrypted_password: Option<T>)
-> Result<u32>
where T: ToString
{
// Always check to see if the powershell named pipe server is running.
// This should only be false the first time this function is called
// or if someone explicitly terminates the powershell process
if let Err(err) = self.pipe_wait() {
debug!("Initiating Powershell named pipe server for {} {} hook. last err: {}",
service_group, self.hook_name, err);
self.start_server(service_group, pkg, svc_encrypted_password)?;
}
// truncate log files before executing hook
// the log should only contain output for a single invocation
File::create(&self.stdout_log_file)?;
File::create(&self.stderr_log_file)?;
let (mut pipe, poll) = self.connect()?;
debug!("connected to {} {} hook pipe",
service_group, self.hook_name);
// The powershell server takes a single byte as input which will be either
// 0 to shut down (see drop below) or 1 to run the hook
self.pipe_ready(&poll, Ready::writable())?;
pipe.write_all(&SIGNAL_EXEC_HOOK)?;
// Now we wait for the hook to run and the powershell service to
// send back the hook's exit code over the pipe
self.pipe_ready(&poll, Ready::readable())?;
let mut exit_buf = [0; std::mem::size_of::<u32>()];
pipe.read_exact(&mut exit_buf)?;
Ok(u32::from_ne_bytes(exit_buf))
}
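    // Block until the pipe reports the requested readiness (readable or writable),
    // marking this thread alive with the liveliness checker on each poll wakeup.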
fn pipe_ready(&self, poll: &Poll, readiness: Ready) -> io::Result<bool> {
let mut events = Events::with_capacity(1024);
let loop_value = loop {
let checked_thread = liveliness_checker::mark_thread_alive();
let result =
poll.poll(&mut events, None)
.map(|_| events.iter().any(|e| e.readiness().contains(readiness)));
if let Ok(false) = result {
continue;
} else {
break checked_thread.unregister(result);
}
};
loop_value.into_result()
}
fn abs_pipe_name(&self) -> String { format!(r#"\\.\pipe\{}"#, self.pipe_name) }
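    // Open the client end of the named pipe in overlapped (async) mode and
    // register it with a mio `Poll` so readiness can be awaited via `pipe_ready`.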
fn connect(&self) -> Result<(NamedPipe, Poll)> {
let mut opts = OpenOptions::new();
opts.read(true)
.write(true)
.custom_flags(winbase::FILE_FLAG_OVERLAPPED);
let file = opts.open(self.abs_pipe_name())?;
let pipe = unsafe { NamedPipe::from_raw_handle(file.into_raw_handle()) };
let poll = Poll::new()?;
poll.register(&pipe, Token(0), Ready::all(), PollOpt::edge())?;
Ok((pipe, poll))
}
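    // Spawn a `pwsh.exe` process hosting the named pipe server script for this
    // hook, stream its stdout/stderr to the hook's log files on background
    // threads, and wait (up to the configured timeout) for the pipe to come up.
    // If the pipe never appears, the powershell process is terminated.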
fn start_server<T>(&self,
service_group: &str,
pkg: &Pkg,
svc_encrypted_password: Option<T>)
-> Result<()>
where T: ToString
{
let script_path = match henv::var(PIPE_SERVICE_ENVVAR) {
Ok(script) => PathBuf::from(script),
Err(_) => {
env::current_exe()?.parent()
.expect("unable to find sup binary parent dir")
.join("named_pipe_service.ps1")
}
};
let ps_cmd = format!("& '{}' -HookPath '{}' -PipeName {} -ParentPID {}",
script_path.to_string_lossy(),
self.hook_path.to_string_lossy(),
self.pipe_name,
process::id());
// Start instance of powershell to host named pipe server for this client
let args = vec!["-NonInteractive", "-Command", ps_cmd.as_str()];
let child = Child::spawn("pwsh.exe",
&args,
&pkg.env.to_hash_map(),
&pkg.svc_user,
svc_encrypted_password)?;
debug!("spawned powershell server for {} {} hook on pipe: {}",
service_group, self.hook_name, self.pipe_name);
let out = child.stdout;
let err = child.stderr;
let preamble_out_str = format!("{} hook[{}]:", service_group, self.hook_name);
let preamble_err_str = preamble_out_str.clone();
let out_path = self.stdout_log_file.clone();
let err_path = self.stderr_log_file.clone();
let handle = child.handle;
// The above powershell will run for the lifetime of this client which is the same
// duration as the service itself and will thus service each hook run. So we will
// stream its stdout/err in separate threads
if let Some(o) = out {
thread::Builder::new().name(format!("{}-{}-pipe-server-out",
service_group, self.hook_name))
.spawn(move || stream_output(o, &out_path, &preamble_out_str))
.ok();
}
if let Some(e) = err {
thread::Builder::new().name(format!("{}-{}-pipe-server-err",
service_group, self.hook_name))
.spawn(move || stream_output(e, &err_path, &preamble_err_str))
.ok();
}
// We want to wait until we know the named pipe is up and running before returning OK
// If we suspect anything is wrong with the pipe we should terminate the pwsh process
let start = Instant::now();
loop {
match self.pipe_wait() {
Ok(_) => return Ok(()),
Err(err) => {
if start.elapsed() >= PipeStartTimeout::configured_value().into() {
self.win32_result(unsafe {
processthreadsapi::TerminateProcess(handle.raw(), 1)
})?;
return Err(Error::NamedPipeTimeoutOnStart(service_group.to_string(),
self.hook_name.to_string(),
err));
}
}
}
thread::yield_now();
}
}
// Simple call to win32 API to see if named pipe exists.
// Note that we cannot use Path::exists for named pipes
fn pipe_wait(&self) -> io::Result<()> {
let pipe_path: Vec<u16> = OsStr::new(&self.abs_pipe_name()).encode_wide()
.chain(once(0))
.collect();
        // Note that WaitNamedPipeW will error immediately if the pipe has not yet
        // been created. The timeout is only honored if the pipe exists, in which
        // case the call waits for it to become ready for clients.
self.win32_result(unsafe { namedpipeapi::WaitNamedPipeW(pipe_path.as_ptr(), 1000) })?;
Ok(())
}
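    // Tell the pipe server to shut down by sending the quit signal byte, but
    // only if the pipe still exists; otherwise there is nothing to notify.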
fn quit(&self) -> Result<()> {
if let Err(err) = self.pipe_wait() {
debug!("error checking if pipe exists: {}", err);
} else {
debug!("Telling {} pipe server to quit", self.pipe_name);
let (mut pipe, poll) = self.connect()?;
self.pipe_ready(&poll, Ready::writable())?;
pipe.write_all(&SIGNAL_QUIT)?;
}
Ok(())
}
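    // Translate a win32 BOOL-style return value (0 means failure) into an
    // `io::Result`, capturing the last OS error on failure.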
fn win32_result(&self, i: i32) -> io::Result<i32> {
if i == 0 {
Err(io::Error::last_os_error())
} else {
Ok(i)
}
}
}
impl Drop for PipeHookClient {
fn drop(&mut self) {
if let Err(err) = self.quit() {
error!("Unable to tell {} pipe server to quit. {}",
self.pipe_name, err);
}
}
}
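// Stream a child process output handle line by line, echoing each line through
// the supervisor's output system and appending it to the given log file.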
fn stream_output<T>(out: T, log_file: &PathBuf, preamble_str: &str)
where T: Read
{
File::create(&log_file).unwrap_or_else(|_| {
panic!("{}: couldn't create log output file {}",
preamble_str,
&log_file.to_string_lossy())
});
for line in BufReader::new(out).lines() {
if let Ok(ref l) = line {
outputln!(preamble preamble_str, l);
// we append each line to the log file instead of continuously
// streaming to an open file because the parent thread needs to
// truncate the log on each hook execution so that the log only
// holds the output of the last run. This mimics the behavior of
// the HookOutput streaming.
match OpenOptions::new().write(true).append(true).open(&log_file) {
Ok(mut log) => {
if let Err(e) = writeln!(log, "{}", l) {
outputln!(preamble preamble_str, "couldn't write line. {}", e);
}
}
Err(err) => {
outputln!(preamble preamble_str, "unable to open log {} : {}",
&log_file.to_string_lossy(),
err);
}
}
}
}
}
#[cfg(test)]
mod test {
use super::*;
use habitat_common::{locked_env_var,
templating::{package::Pkg,
test_helpers::{create_with_content,
file_content}}};
use habitat_core::{os::process,
package::{PackageIdent,
PackageInstall},
service::ServiceGroup};
use tempfile::TempDir;
use winapi::um::synchapi;
locked_env_var!(HAB_HOOK_PIPE_SCRIPT, pipe_service_path);
fn named_pipe_service_ps1() -> PathBuf {
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("static")
.join("named_pipe_service.ps1")
}
fn pkg() -> Pkg {
let service_group = ServiceGroup::new(None, "test_service", "test_group", None).unwrap();
let pg_id = PackageIdent::new("testing",
&service_group.service(),
Some("1.0.0"),
Some("20170712000000"));
let pkg_install = PackageInstall::new_from_parts(pg_id.clone(),
PathBuf::from("/tmp"),
PathBuf::from("/tmp"),
PathBuf::from("/tmp"));
Pkg::from_install(&pkg_install).unwrap()
}
#[test]
fn pipe_hook_client_exec_hook_returns_exit_status() {
let var = pipe_service_path();
var.set(&named_pipe_service_ps1());
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("health-check");
create_with_content(&path, "exit 5000");
let client = PipeHookClient::new("test".to_string(),
path,
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
let exit = client.exec_hook("tg1", &pkg(), None::<String>).unwrap();
assert_eq!(5000, exit);
}
#[test]
fn pipe_hook_client_exec_hook_returns_exit_status_when_no_exit_in_script() {
let var = pipe_service_path();
var.set(&named_pipe_service_ps1());
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("health-check");
create_with_content(&path, "write-host 'no exit here'");
let client = PipeHookClient::new("test".to_string(),
path,
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
let exit = client.exec_hook("tg2", &pkg(), None::<String>).unwrap();
assert_eq!(0, exit);
}
#[test]
fn pipe_hook_client_exec_hook_logs_stdout() {
let var = pipe_service_path();
var.set(&named_pipe_service_ps1());
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("health-check");
create_with_content(&path, "Write-Host 'you are my sunshine'");
let client = PipeHookClient::new("test".to_string(),
path,
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
client.exec_hook("tg3", &pkg(), None::<String>).unwrap();
// give stream a chance to write
thread::sleep(Duration::from_millis(10));
assert!(file_content(tmpdir.path().join("out.log")).contains("you are my sunshine\n"));
}
#[test]
fn pipe_hook_client_exec_hook_logs_stderr() {
let var = pipe_service_path();
var.set(&named_pipe_service_ps1());
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("health-check");
create_with_content(&path, "Write-Error 'you are not my sunshine'");
let client = PipeHookClient::new("test".to_string(),
path,
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
client.exec_hook("tg4", &pkg(), None::<String>).unwrap();
// give stream a chance to write
thread::sleep(Duration::from_millis(10));
assert!(file_content(tmpdir.path().join("err.log")).contains("you are not my sunshine\n"));
}
#[test]
    fn pipe_hook_client_exec_hook_shares_server_across_calls() {
let var = pipe_service_path();
var.set(&named_pipe_service_ps1());
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("health-check");
create_with_content(&path, "exit $PID");
let client = PipeHookClient::new("test".to_string(),
path,
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
let pid1 = client.exec_hook("tg5", &pkg(), None::<String>).unwrap();
let pid2 = client.exec_hook("tg5", &pkg(), None::<String>).unwrap();
assert_eq!(pid1, pid2);
}
#[test]
fn pipe_hook_client_start_server_terminates_failed_server() {
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("fake-server.ps1");
create_with_content(&path,
"write-host $PID;while($true){Start-Sleep -Seconds 5}");
let var = pipe_service_path();
var.set(&path);
let client = PipeHookClient::new("test".to_string(),
tmpdir.path().join("health-check"),
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
let result = client.exec_hook("tg6", &pkg(), None::<String>);
// give stream a chance to write
thread::sleep(Duration::from_millis(10));
let pid_str = file_content(tmpdir.path().join("out.log"));
assert!(result.is_err());
assert!(!process::is_alive(pid_str.trim()
.parse::<process::Pid>()
.expect("could no parse pid")));
}
#[test]
fn pipe_hook_client_start_server_logs_stderr_of_failed_server() {
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("fake-server.ps1");
create_with_content(&path, "write-error 'I am not a real pipe server'");
let var = pipe_service_path();
var.set(&path);
let client = PipeHookClient::new("test".to_string(),
tmpdir.path().join("health-check"),
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
let result = client.exec_hook("tg7", &pkg(), None::<String>);
// give stream a chance to write
thread::sleep(Duration::from_millis(10));
assert!(result.is_err());
assert!(file_content(tmpdir.path().join("err.log")).contains("I am not a real pipe \
server"));
}
#[test]
fn pipe_hook_client_start_server_logs_stdout_of_failed_server() {
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("fake-server.ps1");
create_with_content(&path, "write-host 'I am not a real pipe server'");
let var = pipe_service_path();
var.set(&path);
let client = PipeHookClient::new("test".to_string(),
tmpdir.path().join("health-check"),
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
let result = client.exec_hook("tg8", &pkg(), None::<String>);
// give stream a chance to write
thread::sleep(Duration::from_millis(10));
assert!(result.is_err());
assert_eq!("I am not a real pipe server\n",
file_content(tmpdir.path().join("out.log")));
}
#[test]
fn pipe_hook_client_exec_hook_starts_new_service_if_current_instance_exits() {
let var = pipe_service_path();
var.set(&named_pipe_service_ps1());
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("health-check");
create_with_content(&path, "exit $PID");
let client = PipeHookClient::new("test".to_string(),
path,
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
let pid1 = client.exec_hook("tg9", &pkg(), None::<String>).unwrap();
let handle = process::handle_from_pid(pid1).expect("unable to get handle to pipe server");
unsafe {
processthreadsapi::TerminateProcess(handle, 1);
}
let pid2 = client.exec_hook("tg9", &pkg(), None::<String>).unwrap();
assert_ne!(pid1, pid2);
}
#[test]
fn pipe_hook_client_exec_hook_restores_environment() {
let var = pipe_service_path();
var.set(&named_pipe_service_ps1());
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("health-check");
create_with_content(&path,
"if($env:test_var) { exit 10 } else { $env:test_var=1;exit 0 }");
let client = PipeHookClient::new("test".to_string(),
path,
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
let exit1 = client.exec_hook("tg10", &pkg(), None::<String>).unwrap();
let exit2 = client.exec_hook("tg10", &pkg(), None::<String>).unwrap();
assert_eq!(0, exit1);
assert_eq!(0, exit2);
}
#[test]
fn pipe_hook_client_drop_quits_service() {
let var = pipe_service_path();
var.set(&named_pipe_service_ps1());
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("health-check");
create_with_content(&path, "exit $PID");
let pid = {
let client = PipeHookClient::new("test".to_string(),
tmpdir.path().join("health-check"),
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
client.exec_hook("tg11", &pkg(), None::<String>).unwrap()
};
let handle = process::handle_from_pid(pid).expect("unable to get handle to pipe server");
unsafe {
synchapi::WaitForSingleObject(handle, 100);
}
assert!(!process::is_alive(pid));
}
#[test]
fn pipe_hook_client_exec_hook_will_return_3_if_hook_throws_exception() {
let var = pipe_service_path();
var.set(&named_pipe_service_ps1());
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("health-check");
create_with_content(&path, "throw 'an exception'");
let client = PipeHookClient::new("test".to_string(),
path,
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
let exit = client.exec_hook("tg12", &pkg(), None::<String>).unwrap();
assert_eq!(3, exit);
}
#[test]
fn pipe_hook_client_exec_hook_clears_stdout_log() {
let var = pipe_service_path();
var.set(&named_pipe_service_ps1());
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("health-check");
create_with_content(&path, "write-host 'you should only see me once in the log'");
let client = PipeHookClient::new("test".to_string(),
path,
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
client.exec_hook("tg13", &pkg(), None::<String>).unwrap();
client.exec_hook("tg13", &pkg(), None::<String>).unwrap();
// give stream a chance to write
thread::sleep(Duration::from_millis(10));
assert_eq!("you should only see me once in the log\n",
file_content(tmpdir.path().join("out.log")));
}
#[test]
fn pipe_hook_client_exec_hook_clears_stderr_log() {
let var = pipe_service_path();
var.set(&named_pipe_service_ps1());
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("health-check");
create_with_content(&path, "write-error 'I am the only error'");
let client = PipeHookClient::new("test".to_string(),
path,
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
client.exec_hook("tg14", &pkg(), None::<String>).unwrap();
client.exec_hook("tg14", &pkg(), None::<String>).unwrap();
// give stream a chance to write
thread::sleep(Duration::from_millis(10));
let content = file_content(tmpdir.path().join("err.log"));
assert_eq!(content.find("I am the only error"),
content.rfind("I am the only error"));
}
#[test]
fn pipe_hook_client_exec_hook_passes_pid() {
let var = pipe_service_path();
var.set(&named_pipe_service_ps1());
let tmpdir = TempDir::new().unwrap();
let path = tmpdir.path().join("health-check");
create_with_content(&path, "exit $ParentPID");
let client = PipeHookClient::new("test".to_string(),
path,
tmpdir.path().join("out.log"),
tmpdir.path().join("err.log"));
let exit = client.exec_hook("tg15", &pkg(), None::<String>).unwrap();
assert_eq!(std::process::id(), exit);
}
}
| 40.11646 | 99 | 0.500213 |
91c444ba533bf36f9d203bc2c57b7fe594f7911f
| 863 |
mod local;
mod named_key;
mod requests;
// total mint supply
pub use local::{read_total_mint_supply, write_total_mint_supply};
// stake
pub use local::{
read_bonding_amount, read_unbond_requests, write_bonding_amount, write_unbond_requests,
};
// delegate
pub use local::{
read_redelegation_requests, read_undelegation_requests, write_redelegation_requests,
write_undelegation_requests,
};
pub use named_key::{read_delegations, write_delegations};
// vote
pub use local::{
read_vote, read_voted_amount, read_voting_amount, write_vote, write_voted_amount,
write_voting_amount,
};
// claim
pub use local::{
read_commission_amount, read_last_distributed_block, read_reward_amount,
write_commission_amount, write_last_distributed_block, write_reward_amount,
};
pub use requests::{RedelegateRequest, UnbondRequest, UndelegateRequest};
| 26.96875 | 91 | 0.800695 |
d974dcfab22b72b173ffd7420c0eb153be01dab6
| 103 |
//! This module contains all fuzzy control theory related entities.
pub mod norm;
pub mod membership;
| 20.6 | 67 | 0.776699 |
c166c400b36de1744622c374f57a3019ed003363
| 728 |
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-test
struct Obj {
member: uint
}
impl Obj {
static pure fn boom() -> bool {
return 1+1 == 2
}
pure fn chirp() {
self.boom(); //~ ERROR wat
}
}
fn main() {
let o = Obj { member: 0 };
o.chirp();
1 + 1;
}
| 24.266667 | 68 | 0.641484 |
563aae2d5116dc6d59f56ab213247f57052bdf26
| 3,869 |
#![cfg_attr(not(feature = "std"), no_std)]
/// Edit this file to define custom logic or remove it if it is not needed.
/// Learn more about FRAME and the core library of Substrate FRAME pallets:
/// <https://substrate.dev/docs/en/knowledgebase/runtime/frame>
pub use pallet::*;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
#[cfg(feature = "runtime-benchmarks")]
mod benchmarking;
pub mod weights;
#[frame_support::pallet]
pub mod pallet {
use frame_support::{dispatch::DispatchResult, pallet_prelude::*};
use frame_system::pallet_prelude::*;
pub use crate::weights::WeightInfo;
/// Configure the pallet by specifying the parameters and types on which it depends.
#[pallet::config]
pub trait Config: frame_system::Config {
/// Because this pallet emits events, it depends on the runtime's definition of an event.
type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;
type WeightInfo: WeightInfo;
}
#[pallet::pallet]
#[pallet::generate_store(pub(super) trait Store)]
pub struct Pallet<T>(_);
// The pallet's runtime storage items.
// https://substrate.dev/docs/en/knowledgebase/runtime/storage
#[pallet::storage]
#[pallet::getter(fn something)]
// Learn more about declaring storage items:
// https://substrate.dev/docs/en/knowledgebase/runtime/storage#declaring-storage-items
pub type Something<T> = StorageValue<_, u32>;
// Pallets use events to inform users when important changes are made.
// https://substrate.dev/docs/en/knowledgebase/runtime/events
#[pallet::event]
#[pallet::metadata(T::AccountId = "AccountId")]
#[pallet::generate_deposit(pub(super) fn deposit_event)]
pub enum Event<T: Config> {
/// Event documentation should end with an array that provides descriptive names for event
/// parameters. [something, who]
SomethingStored(u32, T::AccountId),
}
// Errors inform users that something went wrong.
#[pallet::error]
pub enum Error<T> {
/// Error names should be descriptive.
NoneValue,
/// Errors should have helpful documentation associated with them.
StorageOverflow,
}
// Dispatchable functions allows users to interact with the pallet and invoke state changes.
// These functions materialize as "extrinsics", which are often compared to transactions.
// Dispatchable functions must be annotated with a weight and must return a DispatchResult.
#[pallet::call]
impl<T: Config> Pallet<T> {
        /// An example dispatchable that takes a single value as a parameter, writes the value to
/// storage and emits an event. This function must be dispatched by a signed extrinsic.
#[pallet::weight(T::WeightInfo::do_something(*something))]
pub fn do_something(origin: OriginFor<T>, something: u32) -> DispatchResult {
// Check that the extrinsic was signed and get the signer.
// This function will return an error if the extrinsic is not signed.
// https://substrate.dev/docs/en/knowledgebase/runtime/origin
let who = ensure_signed(origin)?;
// Update storage.
<Something<T>>::put(something);
// Emit an event.
Self::deposit_event(Event::SomethingStored(something, who));
// Return a successful DispatchResultWithPostInfo
Ok(())
}
/// An example dispatchable that may throw a custom error.
#[pallet::weight(10_000 + T::DbWeight::get().reads_writes(1,1))]
pub fn cause_error(origin: OriginFor<T>) -> DispatchResult {
let _who = ensure_signed(origin)?;
// Read a value from storage.
match <Something<T>>::get() {
// Return an error if the value has not been set.
None => Err(Error::<T>::NoneValue)?,
Some(old) => {
// Increment the value read from storage; will error in the event of overflow.
let new = old.checked_add(1).ok_or(Error::<T>::StorageOverflow)?;
// Update the value in storage with the incremented result.
<Something<T>>::put(new);
Ok(())
},
}
}
}
}
| 35.495413 | 93 | 0.713104 |
ef29e5a6b022b624498854b93e13b4d81be48a22
| 16,320 |
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::mem;
use errors;
use syntax::ast::{self, Ident, NodeId};
use syntax::attr;
use syntax::codemap::{ExpnInfo, MacroAttribute, hygiene, respan};
use syntax::ext::base::ExtCtxt;
use syntax::ext::build::AstBuilder;
use syntax::ext::expand::ExpansionConfig;
use syntax::ext::hygiene::Mark;
use syntax::fold::Folder;
use syntax::parse::ParseSess;
use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax::visit::{self, Visitor};
use syntax_pos::{Span, DUMMY_SP};
use deriving;
const PROC_MACRO_KINDS: [&'static str; 3] =
["proc_macro_derive", "proc_macro_attribute", "proc_macro"];
struct ProcMacroDerive {
trait_name: ast::Name,
function_name: Ident,
span: Span,
attrs: Vec<ast::Name>,
}
struct ProcMacroDef {
function_name: Ident,
span: Span,
}
struct CollectProcMacros<'a> {
derives: Vec<ProcMacroDerive>,
attr_macros: Vec<ProcMacroDef>,
bang_macros: Vec<ProcMacroDef>,
in_root: bool,
handler: &'a errors::Handler,
is_proc_macro_crate: bool,
is_test_crate: bool,
}
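// Walks the crate to collect `#[proc_macro_derive]`, `#[proc_macro_attribute]`
// and `#[proc_macro]` functions and, for `proc-macro` crate types, appends the
// generated registrar module to the crate root.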
pub fn modify(sess: &ParseSess,
resolver: &mut ::syntax::ext::base::Resolver,
mut krate: ast::Crate,
is_proc_macro_crate: bool,
is_test_crate: bool,
num_crate_types: usize,
handler: &errors::Handler) -> ast::Crate {
let ecfg = ExpansionConfig::default("proc_macro".to_string());
let mut cx = ExtCtxt::new(sess, ecfg, resolver);
let (derives, attr_macros, bang_macros) = {
let mut collect = CollectProcMacros {
derives: Vec::new(),
attr_macros: Vec::new(),
bang_macros: Vec::new(),
in_root: true,
handler,
is_proc_macro_crate,
is_test_crate,
};
visit::walk_crate(&mut collect, &krate);
(collect.derives, collect.attr_macros, collect.bang_macros)
};
if !is_proc_macro_crate {
return krate
}
if num_crate_types > 1 {
handler.err("cannot mix `proc-macro` crate type with others");
}
if is_test_crate {
return krate;
}
krate.module.items.push(mk_registrar(&mut cx, &derives, &attr_macros, &bang_macros));
krate
}
fn is_proc_macro_attr(attr: &ast::Attribute) -> bool {
PROC_MACRO_KINDS.iter().any(|kind| attr.check_name(kind))
}
impl<'a> CollectProcMacros<'a> {
fn check_not_pub_in_root(&self, vis: &ast::Visibility, sp: Span) {
if self.is_proc_macro_crate &&
self.in_root &&
vis.node == ast::VisibilityKind::Public {
self.handler.span_err(sp,
"`proc-macro` crate types cannot \
export any items other than functions \
tagged with `#[proc_macro_derive]` currently");
}
}
fn collect_custom_derive(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) {
// Once we've located the `#[proc_macro_derive]` attribute, verify
// that it's of the form `#[proc_macro_derive(Foo)]` or
// `#[proc_macro_derive(Foo, attributes(A, ..))]`
let list = match attr.meta_item_list() {
Some(list) => list,
None => {
self.handler.span_err(attr.span(),
"attribute must be of form: \
#[proc_macro_derive(TraitName)]");
return
}
};
if list.len() != 1 && list.len() != 2 {
self.handler.span_err(attr.span(),
"attribute must have either one or two arguments");
return
}
let trait_attr = &list[0];
let attributes_attr = list.get(1);
let trait_name = match trait_attr.name() {
Some(name) => name,
_ => {
self.handler.span_err(trait_attr.span(), "not a meta item");
return
}
};
if !trait_attr.is_word() {
self.handler.span_err(trait_attr.span(), "must only be one word");
}
if deriving::is_builtin_trait(trait_name) {
self.handler.span_err(trait_attr.span(),
"cannot override a built-in #[derive] mode");
}
if self.derives.iter().any(|d| d.trait_name == trait_name) {
self.handler.span_err(trait_attr.span(),
"derive mode defined twice in this crate");
}
let proc_attrs: Vec<_> = if let Some(attr) = attributes_attr {
if !attr.check_name("attributes") {
self.handler.span_err(attr.span(), "second argument must be `attributes`")
}
attr.meta_item_list().unwrap_or_else(|| {
self.handler.span_err(attr.span(),
"attribute must be of form: \
`attributes(foo, bar)`");
&[]
}).into_iter().filter_map(|attr| {
let name = match attr.name() {
Some(name) => name,
_ => {
self.handler.span_err(attr.span(), "not a meta item");
return None;
},
};
if !attr.is_word() {
self.handler.span_err(attr.span(), "must only be one word");
return None;
}
Some(name)
}).collect()
} else {
Vec::new()
};
if self.in_root && item.vis.node == ast::VisibilityKind::Public {
self.derives.push(ProcMacroDerive {
span: item.span,
trait_name,
function_name: item.ident,
attrs: proc_attrs,
});
} else {
let msg = if !self.in_root {
"functions tagged with `#[proc_macro_derive]` must \
currently reside in the root of the crate"
} else {
"functions tagged with `#[proc_macro_derive]` must be `pub`"
};
self.handler.span_err(item.span, msg);
}
}
fn collect_attr_proc_macro(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) {
if let Some(_) = attr.meta_item_list() {
self.handler.span_err(attr.span, "`#[proc_macro_attribute]` attribute
does not take any arguments");
return;
}
if self.in_root && item.vis.node == ast::VisibilityKind::Public {
self.attr_macros.push(ProcMacroDef {
span: item.span,
function_name: item.ident,
});
} else {
let msg = if !self.in_root {
"functions tagged with `#[proc_macro_attribute]` must \
currently reside in the root of the crate"
} else {
"functions tagged with `#[proc_macro_attribute]` must be `pub`"
};
self.handler.span_err(item.span, msg);
}
}
fn collect_bang_proc_macro(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) {
if let Some(_) = attr.meta_item_list() {
self.handler.span_err(attr.span, "`#[proc_macro]` attribute
does not take any arguments");
return;
}
if self.in_root && item.vis.node == ast::VisibilityKind::Public {
self.bang_macros.push(ProcMacroDef {
span: item.span,
function_name: item.ident,
});
} else {
let msg = if !self.in_root {
"functions tagged with `#[proc_macro]` must \
currently reside in the root of the crate"
} else {
"functions tagged with `#[proc_macro]` must be `pub`"
};
self.handler.span_err(item.span, msg);
}
}
}
impl<'a> Visitor<'a> for CollectProcMacros<'a> {
fn visit_item(&mut self, item: &'a ast::Item) {
if let ast::ItemKind::MacroDef(..) = item.node {
if self.is_proc_macro_crate && attr::contains_name(&item.attrs, "macro_export") {
let msg =
"cannot export macro_rules! macros from a `proc-macro` crate type currently";
self.handler.span_err(item.span, msg);
}
}
// First up, make sure we're checking a bare function. If we're not then
// we're just not interested in this item.
//
// If we find one, try to locate a `#[proc_macro_derive]` attribute on
// it.
let is_fn = match item.node {
ast::ItemKind::Fn(..) => true,
_ => false,
};
let mut found_attr: Option<&'a ast::Attribute> = None;
for attr in &item.attrs {
if is_proc_macro_attr(&attr) {
if let Some(prev_attr) = found_attr {
let msg = if attr.path == prev_attr.path {
format!("Only one `#[{}]` attribute is allowed on any given function",
attr.path)
} else {
format!("`#[{}]` and `#[{}]` attributes cannot both be applied \
to the same function", attr.path, prev_attr.path)
};
self.handler.struct_span_err(attr.span(), &msg)
.span_note(prev_attr.span(), "Previous attribute here")
.emit();
return;
}
found_attr = Some(attr);
}
}
let attr = match found_attr {
None => {
self.check_not_pub_in_root(&item.vis, item.span);
return visit::walk_item(self, item);
},
Some(attr) => attr,
};
if !is_fn {
let msg = format!("the `#[{}]` attribute may only be used on bare functions",
attr.path);
self.handler.span_err(attr.span(), &msg);
return;
}
if self.is_test_crate {
return;
}
if !self.is_proc_macro_crate {
let msg = format!("the `#[{}]` attribute is only usable with crates of the \
`proc-macro` crate type", attr.path);
self.handler.span_err(attr.span(), &msg);
return;
}
if attr.check_name("proc_macro_derive") {
self.collect_custom_derive(item, attr);
} else if attr.check_name("proc_macro_attribute") {
self.collect_attr_proc_macro(item, attr);
} else if attr.check_name("proc_macro") {
self.collect_bang_proc_macro(item, attr);
};
visit::walk_item(self, item);
}
fn visit_mod(&mut self, m: &'a ast::Mod, _s: Span, _a: &[ast::Attribute], id: NodeId) {
let mut prev_in_root = self.in_root;
if id != ast::CRATE_NODE_ID {
prev_in_root = mem::replace(&mut self.in_root, false);
}
visit::walk_mod(self, m);
self.in_root = prev_in_root;
}
fn visit_mac(&mut self, mac: &ast::Mac) {
visit::walk_mac(self, mac)
}
}
// Creates a new module which looks like:
//
// mod $gensym {
// extern crate proc_macro;
//
// use proc_macro::__internal::Registry;
//
// #[plugin_registrar]
// fn registrar(registrar: &mut Registry) {
// registrar.register_custom_derive($name_trait1, ::$name1, &[]);
// registrar.register_custom_derive($name_trait2, ::$name2, &["attribute_name"]);
// // ...
// }
// }
fn mk_registrar(cx: &mut ExtCtxt,
custom_derives: &[ProcMacroDerive],
custom_attrs: &[ProcMacroDef],
custom_macros: &[ProcMacroDef]) -> P<ast::Item> {
let mark = Mark::fresh(Mark::root());
mark.set_expn_info(ExpnInfo {
call_site: DUMMY_SP,
def_site: None,
format: MacroAttribute(Symbol::intern("proc_macro")),
allow_internal_unstable: true,
allow_internal_unsafe: false,
local_inner_macros: false,
edition: hygiene::default_edition(),
});
let span = DUMMY_SP.apply_mark(mark);
let proc_macro = Ident::from_str("proc_macro");
let krate = cx.item(span,
proc_macro,
Vec::new(),
ast::ItemKind::ExternCrate(None));
let __internal = Ident::from_str("__internal");
let registry = Ident::from_str("Registry");
let registrar = Ident::from_str("_registrar");
let register_custom_derive = Ident::from_str("register_custom_derive");
let register_attr_proc_macro = Ident::from_str("register_attr_proc_macro");
let register_bang_proc_macro = Ident::from_str("register_bang_proc_macro");
let mut stmts = custom_derives.iter().map(|cd| {
let path = cx.path_global(cd.span, vec![cd.function_name]);
let trait_name = cx.expr_str(cd.span, cd.trait_name);
let attrs = cx.expr_vec_slice(
span,
cd.attrs.iter().map(|&s| cx.expr_str(cd.span, s)).collect::<Vec<_>>()
);
let registrar = cx.expr_ident(span, registrar);
let ufcs_path = cx.path(span, vec![proc_macro, __internal, registry,
register_custom_derive]);
cx.stmt_expr(cx.expr_call(span, cx.expr_path(ufcs_path),
vec![registrar, trait_name, cx.expr_path(path), attrs]))
}).collect::<Vec<_>>();
stmts.extend(custom_attrs.iter().map(|ca| {
let name = cx.expr_str(ca.span, ca.function_name.name);
let path = cx.path_global(ca.span, vec![ca.function_name]);
let registrar = cx.expr_ident(ca.span, registrar);
let ufcs_path = cx.path(span,
vec![proc_macro, __internal, registry, register_attr_proc_macro]);
cx.stmt_expr(cx.expr_call(span, cx.expr_path(ufcs_path),
vec![registrar, name, cx.expr_path(path)]))
}));
stmts.extend(custom_macros.iter().map(|cm| {
let name = cx.expr_str(cm.span, cm.function_name.name);
let path = cx.path_global(cm.span, vec![cm.function_name]);
let registrar = cx.expr_ident(cm.span, registrar);
let ufcs_path = cx.path(span,
vec![proc_macro, __internal, registry, register_bang_proc_macro]);
cx.stmt_expr(cx.expr_call(span, cx.expr_path(ufcs_path),
vec![registrar, name, cx.expr_path(path)]))
}));
let path = cx.path(span, vec![proc_macro, __internal, registry]);
let registrar_path = cx.ty_path(path);
let arg_ty = cx.ty_rptr(span, registrar_path, None, ast::Mutability::Mutable);
let func = cx.item_fn(span,
registrar,
vec![cx.arg(span, registrar, arg_ty)],
cx.ty(span, ast::TyKind::Tup(Vec::new())),
cx.block(span, stmts));
let derive_registrar = cx.meta_word(span, Symbol::intern("rustc_derive_registrar"));
let derive_registrar = cx.attribute(span, derive_registrar);
let func = func.map(|mut i| {
i.attrs.push(derive_registrar);
i.vis = respan(span, ast::VisibilityKind::Public);
i
});
let ident = ast::Ident::with_empty_ctxt(Symbol::gensym("registrar"));
let module = cx.item_mod(span, span, ident, Vec::new(), vec![krate, func]).map(|mut i| {
i.vis = respan(span, ast::VisibilityKind::Public);
i
});
cx.monotonic_expander().fold_item(module).pop().unwrap()
}
| 36.02649 | 98 | 0.541789 |
e899b1b278f681721221e32e0ffd233419e8d8aa
| 8,228 |
//! Rust re-implementation of SFMT
use super::*;
use crate::packed::*;
/// Parameters used in sfmt.
pub trait SfmtParams<const MEXP: usize, const MEXP_N: usize>: Sized {
const SFMT_MEXP: usize = MEXP;
    const SFMT_N: usize = MEXP_N; // must equal Self::SFMT_MEXP / 128 + 1
const SFMT_N32: usize = Self::SFMT_N * 4;
const SFMT_POS1: usize;
const SFMT_SL1: i32;
const SFMT_SL2: i32;
const SFMT_SR1: i32;
const SFMT_SR2: i32;
const SFMT_MSK1: i32;
const SFMT_MSK2: i32;
const SFMT_MSK3: i32;
const SFMT_MSK4: i32;
const SFMT_PARITY1: u32;
const SFMT_PARITY2: u32;
const SFMT_PARITY3: u32;
const SFMT_PARITY4: u32;
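    // One step of the SFMT recursion on a 128-bit state word, using SSE2:
    //   r = a ^ (a << SL2 bytes) ^ ((b >> SR1 bits per 32-bit lane) & mask)
    //         ^ (c >> SR2 bytes) ^ (d << SL1 bits per 32-bit lane)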
fn mm_recursion(a: i32x4, b: i32x4, c: i32x4, d: i32x4) -> i32x4 {
#[cfg(target_arch = "x86")]
use std::arch::x86::*;
#[cfg(target_arch = "x86_64")]
use std::arch::x86_64::*;
unsafe {
let mask = new(
Self::SFMT_MSK1,
Self::SFMT_MSK2,
Self::SFMT_MSK3,
Self::SFMT_MSK4,
);
let y = _mm_srli_epi32(b, Self::SFMT_SR1);
let z = _mm_srli_si128(c, Self::SFMT_SR2);
let v = _mm_slli_epi32(d, Self::SFMT_SL1);
let z = _mm_xor_si128(z, a);
let z = _mm_xor_si128(z, v);
let x = _mm_slli_si128(a, Self::SFMT_SL2);
let y = _mm_and_si128(y, mask);
let z = _mm_xor_si128(z, x);
_mm_xor_si128(z, y)
}
}
fn sfmt_gen_rand_all(sfmt: &mut paramed::SFMT<MEXP, MEXP_N>) {
let st = &mut sfmt.state;
let mut r1 = st[Self::SFMT_N - 2];
let mut r2 = st[Self::SFMT_N - 1];
for i in 0..(Self::SFMT_N - Self::SFMT_POS1) {
st[i] = Self::mm_recursion(st[i], st[i + Self::SFMT_POS1], r1, r2);
r1 = r2;
r2 = st[i];
}
for i in (Self::SFMT_N - Self::SFMT_POS1)..Self::SFMT_N {
st[i] = Self::mm_recursion(st[i], st[i + Self::SFMT_POS1 - Self::SFMT_N], r1, r2);
r1 = r2;
r2 = st[i];
}
}
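    // Certify that the initial state yields the full period (2^MEXP - 1): if the
    // parity check fails, flip a single bit of `state[0]`, selected by the parity
    // vector, to restore it (mirroring the reference SFMT implementation).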
fn period_certification(sfmt: &mut paramed::SFMT<MEXP, MEXP_N>) {
let mut inner = 0_u32;
let st = &mut sfmt.state[0];
let parity = [
Self::SFMT_PARITY1,
Self::SFMT_PARITY2,
Self::SFMT_PARITY3,
Self::SFMT_PARITY4,
];
for i in 0..4 {
inner ^= extract(*st, i) & parity[i];
}
for i in [16, 8, 4, 2, 1].iter() {
inner ^= inner >> i;
}
inner &= 1;
if inner == 1 {
return;
}
for i in 0..4 {
let mut work = 1_u32;
for _ in 0..32 {
if (work & parity[i]) != 0 {
let val = extract(*st, i) ^ work;
insert(st, val as i32, i);
return;
}
work = work << 1;
}
}
}
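    // Initialization recurrence used for seeding, with the MT19937 initializer
    // constant 1812433253 and wrapping u32 arithmetic.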
fn iterate(pre: i32, i: i32) -> i32 {
use std::num::Wrapping;
let pre = Wrapping(pre as u32);
let i = Wrapping(i as u32);
(Wrapping(1812433253) * (pre ^ (pre >> 30)) + i).0 as i32
}
fn map(a: i32, idx: i32) -> (i32x4, i32) {
let b = Self::iterate(a, 4 * idx + 1);
let c = Self::iterate(b, 4 * idx + 2);
let d = Self::iterate(c, 4 * idx + 3);
let a2 = Self::iterate(d, 4 * idx + 4);
(new(a, b, c, d), a2)
}
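    // Seed the whole state array from a single u32 seed, mark the output index
    // as exhausted so the first draw regenerates the state, and certify the
    // period.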
fn sfmt_init_gen_rand(sfmt: &mut paramed::SFMT<MEXP, MEXP_N>, seed: u32) {
let mut pre = seed as i32;
for (idx, v) in sfmt.state.iter_mut().enumerate() {
let (v_, pre_) = Self::map(pre, idx as i32);
*v = v_;
pre = pre_;
}
sfmt.idx = Self::SFMT_N32;
Self::period_certification(sfmt);
}
}
/// Wrapper for `MEXP` parameter.
pub struct SFMTMEXP<const MEXP: usize, const MEXP_N: usize>;
macro_rules! parms_impl {
($mexp : expr, $n : expr, $pos1 : expr, $sl1 : expr, $sl2 : expr, $sr1 : expr, $sr2 : expr,
$msk1 : expr, $msk2 : expr, $msk3 : expr, $msk4 : expr,
$parity1 : expr, $parity2 : expr, $parity3 : expr, $parity4 : expr) => {
impl SfmtParams<$mexp, $n> for SFMTMEXP<$mexp, $n> {
const SFMT_POS1: usize = $pos1;
const SFMT_SL1: i32 = $sl1;
const SFMT_SL2: i32 = $sl2;
const SFMT_SR1: i32 = $sr1;
const SFMT_SR2: i32 = $sr2;
const SFMT_MSK1: i32 = $msk1 as i32;
const SFMT_MSK2: i32 = $msk2 as i32;
const SFMT_MSK3: i32 = $msk3 as i32;
const SFMT_MSK4: i32 = $msk4 as i32;
const SFMT_PARITY1: u32 = $parity1;
const SFMT_PARITY2: u32 = $parity2;
const SFMT_PARITY3: u32 = $parity3;
const SFMT_PARITY4: u32 = $parity4;
}
};
}
parms_impl!(
607,
{ 607 / 128 + 1 },
2,
15,
3,
13,
3,
0xfdff_37ff_u32,
0xef7f_3f7d_u32,
0xff77_7b7d_u32,
0x7ff7_fb2f_u32,
0x0000_0001,
0x0000_0000,
0x0000_0000,
0x5986_f054
);
parms_impl!(
1279,
{ 1279 / 128 + 1 },
7,
14,
3,
5,
1,
0xf7fe_fffd_u32,
0x7fef_cfff_u32,
0xaff3_ef3f_u32,
0xb5ff_ff7f_u32,
0x0000_0001_u32,
0x0000_0000_u32,
0x0000_0000_u32,
0x2000_0000_u32
);
parms_impl!(
2281,
{ 2281 / 128 + 1 },
12,
19,
1,
5,
1,
0xbff7_ffbf_u32,
0xfdff_fffe_u32,
0xf7ff_ef7f_u32,
0xf2f7_cbbf_u32,
0x0000_0001_u32,
0x0000_0000_u32,
0x0000_0000_u32,
0x41df_a600_u32
);
parms_impl!(
4253,
{ 4253 / 128 + 1 },
17,
20,
1,
7,
1,
0x9f7b_ffff_u32,
0x9fff_ff5f_u32,
0x3eff_fffb_u32,
0xffff_f7bb_u32,
0xa800_0001_u32,
0xaf53_90a3_u32,
0xb740_b3f8_u32,
0x6c11_486d_u32
);
parms_impl!(
11213,
{ 11213 / 128 + 1 },
68,
14,
3,
7,
3,
0xefff_f7fb_u32,
0xffff_ffef_u32,
0xdfdf_bfff_u32,
0x7fff_dbfd_u32,
0x0000_0001_u32,
0x0000_0000_u32,
0xb740_b3f8_u32,
0x6c11_486d_u32
);
parms_impl!(
19937,
{ 19937 / 128 + 1 },
122,
18,
1,
11,
1,
0xdfff_ffef_u32,
0xddfe_cb7f_u32,
0xbffa_ffff_u32,
0xbfff_fff6_u32,
0x0000_0001_u32,
0x0000_0000_u32,
0x0000_0000_u32,
0x13c9_e684_u32
);
parms_impl!(
44497,
{ 44497 / 128 + 1 },
330,
5,
3,
9,
3,
0xefff_fffb_u32,
0xdfbe_bfff_u32,
0xbfbf_7bef_u32,
0x9ffd_7bff_u32,
0x0000_0001_u32,
0x0000_0000_u32,
0xa3ac_4000_u32,
0xecc1_327a_u32
);
parms_impl!(
86243,
{ 86243 / 128 + 1 },
366,
6,
7,
19,
1,
0xfdbf_fbff_u32,
0xbff7_ff3f_u32,
0xfd77_efff_u32,
0xbf9f_f3ff_u32,
0x0000_0001_u32,
0x0000_0000_u32,
0x0000_0000_u32,
0xe952_8d85_u32
);
parms_impl!(
132049,
{ 132049 / 128 + 1 },
110,
19,
1,
21,
1,
0xffff_bb5f_u32,
0xfb6e_bf95_u32,
0xfffe_fffa_u32,
0xcff7_7fff_u32,
0x0000_0001_u32,
0x0000_0000_u32,
0xcb52_0000_u32,
0xc7e9_1c7d_u32
);
parms_impl!(
216091,
{ 216091 / 128 + 1 },
627,
11,
3,
10,
1,
0xbff7_bff7_u32,
0xbfff_ffff_u32,
0xbfff_fa7f_u32,
0xffdd_fbfb_u32,
0xf800_0001_u32,
0x89e8_0709_u32,
0x3bd2_b64b_u32,
0x0c64_b1e4_u32
);
#[cfg(test)]
mod tests {
use super::*;
fn split(a: i32x4) -> [u32; 4] {
[extract(a, 0), extract(a, 1), extract(a, 2), extract(a, 3)]
}
#[test]
fn test_mm_recursion_19937() {
let a = new(1, 2, 3, 4);
let z = SFMTMEXP::<19937, { 19937 / 128 + 1 }>::mm_recursion(a, a, a, a);
let zc = new(33816833, 50856450, 67896067, 1049604); // calculated by C code
assert_eq!(split(z), split(zc));
let b = new(431, 232, 83, 14);
let c = new(213, 22, 93, 234);
let d = new(112, 882, 23, 124);
let z = SFMTMEXP::<19937, { 19937 / 128 + 1 }>::mm_recursion(a, b, c, d);
let zc = new(398459137, 1355284994, -363068669, 32506884); // calculated by C code
assert_eq!(split(z), split(zc));
}
}
| 23.918605 | 95 | 0.528318 |
086d8cff95770277b0497e47eb849bd3daeac8b1
| 174 |
// Unspecified kind should fail with an error
// compile-flags: -l =mylib
// error-pattern: unknown library kind ``, expected one of: static, dylib, framework
fn main() {}
| 24.857143 | 84 | 0.706897 |
5d3b3d7668fda6c96fce51da98b78ac663428415
| 1,405 |
use core::mem;
use core::pin::Pin;
use futures_core::future::{FusedFuture, Future};
use futures_core::ready;
use futures_core::stream::{FusedStream, TryStream};
use futures_core::task::{Context, Poll};
use pin_project_lite::pin_project;
pin_project! {
/// Future for the [`try_collect`](super::TryStreamExt::try_collect) method.
#[derive(Debug)]
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct TryCollect<St, C> {
#[pin]
stream: St,
items: C,
}
}
impl<St: TryStream, C: Default> TryCollect<St, C> {
pub(super) fn new(s: St) -> Self {
Self { stream: s, items: Default::default() }
}
}
impl<St, C> FusedFuture for TryCollect<St, C>
where
St: TryStream + FusedStream,
C: Default + Extend<St::Ok>,
{
fn is_terminated(&self) -> bool {
self.stream.is_terminated()
}
}
impl<St, C> Future for TryCollect<St, C>
where
St: TryStream,
C: Default + Extend<St::Ok>,
{
type Output = Result<C, St::Error>;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let mut this = self.project();
Poll::Ready(Ok(loop {
match ready!(this.stream.as_mut().try_poll_next(cx)?) {
Some(x) => this.items.extend(Some(x)),
None => break mem::replace(this.items, Default::default()),
}
}))
}
}
| 26.509434 | 80 | 0.59573 |
5b352492554d4d3d7406c7d8ced5684d4f0f762c
| 1,787 |
#![no_main]
#![no_std]
use panic_halt as _;
use stm32f0xx_hal as hal;
use crate::hal::{
prelude::*,
serial::Serial,
spi::Spi,
spi::{Mode, Phase, Polarity},
stm32,
};
use nb::block;
use cortex_m_rt::entry;
/// A basic serial to spi example
///
/// If you connect MOSI & MISO pins together, you'll see all characters
/// that you typed in your serial terminal echoed back
///
/// If you connect MISO to GND, you'll see nothing coming back
#[entry]
fn main() -> ! {
const MODE: Mode = Mode {
polarity: Polarity::IdleHigh,
phase: Phase::CaptureOnSecondTransition,
};
if let Some(p) = stm32::Peripherals::take() {
cortex_m::interrupt::free(move |cs| {
let mut flash = p.FLASH;
let mut rcc = p.RCC.configure().freeze(&mut flash);
let gpioa = p.GPIOA.split(&mut rcc);
// Configure pins for SPI
let sck = gpioa.pa5.into_alternate_af0(cs);
let miso = gpioa.pa6.into_alternate_af0(cs);
let mosi = gpioa.pa7.into_alternate_af0(cs);
// Configure SPI with 1MHz rate
let mut spi = Spi::spi1(p.SPI1, (sck, miso, mosi), MODE, 1.mhz(), &mut rcc);
let tx = gpioa.pa9.into_alternate_af1(cs);
let rx = gpioa.pa10.into_alternate_af1(cs);
let serial = Serial::usart1(p.USART1, (tx, rx), 115_200.bps(), &mut rcc);
let (mut tx, mut rx) = serial.split();
loop {
let serial_received = block!(rx.read()).unwrap();
block!(spi.send(serial_received)).ok();
let spi_received = block!(spi.read()).unwrap();
block!(tx.write(spi_received)).ok();
}
});
}
loop {
continue;
}
}
| 25.169014 | 88 | 0.558478 |
21ba2b5abcfb672a1c1cfa07b1f39a312884cb7e
| 199,776 |
// ignore-tidy-filelength
// ignore-tidy-undocumented-unsafe
//! Slice management and manipulation.
//!
//! For more details see [`std::slice`].
//!
//! [`std::slice`]: ../../std/slice/index.html
#![stable(feature = "rust1", since = "1.0.0")]
// How this module is organized.
//
// The library infrastructure for slices is fairly messy. There's
// a lot of stuff defined here. Let's keep it clean.
//
// The layout of this file is thus:
//
// * Inherent methods. This is where most of the slice API resides.
// * Implementations of a few common traits with important slice ops.
// * Definitions of a bunch of iterators.
// * Free functions.
// * The `raw` and `bytes` submodules.
// * Boilerplate trait implementations.
use crate::cmp;
use crate::cmp::Ordering::{self, Equal, Greater, Less};
use crate::fmt;
use crate::intrinsics::{assume, exact_div, is_aligned_and_not_null, unchecked_sub};
use crate::iter::*;
use crate::marker::{self, Copy, Send, Sized, Sync};
use crate::mem;
use crate::ops::{self, FnMut, Range};
use crate::option::Option;
use crate::option::Option::{None, Some};
use crate::ptr::{self, NonNull};
use crate::result::Result;
use crate::result::Result::{Err, Ok};
#[unstable(
feature = "slice_internals",
issue = "none",
reason = "exposed from core to be reused in std; use the memchr crate"
)]
/// Pure rust memchr implementation, taken from rust-memchr
pub mod memchr;
mod rotate;
mod sort;
//
// Extension traits
//
#[lang = "slice"]
#[cfg(not(test))]
impl<T> [T] {
/// Returns the number of elements in the slice.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3];
/// assert_eq!(a.len(), 3);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_slice_len", since = "1.32.0")]
#[inline]
// SAFETY: const sound because we transmute out the length field as a usize (which it must be)
#[allow(unused_attributes)]
#[allow_internal_unstable(const_fn_union)]
pub const fn len(&self) -> usize {
unsafe { crate::ptr::Repr { rust: self }.raw.len }
}
/// Returns `true` if the slice has a length of 0.
///
/// # Examples
///
/// ```
/// let a = [1, 2, 3];
/// assert!(!a.is_empty());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_slice_is_empty", since = "1.32.0")]
#[inline]
pub const fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns the first element of the slice, or `None` if it is empty.
///
/// # Examples
///
/// ```
/// let v = [10, 40, 30];
/// assert_eq!(Some(&10), v.first());
///
/// let w: &[i32] = &[];
/// assert_eq!(None, w.first());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn first(&self) -> Option<&T> {
if let [first, ..] = self { Some(first) } else { None }
}
    /// Returns a mutable reference to the first element of the slice, or `None` if it is empty.
///
/// # Examples
///
/// ```
/// let x = &mut [0, 1, 2];
///
/// if let Some(first) = x.first_mut() {
/// *first = 5;
/// }
/// assert_eq!(x, &[5, 1, 2]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn first_mut(&mut self) -> Option<&mut T> {
if let [first, ..] = self { Some(first) } else { None }
}
/// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
///
/// # Examples
///
/// ```
/// let x = &[0, 1, 2];
///
/// if let Some((first, elements)) = x.split_first() {
/// assert_eq!(first, &0);
/// assert_eq!(elements, &[1, 2]);
/// }
/// ```
#[stable(feature = "slice_splits", since = "1.5.0")]
#[inline]
pub fn split_first(&self) -> Option<(&T, &[T])> {
if let [first, tail @ ..] = self { Some((first, tail)) } else { None }
}
/// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
///
/// # Examples
///
/// ```
/// let x = &mut [0, 1, 2];
///
/// if let Some((first, elements)) = x.split_first_mut() {
/// *first = 3;
/// elements[0] = 4;
/// elements[1] = 5;
/// }
/// assert_eq!(x, &[3, 4, 5]);
/// ```
#[stable(feature = "slice_splits", since = "1.5.0")]
#[inline]
pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> {
if let [first, tail @ ..] = self { Some((first, tail)) } else { None }
}
/// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
///
/// # Examples
///
/// ```
/// let x = &[0, 1, 2];
///
/// if let Some((last, elements)) = x.split_last() {
/// assert_eq!(last, &2);
/// assert_eq!(elements, &[0, 1]);
/// }
/// ```
#[stable(feature = "slice_splits", since = "1.5.0")]
#[inline]
pub fn split_last(&self) -> Option<(&T, &[T])> {
if let [init @ .., last] = self { Some((last, init)) } else { None }
}
/// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
///
/// # Examples
///
/// ```
/// let x = &mut [0, 1, 2];
///
/// if let Some((last, elements)) = x.split_last_mut() {
/// *last = 3;
/// elements[0] = 4;
/// elements[1] = 5;
/// }
/// assert_eq!(x, &[4, 5, 3]);
/// ```
#[stable(feature = "slice_splits", since = "1.5.0")]
#[inline]
pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> {
if let [init @ .., last] = self { Some((last, init)) } else { None }
}
/// Returns the last element of the slice, or `None` if it is empty.
///
/// # Examples
///
/// ```
/// let v = [10, 40, 30];
/// assert_eq!(Some(&30), v.last());
///
/// let w: &[i32] = &[];
/// assert_eq!(None, w.last());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn last(&self) -> Option<&T> {
if let [.., last] = self { Some(last) } else { None }
}
    /// Returns a mutable reference to the last item in the slice, or `None` if it is empty.
///
/// # Examples
///
/// ```
/// let x = &mut [0, 1, 2];
///
/// if let Some(last) = x.last_mut() {
/// *last = 10;
/// }
/// assert_eq!(x, &[0, 1, 10]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn last_mut(&mut self) -> Option<&mut T> {
if let [.., last] = self { Some(last) } else { None }
}
/// Returns a reference to an element or subslice depending on the type of
/// index.
///
/// - If given a position, returns a reference to the element at that
/// position or `None` if out of bounds.
/// - If given a range, returns the subslice corresponding to that range,
/// or `None` if out of bounds.
///
/// # Examples
///
/// ```
/// let v = [10, 40, 30];
/// assert_eq!(Some(&40), v.get(1));
/// assert_eq!(Some(&[10, 40][..]), v.get(0..2));
/// assert_eq!(None, v.get(3));
/// assert_eq!(None, v.get(0..4));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn get<I>(&self, index: I) -> Option<&I::Output>
where
I: SliceIndex<Self>,
{
index.get(self)
}
/// Returns a mutable reference to an element or subslice depending on the
/// type of index (see [`get`]) or `None` if the index is out of bounds.
///
/// [`get`]: #method.get
///
/// # Examples
///
/// ```
/// let x = &mut [0, 1, 2];
///
/// if let Some(elem) = x.get_mut(1) {
/// *elem = 42;
/// }
/// assert_eq!(x, &[0, 42, 2]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
where
I: SliceIndex<Self>,
{
index.get_mut(self)
}
/// Returns a reference to an element or subslice, without doing bounds
/// checking.
///
    /// This is generally not recommended; use with caution!
/// Calling this method with an out-of-bounds index is *[undefined behavior]*
/// even if the resulting reference is not used.
/// For a safe alternative see [`get`].
///
/// [`get`]: #method.get
/// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
///
/// # Examples
///
/// ```
/// let x = &[1, 2, 4];
///
/// unsafe {
/// assert_eq!(x.get_unchecked(1), &2);
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
where
I: SliceIndex<Self>,
{
index.get_unchecked(self)
}
/// Returns a mutable reference to an element or subslice, without doing
/// bounds checking.
///
    /// This is generally not recommended; use with caution!
/// Calling this method with an out-of-bounds index is *[undefined behavior]*
/// even if the resulting reference is not used.
/// For a safe alternative see [`get_mut`].
///
/// [`get_mut`]: #method.get_mut
/// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
///
/// # Examples
///
/// ```
/// let x = &mut [1, 2, 4];
///
/// unsafe {
/// let elem = x.get_unchecked_mut(1);
/// *elem = 13;
/// }
/// assert_eq!(x, &[1, 13, 4]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
where
I: SliceIndex<Self>,
{
index.get_unchecked_mut(self)
}
/// Returns a raw pointer to the slice's buffer.
///
/// The caller must ensure that the slice outlives the pointer this
/// function returns, or else it will end up pointing to garbage.
///
/// The caller must also ensure that the memory the pointer (non-transitively) points to
/// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer
/// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`].
///
/// Modifying the container referenced by this slice may cause its buffer
/// to be reallocated, which would also make any pointers to it invalid.
///
/// # Examples
///
/// ```
/// let x = &[1, 2, 4];
/// let x_ptr = x.as_ptr();
///
/// unsafe {
/// for i in 0..x.len() {
/// assert_eq!(x.get_unchecked(i), &*x_ptr.add(i));
/// }
/// }
/// ```
///
/// [`as_mut_ptr`]: #method.as_mut_ptr
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_slice_as_ptr", since = "1.32.0")]
#[inline]
pub const fn as_ptr(&self) -> *const T {
self as *const [T] as *const T
}
/// Returns an unsafe mutable pointer to the slice's buffer.
///
/// The caller must ensure that the slice outlives the pointer this
/// function returns, or else it will end up pointing to garbage.
///
/// Modifying the container referenced by this slice may cause its buffer
/// to be reallocated, which would also make any pointers to it invalid.
///
/// # Examples
///
/// ```
/// let x = &mut [1, 2, 4];
/// let x_ptr = x.as_mut_ptr();
///
/// unsafe {
/// for i in 0..x.len() {
/// *x_ptr.add(i) += 2;
/// }
/// }
/// assert_eq!(x, &[3, 4, 6]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn as_mut_ptr(&mut self) -> *mut T {
self as *mut [T] as *mut T
}
/// Returns the two raw pointers spanning the slice.
///
/// The returned range is half-open, which means that the end pointer
/// points *one past* the last element of the slice. This way, an empty
/// slice is represented by two equal pointers, and the difference between
/// the two pointers represents the size of the slice.
///
/// See [`as_ptr`] for warnings on using these pointers. The end pointer
/// requires extra caution, as it does not point to a valid element in the
/// slice.
///
/// This function is useful for interacting with foreign interfaces which
/// use two pointers to refer to a range of elements in memory, as is
/// common in C++.
///
/// It can also be useful to check if a pointer to an element refers to an
/// element of this slice:
///
/// ```
/// #![feature(slice_ptr_range)]
///
/// let a = [1, 2, 3];
/// let x = &a[1] as *const _;
/// let y = &5 as *const _;
///
/// assert!(a.as_ptr_range().contains(&x));
/// assert!(!a.as_ptr_range().contains(&y));
/// ```
///
/// [`as_ptr`]: #method.as_ptr
#[unstable(feature = "slice_ptr_range", issue = "65807")]
#[inline]
pub fn as_ptr_range(&self) -> Range<*const T> {
// The `add` here is safe, because:
//
// - Both pointers are part of the same object, as pointing directly
// past the object also counts.
//
// - The size of the slice is never larger than isize::MAX bytes, as
// noted here:
// - https://github.com/rust-lang/unsafe-code-guidelines/issues/102#issuecomment-473340447
// - https://doc.rust-lang.org/reference/behavior-considered-undefined.html
// - https://doc.rust-lang.org/core/slice/fn.from_raw_parts.html#safety
// (This doesn't seem normative yet, but the very same assumption is
// made in many places, including the Index implementation of slices.)
//
// - There is no wrapping around involved, as slices do not wrap past
// the end of the address space.
//
// See the documentation of pointer::add.
let start = self.as_ptr();
let end = unsafe { start.add(self.len()) };
start..end
}
/// Returns the two unsafe mutable pointers spanning the slice.
///
/// The returned range is half-open, which means that the end pointer
/// points *one past* the last element of the slice. This way, an empty
/// slice is represented by two equal pointers, and the difference between
/// the two pointers represents the size of the slice.
///
/// See [`as_mut_ptr`] for warnings on using these pointers. The end
/// pointer requires extra caution, as it does not point to a valid element
/// in the slice.
///
/// This function is useful for interacting with foreign interfaces which
/// use two pointers to refer to a range of elements in memory, as is
/// common in C++.
///
/// [`as_mut_ptr`]: #method.as_mut_ptr
#[unstable(feature = "slice_ptr_range", issue = "65807")]
#[inline]
pub fn as_mut_ptr_range(&mut self) -> Range<*mut T> {
// See as_ptr_range() above for why `add` here is safe.
let start = self.as_mut_ptr();
let end = unsafe { start.add(self.len()) };
start..end
}
/// Swaps two elements in the slice.
///
/// # Arguments
///
/// * a - The index of the first element
/// * b - The index of the second element
///
/// # Panics
///
/// Panics if `a` or `b` are out of bounds.
///
/// # Examples
///
/// ```
/// let mut v = ["a", "b", "c", "d"];
/// v.swap(1, 3);
/// assert!(v == ["a", "d", "c", "b"]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn swap(&mut self, a: usize, b: usize) {
unsafe {
// Can't take two mutable loans from one vector, so instead just cast
// them to their raw pointers to do the swap
let pa: *mut T = &mut self[a];
let pb: *mut T = &mut self[b];
ptr::swap(pa, pb);
}
}
/// Reverses the order of elements in the slice, in place.
///
/// # Examples
///
/// ```
/// let mut v = [1, 2, 3];
/// v.reverse();
/// assert!(v == [3, 2, 1]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn reverse(&mut self) {
let mut i: usize = 0;
let ln = self.len();
// For very small types, all the individual reads in the normal
// path perform poorly. We can do better, given efficient unaligned
// load/store, by loading a larger chunk and reversing a register.
// Ideally LLVM would do this for us, as it knows better than we do
// whether unaligned reads are efficient (since that changes between
// different ARM versions, for example) and what the best chunk size
// would be. Unfortunately, as of LLVM 4.0 (2017-05) it only unrolls
// the loop, so we need to do this ourselves. (Hypothesis: reverse
// is troublesome because the sides can be aligned differently --
// will be, when the length is odd -- so there's no way of emitting
// pre- and postludes to use fully-aligned SIMD in the middle.)
let fast_unaligned = cfg!(any(target_arch = "x86", target_arch = "x86_64"));
if fast_unaligned && mem::size_of::<T>() == 1 {
// Use the llvm.bswap intrinsic to reverse u8s in a usize
let chunk = mem::size_of::<usize>();
while i + chunk - 1 < ln / 2 {
unsafe {
let pa: *mut T = self.get_unchecked_mut(i);
let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
let va = ptr::read_unaligned(pa as *mut usize);
let vb = ptr::read_unaligned(pb as *mut usize);
ptr::write_unaligned(pa as *mut usize, vb.swap_bytes());
ptr::write_unaligned(pb as *mut usize, va.swap_bytes());
}
i += chunk;
}
}
if fast_unaligned && mem::size_of::<T>() == 2 {
// Use rotate-by-16 to reverse u16s in a u32
let chunk = mem::size_of::<u32>() / 2;
while i + chunk - 1 < ln / 2 {
unsafe {
let pa: *mut T = self.get_unchecked_mut(i);
let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
let va = ptr::read_unaligned(pa as *mut u32);
let vb = ptr::read_unaligned(pb as *mut u32);
ptr::write_unaligned(pa as *mut u32, vb.rotate_left(16));
ptr::write_unaligned(pb as *mut u32, va.rotate_left(16));
}
i += chunk;
}
}
while i < ln / 2 {
// Unsafe swap to avoid the bounds check in safe swap.
unsafe {
let pa: *mut T = self.get_unchecked_mut(i);
let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
ptr::swap(pa, pb);
}
i += 1;
}
}
/// Returns an iterator over the slice.
///
/// # Examples
///
/// ```
/// let x = &[1, 2, 4];
/// let mut iterator = x.iter();
///
/// assert_eq!(iterator.next(), Some(&1));
/// assert_eq!(iterator.next(), Some(&2));
/// assert_eq!(iterator.next(), Some(&4));
/// assert_eq!(iterator.next(), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn iter(&self) -> Iter<'_, T> {
unsafe {
let ptr = self.as_ptr();
assume(!ptr.is_null());
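            // For a zero-sized `T`, `ptr.add(len)` would not move the pointer at all
            // (the element size is 0), so the end pointer is advanced by `len` *bytes*
            // instead and effectively encodes the remaining length of the iterator.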
let end = if mem::size_of::<T>() == 0 {
(ptr as *const u8).wrapping_add(self.len()) as *const T
} else {
ptr.add(self.len())
};
Iter { ptr: NonNull::new_unchecked(ptr as *mut T), end, _marker: marker::PhantomData }
}
}
/// Returns an iterator that allows modifying each value.
///
/// # Examples
///
/// ```
/// let x = &mut [1, 2, 4];
/// for elem in x.iter_mut() {
/// *elem += 2;
/// }
/// assert_eq!(x, &[3, 4, 6]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn iter_mut(&mut self) -> IterMut<'_, T> {
unsafe {
let ptr = self.as_mut_ptr();
assume(!ptr.is_null());
let end = if mem::size_of::<T>() == 0 {
(ptr as *mut u8).wrapping_add(self.len()) as *mut T
} else {
ptr.add(self.len())
};
IterMut { ptr: NonNull::new_unchecked(ptr), end, _marker: marker::PhantomData }
}
}
/// Returns an iterator over all contiguous windows of length
/// `size`. The windows overlap. If the slice is shorter than
/// `size`, the iterator returns no values.
///
/// # Panics
///
/// Panics if `size` is 0.
///
/// # Examples
///
/// ```
/// let slice = ['r', 'u', 's', 't'];
/// let mut iter = slice.windows(2);
/// assert_eq!(iter.next().unwrap(), &['r', 'u']);
/// assert_eq!(iter.next().unwrap(), &['u', 's']);
/// assert_eq!(iter.next().unwrap(), &['s', 't']);
/// assert!(iter.next().is_none());
/// ```
///
/// If the slice is shorter than `size`:
///
/// ```
/// let slice = ['f', 'o', 'o'];
/// let mut iter = slice.windows(4);
/// assert!(iter.next().is_none());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn windows(&self, size: usize) -> Windows<'_, T> {
assert!(size != 0);
Windows { v: self, size }
}
/// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
/// beginning of the slice.
///
/// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
/// slice, then the last chunk will not have length `chunk_size`.
///
/// See [`chunks_exact`] for a variant of this iterator that returns chunks of always exactly
/// `chunk_size` elements, and [`rchunks`] for the same iterator but starting at the end of the
/// slice.
///
/// # Panics
///
/// Panics if `chunk_size` is 0.
///
/// # Examples
///
/// ```
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let mut iter = slice.chunks(2);
/// assert_eq!(iter.next().unwrap(), &['l', 'o']);
/// assert_eq!(iter.next().unwrap(), &['r', 'e']);
/// assert_eq!(iter.next().unwrap(), &['m']);
/// assert!(iter.next().is_none());
/// ```
///
/// [`chunks_exact`]: #method.chunks_exact
/// [`rchunks`]: #method.rchunks
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> {
assert!(chunk_size != 0);
Chunks { v: self, chunk_size }
}
/// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
/// beginning of the slice.
///
/// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
/// length of the slice, then the last chunk will not have length `chunk_size`.
///
/// See [`chunks_exact_mut`] for a variant of this iterator that returns chunks of always
/// exactly `chunk_size` elements, and [`rchunks_mut`] for the same iterator but starting at
/// the end of the slice.
///
/// # Panics
///
/// Panics if `chunk_size` is 0.
///
/// # Examples
///
/// ```
/// let v = &mut [0, 0, 0, 0, 0];
/// let mut count = 1;
///
/// for chunk in v.chunks_mut(2) {
/// for elem in chunk.iter_mut() {
/// *elem += count;
/// }
/// count += 1;
/// }
/// assert_eq!(v, &[1, 1, 2, 2, 3]);
/// ```
///
/// [`chunks_exact_mut`]: #method.chunks_exact_mut
/// [`rchunks_mut`]: #method.rchunks_mut
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<'_, T> {
assert!(chunk_size != 0);
ChunksMut { v: self, chunk_size }
}
/// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
/// beginning of the slice.
///
/// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
/// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
/// from the `remainder` function of the iterator.
///
/// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
/// resulting code better than in the case of [`chunks`].
///
/// See [`chunks`] for a variant of this iterator that also returns the remainder as a smaller
/// chunk, and [`rchunks_exact`] for the same iterator but starting at the end of the slice.
///
/// # Panics
///
/// Panics if `chunk_size` is 0.
///
/// # Examples
///
/// ```
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let mut iter = slice.chunks_exact(2);
/// assert_eq!(iter.next().unwrap(), &['l', 'o']);
/// assert_eq!(iter.next().unwrap(), &['r', 'e']);
/// assert!(iter.next().is_none());
/// assert_eq!(iter.remainder(), &['m']);
/// ```
///
/// [`chunks`]: #method.chunks
/// [`rchunks_exact`]: #method.rchunks_exact
#[stable(feature = "chunks_exact", since = "1.31.0")]
#[inline]
pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> {
assert!(chunk_size != 0);
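        // Split the remainder (`len % chunk_size` trailing elements) off up front
        // so that the main iterator only ever yields complete chunks.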
let rem = self.len() % chunk_size;
let len = self.len() - rem;
let (fst, snd) = self.split_at(len);
ChunksExact { v: fst, rem: snd, chunk_size }
}
/// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
/// beginning of the slice.
///
/// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
/// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
/// retrieved from the `into_remainder` function of the iterator.
///
/// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
/// resulting code better than in the case of [`chunks_mut`].
///
/// See [`chunks_mut`] for a variant of this iterator that also returns the remainder as a
/// smaller chunk, and [`rchunks_exact_mut`] for the same iterator but starting at the end of
/// the slice.
///
/// # Panics
///
/// Panics if `chunk_size` is 0.
///
/// # Examples
///
/// ```
/// let v = &mut [0, 0, 0, 0, 0];
/// let mut count = 1;
///
/// for chunk in v.chunks_exact_mut(2) {
/// for elem in chunk.iter_mut() {
/// *elem += count;
/// }
/// count += 1;
/// }
/// assert_eq!(v, &[1, 1, 2, 2, 0]);
/// ```
///
/// [`chunks_mut`]: #method.chunks_mut
/// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
#[stable(feature = "chunks_exact", since = "1.31.0")]
#[inline]
pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<'_, T> {
assert!(chunk_size != 0);
let rem = self.len() % chunk_size;
let len = self.len() - rem;
let (fst, snd) = self.split_at_mut(len);
ChunksExactMut { v: fst, rem: snd, chunk_size }
}
/// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
/// of the slice.
///
/// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
/// slice, then the last chunk will not have length `chunk_size`.
///
/// See [`rchunks_exact`] for a variant of this iterator that returns chunks of always exactly
/// `chunk_size` elements, and [`chunks`] for the same iterator but starting at the beginning
/// of the slice.
///
/// # Panics
///
/// Panics if `chunk_size` is 0.
///
/// # Examples
///
/// ```
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let mut iter = slice.rchunks(2);
/// assert_eq!(iter.next().unwrap(), &['e', 'm']);
/// assert_eq!(iter.next().unwrap(), &['o', 'r']);
/// assert_eq!(iter.next().unwrap(), &['l']);
/// assert!(iter.next().is_none());
/// ```
///
/// [`rchunks_exact`]: #method.rchunks_exact
/// [`chunks`]: #method.chunks
#[stable(feature = "rchunks", since = "1.31.0")]
#[inline]
pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> {
assert!(chunk_size != 0);
RChunks { v: self, chunk_size }
}
/// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
/// of the slice.
///
/// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
/// length of the slice, then the last chunk will not have length `chunk_size`.
///
/// See [`rchunks_exact_mut`] for a variant of this iterator that returns chunks of always
/// exactly `chunk_size` elements, and [`chunks_mut`] for the same iterator but starting at the
/// beginning of the slice.
///
/// # Panics
///
/// Panics if `chunk_size` is 0.
///
/// # Examples
///
/// ```
/// let v = &mut [0, 0, 0, 0, 0];
/// let mut count = 1;
///
/// for chunk in v.rchunks_mut(2) {
/// for elem in chunk.iter_mut() {
/// *elem += count;
/// }
/// count += 1;
/// }
/// assert_eq!(v, &[3, 2, 2, 1, 1]);
/// ```
///
/// [`rchunks_exact_mut`]: #method.rchunks_exact_mut
/// [`chunks_mut`]: #method.chunks_mut
#[stable(feature = "rchunks", since = "1.31.0")]
#[inline]
pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<'_, T> {
assert!(chunk_size != 0);
RChunksMut { v: self, chunk_size }
}
/// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the
/// end of the slice.
///
/// The chunks are slices and do not overlap. If `chunk_size` does not divide the length of the
/// slice, then the last up to `chunk_size-1` elements will be omitted and can be retrieved
/// from the `remainder` function of the iterator.
///
/// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
/// resulting code better than in the case of [`chunks`].
///
/// See [`rchunks`] for a variant of this iterator that also returns the remainder as a smaller
/// chunk, and [`chunks_exact`] for the same iterator but starting at the beginning of the
/// slice.
///
/// # Panics
///
/// Panics if `chunk_size` is 0.
///
/// # Examples
///
/// ```
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let mut iter = slice.rchunks_exact(2);
/// assert_eq!(iter.next().unwrap(), &['e', 'm']);
/// assert_eq!(iter.next().unwrap(), &['o', 'r']);
/// assert!(iter.next().is_none());
/// assert_eq!(iter.remainder(), &['l']);
/// ```
///
/// [`chunks`]: #method.chunks
/// [`rchunks`]: #method.rchunks
/// [`chunks_exact`]: #method.chunks_exact
#[stable(feature = "rchunks", since = "1.31.0")]
#[inline]
pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<'_, T> {
assert!(chunk_size != 0);
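        // Chunking starts from the end of the slice, so the remainder is the *first*
        // `len % chunk_size` elements rather than the trailing ones.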
let rem = self.len() % chunk_size;
let (fst, snd) = self.split_at(rem);
RChunksExact { v: snd, rem: fst, chunk_size }
}
/// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end
/// of the slice.
///
/// The chunks are mutable slices, and do not overlap. If `chunk_size` does not divide the
/// length of the slice, then the last up to `chunk_size-1` elements will be omitted and can be
/// retrieved from the `into_remainder` function of the iterator.
///
/// Due to each chunk having exactly `chunk_size` elements, the compiler can often optimize the
/// resulting code better than in the case of [`chunks_mut`].
///
/// See [`rchunks_mut`] for a variant of this iterator that also returns the remainder as a
/// smaller chunk, and [`chunks_exact_mut`] for the same iterator but starting at the beginning
/// of the slice.
///
/// # Panics
///
/// Panics if `chunk_size` is 0.
///
/// # Examples
///
/// ```
/// let v = &mut [0, 0, 0, 0, 0];
/// let mut count = 1;
///
/// for chunk in v.rchunks_exact_mut(2) {
/// for elem in chunk.iter_mut() {
/// *elem += count;
/// }
/// count += 1;
/// }
/// assert_eq!(v, &[0, 2, 2, 1, 1]);
/// ```
///
/// [`chunks_mut`]: #method.chunks_mut
/// [`rchunks_mut`]: #method.rchunks_mut
/// [`chunks_exact_mut`]: #method.chunks_exact_mut
#[stable(feature = "rchunks", since = "1.31.0")]
#[inline]
pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<'_, T> {
assert!(chunk_size != 0);
let rem = self.len() % chunk_size;
let (fst, snd) = self.split_at_mut(rem);
RChunksExactMut { v: snd, rem: fst, chunk_size }
}
/// Divides one slice into two at an index.
///
/// The first will contain all indices from `[0, mid)` (excluding
/// the index `mid` itself) and the second will contain all
/// indices from `[mid, len)` (excluding the index `len` itself).
///
/// # Panics
///
/// Panics if `mid > len`.
///
/// # Examples
///
/// ```
/// let v = [1, 2, 3, 4, 5, 6];
///
/// {
/// let (left, right) = v.split_at(0);
/// assert!(left == []);
/// assert!(right == [1, 2, 3, 4, 5, 6]);
/// }
///
/// {
/// let (left, right) = v.split_at(2);
/// assert!(left == [1, 2]);
/// assert!(right == [3, 4, 5, 6]);
/// }
///
/// {
/// let (left, right) = v.split_at(6);
/// assert!(left == [1, 2, 3, 4, 5, 6]);
/// assert!(right == []);
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
(&self[..mid], &self[mid..])
}
/// Divides one mutable slice into two at an index.
///
/// The first will contain all indices from `[0, mid)` (excluding
/// the index `mid` itself) and the second will contain all
/// indices from `[mid, len)` (excluding the index `len` itself).
///
/// # Panics
///
/// Panics if `mid > len`.
///
/// # Examples
///
/// ```
/// let mut v = [1, 0, 3, 0, 5, 6];
/// // scoped to restrict the lifetime of the borrows
/// {
/// let (left, right) = v.split_at_mut(2);
/// assert!(left == [1, 0]);
/// assert!(right == [3, 0, 5, 6]);
/// left[1] = 2;
/// right[1] = 4;
/// }
/// assert!(v == [1, 2, 3, 4, 5, 6]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
let len = self.len();
let ptr = self.as_mut_ptr();
unsafe {
assert!(mid <= len);
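            // `mid <= len` (checked above) guarantees that the two raw-parts slices
            // are in bounds and non-overlapping, so handing out two `&mut` is sound.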
(from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid))
}
}
/// Returns an iterator over subslices separated by elements that match
/// `pred`. The matched element is not contained in the subslices.
///
/// # Examples
///
/// ```
/// let slice = [10, 40, 33, 20];
/// let mut iter = slice.split(|num| num % 3 == 0);
///
/// assert_eq!(iter.next().unwrap(), &[10, 40]);
/// assert_eq!(iter.next().unwrap(), &[20]);
/// assert!(iter.next().is_none());
/// ```
///
/// If the first element is matched, an empty slice will be the first item
/// returned by the iterator. Similarly, if the last element in the slice
/// is matched, an empty slice will be the last item returned by the
/// iterator:
///
/// ```
/// let slice = [10, 40, 33];
/// let mut iter = slice.split(|num| num % 3 == 0);
///
/// assert_eq!(iter.next().unwrap(), &[10, 40]);
/// assert_eq!(iter.next().unwrap(), &[]);
/// assert!(iter.next().is_none());
/// ```
///
/// If two matched elements are directly adjacent, an empty slice will be
/// present between them:
///
/// ```
/// let slice = [10, 6, 33, 20];
/// let mut iter = slice.split(|num| num % 3 == 0);
///
/// assert_eq!(iter.next().unwrap(), &[10]);
/// assert_eq!(iter.next().unwrap(), &[]);
/// assert_eq!(iter.next().unwrap(), &[20]);
/// assert!(iter.next().is_none());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn split<F>(&self, pred: F) -> Split<'_, T, F>
where
F: FnMut(&T) -> bool,
{
Split { v: self, pred, finished: false }
}
/// Returns an iterator over mutable subslices separated by elements that
/// match `pred`. The matched element is not contained in the subslices.
///
/// # Examples
///
/// ```
/// let mut v = [10, 40, 30, 20, 60, 50];
///
/// for group in v.split_mut(|num| *num % 3 == 0) {
/// group[0] = 1;
/// }
/// assert_eq!(v, [1, 40, 30, 1, 60, 1]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn split_mut<F>(&mut self, pred: F) -> SplitMut<'_, T, F>
where
F: FnMut(&T) -> bool,
{
SplitMut { v: self, pred, finished: false }
}
/// Returns an iterator over subslices separated by elements that match
/// `pred`. The matched element is contained in the end of the previous
/// subslice as a terminator.
///
/// # Examples
///
/// ```
/// #![feature(split_inclusive)]
/// let slice = [10, 40, 33, 20];
/// let mut iter = slice.split_inclusive(|num| num % 3 == 0);
///
/// assert_eq!(iter.next().unwrap(), &[10, 40, 33]);
/// assert_eq!(iter.next().unwrap(), &[20]);
/// assert!(iter.next().is_none());
/// ```
///
/// If the last element of the slice is matched,
/// that element will be considered the terminator of the preceding slice.
/// That slice will be the last item returned by the iterator.
///
/// ```
/// #![feature(split_inclusive)]
/// let slice = [3, 10, 40, 33];
/// let mut iter = slice.split_inclusive(|num| num % 3 == 0);
///
/// assert_eq!(iter.next().unwrap(), &[3]);
/// assert_eq!(iter.next().unwrap(), &[10, 40, 33]);
/// assert!(iter.next().is_none());
/// ```
#[unstable(feature = "split_inclusive", issue = "72360")]
#[inline]
pub fn split_inclusive<F>(&self, pred: F) -> SplitInclusive<'_, T, F>
where
F: FnMut(&T) -> bool,
{
SplitInclusive { v: self, pred, finished: false }
}
/// Returns an iterator over mutable subslices separated by elements that
/// match `pred`. The matched element is contained in the previous
/// subslice as a terminator.
///
/// # Examples
///
/// ```
/// #![feature(split_inclusive)]
/// let mut v = [10, 40, 30, 20, 60, 50];
///
/// for group in v.split_inclusive_mut(|num| *num % 3 == 0) {
/// let terminator_idx = group.len()-1;
/// group[terminator_idx] = 1;
/// }
/// assert_eq!(v, [10, 40, 1, 20, 1, 1]);
/// ```
#[unstable(feature = "split_inclusive", issue = "72360")]
#[inline]
pub fn split_inclusive_mut<F>(&mut self, pred: F) -> SplitInclusiveMut<'_, T, F>
where
F: FnMut(&T) -> bool,
{
SplitInclusiveMut { v: self, pred, finished: false }
}
/// Returns an iterator over subslices separated by elements that match
/// `pred`, starting at the end of the slice and working backwards.
/// The matched element is not contained in the subslices.
///
/// # Examples
///
/// ```
/// let slice = [11, 22, 33, 0, 44, 55];
/// let mut iter = slice.rsplit(|num| *num == 0);
///
/// assert_eq!(iter.next().unwrap(), &[44, 55]);
/// assert_eq!(iter.next().unwrap(), &[11, 22, 33]);
/// assert_eq!(iter.next(), None);
/// ```
///
/// As with `split()`, if the first or last element is matched, an empty
/// slice will be the first (or last) item returned by the iterator.
///
/// ```
/// let v = &[0, 1, 1, 2, 3, 5, 8];
/// let mut it = v.rsplit(|n| *n % 2 == 0);
/// assert_eq!(it.next().unwrap(), &[]);
/// assert_eq!(it.next().unwrap(), &[3, 5]);
/// assert_eq!(it.next().unwrap(), &[1, 1]);
/// assert_eq!(it.next().unwrap(), &[]);
/// assert_eq!(it.next(), None);
/// ```
#[stable(feature = "slice_rsplit", since = "1.27.0")]
#[inline]
pub fn rsplit<F>(&self, pred: F) -> RSplit<'_, T, F>
where
F: FnMut(&T) -> bool,
{
RSplit { inner: self.split(pred) }
}
/// Returns an iterator over mutable subslices separated by elements that
/// match `pred`, starting at the end of the slice and working
/// backwards. The matched element is not contained in the subslices.
///
/// # Examples
///
/// ```
/// let mut v = [100, 400, 300, 200, 600, 500];
///
/// let mut count = 0;
/// for group in v.rsplit_mut(|num| *num % 3 == 0) {
/// count += 1;
/// group[0] = count;
/// }
/// assert_eq!(v, [3, 400, 300, 2, 600, 1]);
/// ```
///
#[stable(feature = "slice_rsplit", since = "1.27.0")]
#[inline]
pub fn rsplit_mut<F>(&mut self, pred: F) -> RSplitMut<'_, T, F>
where
F: FnMut(&T) -> bool,
{
RSplitMut { inner: self.split_mut(pred) }
}
/// Returns an iterator over subslices separated by elements that match
/// `pred`, limited to returning at most `n` items. The matched element is
/// not contained in the subslices.
///
/// The last element returned, if any, will contain the remainder of the
/// slice.
///
/// # Examples
///
/// Print the slice split once by numbers divisible by 3 (i.e., `[10, 40]`,
/// `[20, 60, 50]`):
///
/// ```
/// let v = [10, 40, 30, 20, 60, 50];
///
/// for group in v.splitn(2, |num| *num % 3 == 0) {
/// println!("{:?}", group);
/// }
/// ```
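    ///
    /// The same two groups can also be checked directly with assertions; a brief
    /// sketch of the expected output:
    ///
    /// ```
    /// let v = [10, 40, 30, 20, 60, 50];
    /// let mut iter = v.splitn(2, |num| *num % 3 == 0);
    ///
    /// assert_eq!(iter.next().unwrap(), &[10, 40]);
    /// assert_eq!(iter.next().unwrap(), &[20, 60, 50]);
    /// assert!(iter.next().is_none());
    /// ```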
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn splitn<F>(&self, n: usize, pred: F) -> SplitN<'_, T, F>
where
F: FnMut(&T) -> bool,
{
SplitN { inner: GenericSplitN { iter: self.split(pred), count: n } }
}
/// Returns an iterator over subslices separated by elements that match
/// `pred`, limited to returning at most `n` items. The matched element is
/// not contained in the subslices.
///
/// The last element returned, if any, will contain the remainder of the
/// slice.
///
/// # Examples
///
/// ```
/// let mut v = [10, 40, 30, 20, 60, 50];
///
/// for group in v.splitn_mut(2, |num| *num % 3 == 0) {
/// group[0] = 1;
/// }
/// assert_eq!(v, [1, 40, 30, 1, 60, 50]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn splitn_mut<F>(&mut self, n: usize, pred: F) -> SplitNMut<'_, T, F>
where
F: FnMut(&T) -> bool,
{
SplitNMut { inner: GenericSplitN { iter: self.split_mut(pred), count: n } }
}
/// Returns an iterator over subslices separated by elements that match
/// `pred` limited to returning at most `n` items. This starts at the end of
/// the slice and works backwards. The matched element is not contained in
/// the subslices.
///
/// The last element returned, if any, will contain the remainder of the
/// slice.
///
/// # Examples
///
/// Print the slice split once, starting from the end, by numbers divisible
/// by 3 (i.e., `[50]`, `[10, 40, 30, 20]`):
///
/// ```
/// let v = [10, 40, 30, 20, 60, 50];
///
/// for group in v.rsplitn(2, |num| *num % 3 == 0) {
/// println!("{:?}", group);
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn rsplitn<F>(&self, n: usize, pred: F) -> RSplitN<'_, T, F>
where
F: FnMut(&T) -> bool,
{
RSplitN { inner: GenericSplitN { iter: self.rsplit(pred), count: n } }
}
/// Returns an iterator over subslices separated by elements that match
/// `pred` limited to returning at most `n` items. This starts at the end of
/// the slice and works backwards. The matched element is not contained in
/// the subslices.
///
/// The last element returned, if any, will contain the remainder of the
/// slice.
///
/// # Examples
///
/// ```
/// let mut s = [10, 40, 30, 20, 60, 50];
///
/// for group in s.rsplitn_mut(2, |num| *num % 3 == 0) {
/// group[0] = 1;
/// }
/// assert_eq!(s, [1, 40, 30, 20, 60, 1]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn rsplitn_mut<F>(&mut self, n: usize, pred: F) -> RSplitNMut<'_, T, F>
where
F: FnMut(&T) -> bool,
{
RSplitNMut { inner: GenericSplitN { iter: self.rsplit_mut(pred), count: n } }
}
/// Returns `true` if the slice contains an element with the given value.
///
/// # Examples
///
/// ```
/// let v = [10, 40, 30];
/// assert!(v.contains(&30));
/// assert!(!v.contains(&50));
/// ```
///
/// If you do not have an `&T`, but just an `&U` such that `T: Borrow<U>`
/// (e.g. `String: Borrow<str>`), you can use `iter().any`:
///
/// ```
/// let v = [String::from("hello"), String::from("world")]; // slice of `String`
/// assert!(v.iter().any(|e| e == "hello")); // search with `&str`
/// assert!(!v.iter().any(|e| e == "hi"));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn contains(&self, x: &T) -> bool
where
T: PartialEq,
{
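        // Delegates to an internal helper trait so that byte slices can use a
        // specialized (memchr-based) search instead of a plain linear scan.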
x.slice_contains(self)
}
/// Returns `true` if `needle` is a prefix of the slice.
///
/// # Examples
///
/// ```
/// let v = [10, 40, 30];
/// assert!(v.starts_with(&[10]));
/// assert!(v.starts_with(&[10, 40]));
/// assert!(!v.starts_with(&[50]));
/// assert!(!v.starts_with(&[10, 50]));
/// ```
///
/// Always returns `true` if `needle` is an empty slice:
///
/// ```
/// let v = &[10, 40, 30];
/// assert!(v.starts_with(&[]));
/// let v: &[u8] = &[];
/// assert!(v.starts_with(&[]));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn starts_with(&self, needle: &[T]) -> bool
where
T: PartialEq,
{
let n = needle.len();
self.len() >= n && needle == &self[..n]
}
/// Returns `true` if `needle` is a suffix of the slice.
///
/// # Examples
///
/// ```
/// let v = [10, 40, 30];
/// assert!(v.ends_with(&[30]));
/// assert!(v.ends_with(&[40, 30]));
/// assert!(!v.ends_with(&[50]));
/// assert!(!v.ends_with(&[50, 30]));
/// ```
///
/// Always returns `true` if `needle` is an empty slice:
///
/// ```
/// let v = &[10, 40, 30];
/// assert!(v.ends_with(&[]));
/// let v: &[u8] = &[];
/// assert!(v.ends_with(&[]));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn ends_with(&self, needle: &[T]) -> bool
where
T: PartialEq,
{
let (m, n) = (self.len(), needle.len());
m >= n && needle == &self[m - n..]
}
/// Binary searches this sorted slice for a given element.
///
/// If the value is found then [`Result::Ok`] is returned, containing the
/// index of the matching element. If there are multiple matches, then any
/// one of the matches could be returned. If the value is not found then
/// [`Result::Err`] is returned, containing the index where a matching
/// element could be inserted while maintaining sorted order.
///
/// # Examples
///
/// Looks up a series of four elements. The first is found, with a
/// uniquely determined position; the second and third are not
/// found; the fourth could match any position in `[1, 4]`.
///
/// ```
/// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
///
/// assert_eq!(s.binary_search(&13), Ok(9));
/// assert_eq!(s.binary_search(&4), Err(7));
/// assert_eq!(s.binary_search(&100), Err(13));
/// let r = s.binary_search(&1);
/// assert!(match r { Ok(1..=4) => true, _ => false, });
/// ```
///
/// If you want to insert an item to a sorted vector, while maintaining
/// sort order:
///
/// ```
/// let mut s = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
/// let num = 42;
/// let idx = s.binary_search(&num).unwrap_or_else(|x| x);
/// s.insert(idx, num);
/// assert_eq!(s, [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn binary_search(&self, x: &T) -> Result<usize, usize>
where
T: Ord,
{
self.binary_search_by(|p| p.cmp(x))
}
/// Binary searches this sorted slice with a comparator function.
///
/// The comparator function should implement an order consistent
/// with the sort order of the underlying slice, returning an
/// order code that indicates whether its argument is `Less`,
    /// `Equal` or `Greater` than the desired target.
///
/// If the value is found then [`Result::Ok`] is returned, containing the
/// index of the matching element. If there are multiple matches, then any
/// one of the matches could be returned. If the value is not found then
/// [`Result::Err`] is returned, containing the index where a matching
/// element could be inserted while maintaining sorted order.
///
/// # Examples
///
/// Looks up a series of four elements. The first is found, with a
/// uniquely determined position; the second and third are not
/// found; the fourth could match any position in `[1, 4]`.
///
/// ```
/// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
///
/// let seek = 13;
/// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Ok(9));
/// let seek = 4;
/// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(7));
/// let seek = 100;
/// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13));
/// let seek = 1;
/// let r = s.binary_search_by(|probe| probe.cmp(&seek));
/// assert!(match r { Ok(1..=4) => true, _ => false, });
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
where
F: FnMut(&'a T) -> Ordering,
{
let s = self;
let mut size = s.len();
if size == 0 {
return Err(0);
}
let mut base = 0usize;
while size > 1 {
let half = size / 2;
let mid = base + half;
// mid is always in [0, size), that means mid is >= 0 and < size.
// mid >= 0: by definition
// mid < size: mid = size / 2 + size / 4 + size / 8 ...
let cmp = f(unsafe { s.get_unchecked(mid) });
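            // If the probe compares greater than the target, the answer lies in the
            // lower half, so keep `base`; otherwise move `base` up to `mid`.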
base = if cmp == Greater { base } else { mid };
size -= half;
}
// base is always in [0, size) because base <= mid.
let cmp = f(unsafe { s.get_unchecked(base) });
if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
}
/// Binary searches this sorted slice with a key extraction function.
///
/// Assumes that the slice is sorted by the key, for instance with
/// [`sort_by_key`] using the same key extraction function.
///
/// If the value is found then [`Result::Ok`] is returned, containing the
/// index of the matching element. If there are multiple matches, then any
/// one of the matches could be returned. If the value is not found then
/// [`Result::Err`] is returned, containing the index where a matching
/// element could be inserted while maintaining sorted order.
///
/// [`sort_by_key`]: #method.sort_by_key
///
/// # Examples
///
/// Looks up a series of four elements in a slice of pairs sorted by
/// their second elements. The first is found, with a uniquely
/// determined position; the second and third are not found; the
/// fourth could match any position in `[1, 4]`.
///
/// ```
/// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1),
/// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13),
/// (1, 21), (2, 34), (4, 55)];
///
/// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9));
/// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7));
/// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13));
/// let r = s.binary_search_by_key(&1, |&(a,b)| b);
/// assert!(match r { Ok(1..=4) => true, _ => false, });
/// ```
#[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
#[inline]
pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
where
F: FnMut(&'a T) -> B,
B: Ord,
{
self.binary_search_by(|k| f(k).cmp(b))
}
/// Sorts the slice, but may not preserve the order of equal elements.
///
/// This sort is unstable (i.e., may reorder equal elements), in-place
/// (i.e., does not allocate), and `O(n * log(n))` worst-case.
///
/// # Current implementation
///
/// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
/// which combines the fast average case of randomized quicksort with the fast worst case of
/// heapsort, while achieving linear time on slices with certain patterns. It uses some
/// randomization to avoid degenerate cases, but with a fixed seed to always provide
/// deterministic behavior.
///
/// It is typically faster than stable sorting, except in a few special cases, e.g., when the
/// slice consists of several concatenated sorted sequences.
///
/// # Examples
///
/// ```
/// let mut v = [-5, 4, 1, -3, 2];
///
/// v.sort_unstable();
/// assert!(v == [-5, -3, 1, 2, 4]);
/// ```
///
/// [pdqsort]: https://github.com/orlp/pdqsort
#[stable(feature = "sort_unstable", since = "1.20.0")]
#[inline]
pub fn sort_unstable(&mut self)
where
T: Ord,
{
sort::quicksort(self, |a, b| a.lt(b));
}
/// Sorts the slice with a comparator function, but may not preserve the order of equal
/// elements.
///
/// This sort is unstable (i.e., may reorder equal elements), in-place
/// (i.e., does not allocate), and `O(n * log(n))` worst-case.
///
/// The comparator function must define a total ordering for the elements in the slice. If
/// the ordering is not total, the order of the elements is unspecified. An order is a
/// total order if it is (for all a, b and c):
///
/// * total and antisymmetric: exactly one of a < b, a == b or a > b is true; and
/// * transitive, a < b and b < c implies a < c. The same must hold for both == and >.
///
/// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
/// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
///
/// ```
/// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
/// floats.sort_unstable_by(|a, b| a.partial_cmp(b).unwrap());
/// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
/// ```
///
/// # Current implementation
///
/// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
/// which combines the fast average case of randomized quicksort with the fast worst case of
/// heapsort, while achieving linear time on slices with certain patterns. It uses some
/// randomization to avoid degenerate cases, but with a fixed seed to always provide
/// deterministic behavior.
///
/// It is typically faster than stable sorting, except in a few special cases, e.g., when the
/// slice consists of several concatenated sorted sequences.
///
/// # Examples
///
/// ```
/// let mut v = [5, 4, 1, 3, 2];
/// v.sort_unstable_by(|a, b| a.cmp(b));
/// assert!(v == [1, 2, 3, 4, 5]);
///
/// // reverse sorting
/// v.sort_unstable_by(|a, b| b.cmp(a));
/// assert!(v == [5, 4, 3, 2, 1]);
/// ```
///
/// [pdqsort]: https://github.com/orlp/pdqsort
#[stable(feature = "sort_unstable", since = "1.20.0")]
#[inline]
pub fn sort_unstable_by<F>(&mut self, mut compare: F)
where
F: FnMut(&T, &T) -> Ordering,
{
sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less);
}
/// Sorts the slice with a key extraction function, but may not preserve the order of equal
/// elements.
///
/// This sort is unstable (i.e., may reorder equal elements), in-place
/// (i.e., does not allocate), and `O(m * n * log(n))` worst-case, where the key function is
/// `O(m)`.
///
/// # Current implementation
///
/// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
/// which combines the fast average case of randomized quicksort with the fast worst case of
/// heapsort, while achieving linear time on slices with certain patterns. It uses some
/// randomization to avoid degenerate cases, but with a fixed seed to always provide
/// deterministic behavior.
///
/// Due to its key calling strategy, [`sort_unstable_by_key`](#method.sort_unstable_by_key)
/// is likely to be slower than [`sort_by_cached_key`](#method.sort_by_cached_key) in
/// cases where the key function is expensive.
///
/// # Examples
///
/// ```
/// let mut v = [-5i32, 4, 1, -3, 2];
///
/// v.sort_unstable_by_key(|k| k.abs());
/// assert!(v == [1, 2, -3, 4, -5]);
/// ```
///
/// [pdqsort]: https://github.com/orlp/pdqsort
#[stable(feature = "sort_unstable", since = "1.20.0")]
#[inline]
pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
where
F: FnMut(&T) -> K,
K: Ord,
{
sort::quicksort(self, |a, b| f(a).lt(&f(b)));
}
/// Reorder the slice such that the element at `index` is at its final sorted position.
///
/// This reordering has the additional property that any value at position `i < index` will be
/// less than or equal to any value at a position `j > index`. Additionally, this reordering is
/// unstable (i.e. any number of equal elements may end up at position `index`), in-place
    /// (i.e. does not allocate), and `O(n)` worst-case. This function is also known as "kth
/// element" in other libraries. It returns a triplet of the following values: all elements less
/// than the one at the given index, the value at the given index, and all elements greater than
/// the one at the given index.
///
/// # Current implementation
///
/// The current algorithm is based on the quickselect portion of the same quicksort algorithm
/// used for [`sort_unstable`].
///
/// [`sort_unstable`]: #method.sort_unstable
///
/// # Panics
///
/// Panics when `index >= len()`, meaning it always panics on empty slices.
///
/// # Examples
///
/// ```
/// #![feature(slice_partition_at_index)]
///
/// let mut v = [-5i32, 4, 1, -3, 2];
///
/// // Find the median
/// v.partition_at_index(2);
///
/// // We are only guaranteed the slice will be one of the following, based on the way we sort
/// // about the specified index.
/// assert!(v == [-3, -5, 1, 2, 4] ||
/// v == [-5, -3, 1, 2, 4] ||
/// v == [-3, -5, 1, 4, 2] ||
/// v == [-5, -3, 1, 4, 2]);
/// ```
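    ///
    /// The returned triplet can be used as well; a small sketch under the same
    /// unstable feature:
    ///
    /// ```
    /// #![feature(slice_partition_at_index)]
    ///
    /// let mut v = [-5i32, 4, 1, -3, 2];
    /// let (lesser, median, greater) = v.partition_at_index(2);
    ///
    /// assert_eq!(*median, 1);
    /// assert!(lesser.iter().all(|&x| x <= 1));
    /// assert!(greater.iter().all(|&x| x >= 1));
    /// ```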
#[unstable(feature = "slice_partition_at_index", issue = "55300")]
#[inline]
pub fn partition_at_index(&mut self, index: usize) -> (&mut [T], &mut T, &mut [T])
where
T: Ord,
{
let mut f = |a: &T, b: &T| a.lt(b);
sort::partition_at_index(self, index, &mut f)
}
/// Reorder the slice with a comparator function such that the element at `index` is at its
/// final sorted position.
///
/// This reordering has the additional property that any value at position `i < index` will be
/// less than or equal to any value at a position `j > index` using the comparator function.
/// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
/// position `index`), in-place (i.e. does not allocate), and `O(n)` worst-case. This function
/// is also known as "kth element" in other libraries. It returns a triplet of the following
/// values: all elements less than the one at the given index, the value at the given index,
/// and all elements greater than the one at the given index, using the provided comparator
/// function.
///
/// # Current implementation
///
/// The current algorithm is based on the quickselect portion of the same quicksort algorithm
/// used for [`sort_unstable`].
///
/// [`sort_unstable`]: #method.sort_unstable
///
/// # Panics
///
/// Panics when `index >= len()`, meaning it always panics on empty slices.
///
/// # Examples
///
/// ```
/// #![feature(slice_partition_at_index)]
///
/// let mut v = [-5i32, 4, 1, -3, 2];
///
/// // Find the median as if the slice were sorted in descending order.
/// v.partition_at_index_by(2, |a, b| b.cmp(a));
///
/// // We are only guaranteed the slice will be one of the following, based on the way we sort
/// // about the specified index.
/// assert!(v == [2, 4, 1, -5, -3] ||
/// v == [2, 4, 1, -3, -5] ||
/// v == [4, 2, 1, -5, -3] ||
/// v == [4, 2, 1, -3, -5]);
/// ```
#[unstable(feature = "slice_partition_at_index", issue = "55300")]
#[inline]
pub fn partition_at_index_by<F>(
&mut self,
index: usize,
mut compare: F,
) -> (&mut [T], &mut T, &mut [T])
where
F: FnMut(&T, &T) -> Ordering,
{
let mut f = |a: &T, b: &T| compare(a, b) == Less;
sort::partition_at_index(self, index, &mut f)
}
/// Reorder the slice with a key extraction function such that the element at `index` is at its
/// final sorted position.
///
/// This reordering has the additional property that any value at position `i < index` will be
/// less than or equal to any value at a position `j > index` using the key extraction function.
/// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
/// position `index`), in-place (i.e. does not allocate), and `O(n)` worst-case. This function
/// is also known as "kth element" in other libraries. It returns a triplet of the following
/// values: all elements less than the one at the given index, the value at the given index, and
/// all elements greater than the one at the given index, using the provided key extraction
/// function.
///
/// # Current implementation
///
/// The current algorithm is based on the quickselect portion of the same quicksort algorithm
/// used for [`sort_unstable`].
///
/// [`sort_unstable`]: #method.sort_unstable
///
/// # Panics
///
/// Panics when `index >= len()`, meaning it always panics on empty slices.
///
/// # Examples
///
/// ```
/// #![feature(slice_partition_at_index)]
///
/// let mut v = [-5i32, 4, 1, -3, 2];
///
/// // Return the median as if the array were sorted according to absolute value.
/// v.partition_at_index_by_key(2, |a| a.abs());
///
/// // We are only guaranteed the slice will be one of the following, based on the way we sort
/// // about the specified index.
/// assert!(v == [1, 2, -3, 4, -5] ||
/// v == [1, 2, -3, -5, 4] ||
/// v == [2, 1, -3, 4, -5] ||
/// v == [2, 1, -3, -5, 4]);
/// ```
#[unstable(feature = "slice_partition_at_index", issue = "55300")]
#[inline]
pub fn partition_at_index_by_key<K, F>(
&mut self,
index: usize,
mut f: F,
) -> (&mut [T], &mut T, &mut [T])
where
F: FnMut(&T) -> K,
K: Ord,
{
let mut g = |a: &T, b: &T| f(a).lt(&f(b));
sort::partition_at_index(self, index, &mut g)
}
/// Moves all consecutive repeated elements to the end of the slice according to the
/// [`PartialEq`] trait implementation.
///
/// Returns two slices. The first contains no consecutive repeated elements.
/// The second contains all the duplicates in no specified order.
///
/// If the slice is sorted, the first returned slice contains no duplicates.
///
/// # Examples
///
/// ```
/// #![feature(slice_partition_dedup)]
///
/// let mut slice = [1, 2, 2, 3, 3, 2, 1, 1];
///
/// let (dedup, duplicates) = slice.partition_dedup();
///
/// assert_eq!(dedup, [1, 2, 3, 2, 1]);
/// assert_eq!(duplicates, [2, 3, 1]);
/// ```
#[unstable(feature = "slice_partition_dedup", issue = "54279")]
#[inline]
pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T])
where
T: PartialEq,
{
self.partition_dedup_by(|a, b| a == b)
}
/// Moves all but the first of consecutive elements to the end of the slice satisfying
/// a given equality relation.
///
/// Returns two slices. The first contains no consecutive repeated elements.
/// The second contains all the duplicates in no specified order.
///
/// The `same_bucket` function is passed references to two elements from the slice and
/// must determine if the elements compare equal. The elements are passed in opposite order
/// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is moved
    /// to the end of the slice.
///
/// If the slice is sorted, the first returned slice contains no duplicates.
///
/// # Examples
///
/// ```
/// #![feature(slice_partition_dedup)]
///
/// let mut slice = ["foo", "Foo", "BAZ", "Bar", "bar", "baz", "BAZ"];
///
/// let (dedup, duplicates) = slice.partition_dedup_by(|a, b| a.eq_ignore_ascii_case(b));
///
/// assert_eq!(dedup, ["foo", "BAZ", "Bar", "baz"]);
/// assert_eq!(duplicates, ["bar", "Foo", "BAZ"]);
/// ```
#[unstable(feature = "slice_partition_dedup", issue = "54279")]
#[inline]
pub fn partition_dedup_by<F>(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T])
where
F: FnMut(&mut T, &mut T) -> bool,
{
// Although we have a mutable reference to `self`, we cannot make
// *arbitrary* changes. The `same_bucket` calls could panic, so we
// must ensure that the slice is in a valid state at all times.
//
// The way that we handle this is by using swaps; we iterate
// over all the elements, swapping as we go so that at the end
// the elements we wish to keep are in the front, and those we
// wish to reject are at the back. We can then split the slice.
// This operation is still `O(n)`.
//
// Example: We start in this state, where `r` represents "next
// read" and `w` represents "next_write`.
//
// r
// +---+---+---+---+---+---+
// | 0 | 1 | 1 | 2 | 3 | 3 |
// +---+---+---+---+---+---+
// w
//
// Comparing self[r] against self[w-1], this is not a duplicate, so
// we swap self[r] and self[w] (no effect as r==w) and then increment both
// r and w, leaving us with:
//
// r
// +---+---+---+---+---+---+
// | 0 | 1 | 1 | 2 | 3 | 3 |
// +---+---+---+---+---+---+
// w
//
// Comparing self[r] against self[w-1], this value is a duplicate,
// so we increment `r` but leave everything else unchanged:
//
// r
// +---+---+---+---+---+---+
// | 0 | 1 | 1 | 2 | 3 | 3 |
// +---+---+---+---+---+---+
// w
//
// Comparing self[r] against self[w-1], this is not a duplicate,
// so swap self[r] and self[w] and advance r and w:
//
// r
// +---+---+---+---+---+---+
// | 0 | 1 | 2 | 1 | 3 | 3 |
// +---+---+---+---+---+---+
// w
//
// Not a duplicate, repeat:
//
// r
// +---+---+---+---+---+---+
// | 0 | 1 | 2 | 3 | 1 | 3 |
// +---+---+---+---+---+---+
// w
//
// Duplicate, advance r. End of slice. Split at w.
let len = self.len();
if len <= 1 {
return (self, &mut []);
}
let ptr = self.as_mut_ptr();
let mut next_read: usize = 1;
let mut next_write: usize = 1;
unsafe {
// Avoid bounds checks by using raw pointers.
while next_read < len {
let ptr_read = ptr.add(next_read);
let prev_ptr_write = ptr.add(next_write - 1);
if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
if next_read != next_write {
let ptr_write = prev_ptr_write.offset(1);
mem::swap(&mut *ptr_read, &mut *ptr_write);
}
next_write += 1;
}
next_read += 1;
}
}
self.split_at_mut(next_write)
}
/// Moves all but the first of consecutive elements to the end of the slice that resolve
/// to the same key.
///
/// Returns two slices. The first contains no consecutive repeated elements.
/// The second contains all the duplicates in no specified order.
///
/// If the slice is sorted, the first returned slice contains no duplicates.
///
/// # Examples
///
/// ```
/// #![feature(slice_partition_dedup)]
///
/// let mut slice = [10, 20, 21, 30, 30, 20, 11, 13];
///
/// let (dedup, duplicates) = slice.partition_dedup_by_key(|i| *i / 10);
///
/// assert_eq!(dedup, [10, 20, 30, 20, 11]);
/// assert_eq!(duplicates, [21, 30, 13]);
/// ```
#[unstable(feature = "slice_partition_dedup", issue = "54279")]
#[inline]
pub fn partition_dedup_by_key<K, F>(&mut self, mut key: F) -> (&mut [T], &mut [T])
where
F: FnMut(&mut T) -> K,
K: PartialEq,
{
self.partition_dedup_by(|a, b| key(a) == key(b))
}
/// Rotates the slice in-place such that the first `mid` elements of the
/// slice move to the end while the last `self.len() - mid` elements move to
/// the front. After calling `rotate_left`, the element previously at index
/// `mid` will become the first element in the slice.
///
/// # Panics
///
/// This function will panic if `mid` is greater than the length of the
/// slice. Note that `mid == self.len()` does _not_ panic and is a no-op
/// rotation.
///
/// # Complexity
///
/// Takes linear (in `self.len()`) time.
///
/// # Examples
///
/// ```
/// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
/// a.rotate_left(2);
/// assert_eq!(a, ['c', 'd', 'e', 'f', 'a', 'b']);
/// ```
///
/// Rotating a subslice:
///
/// ```
/// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
/// a[1..5].rotate_left(1);
/// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']);
/// ```
#[stable(feature = "slice_rotate", since = "1.26.0")]
pub fn rotate_left(&mut self, mid: usize) {
assert!(mid <= self.len());
let k = self.len() - mid;
unsafe {
let p = self.as_mut_ptr();
rotate::ptr_rotate(mid, p.add(mid), k);
}
}
/// Rotates the slice in-place such that the first `self.len() - k`
/// elements of the slice move to the end while the last `k` elements move
/// to the front. After calling `rotate_right`, the element previously at
/// index `self.len() - k` will become the first element in the slice.
///
/// # Panics
///
/// This function will panic if `k` is greater than the length of the
/// slice. Note that `k == self.len()` does _not_ panic and is a no-op
/// rotation.
///
/// # Complexity
///
/// Takes linear (in `self.len()`) time.
///
/// # Examples
///
/// ```
/// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
/// a.rotate_right(2);
/// assert_eq!(a, ['e', 'f', 'a', 'b', 'c', 'd']);
/// ```
///
/// Rotate a subslice:
///
/// ```
/// let mut a = ['a', 'b', 'c', 'd', 'e', 'f'];
/// a[1..5].rotate_right(1);
/// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']);
/// ```
#[stable(feature = "slice_rotate", since = "1.26.0")]
pub fn rotate_right(&mut self, k: usize) {
assert!(k <= self.len());
let mid = self.len() - k;
unsafe {
let p = self.as_mut_ptr();
rotate::ptr_rotate(mid, p.add(mid), k);
}
}
/// Fills `self` with elements by cloning `value`.
///
/// # Examples
///
/// ```
/// #![feature(slice_fill)]
///
/// let mut buf = vec![0; 10];
/// buf.fill(1);
/// assert_eq!(buf, vec![1; 10]);
/// ```
#[unstable(feature = "slice_fill", issue = "70758")]
pub fn fill(&mut self, value: T)
where
T: Clone,
{
if let Some((last, elems)) = self.split_last_mut() {
for el in elems {
el.clone_from(&value);
}
*last = value
}
}
/// Copies the elements from `src` into `self`.
///
/// The length of `src` must be the same as `self`.
///
/// If `T` implements `Copy`, it can be more performant to use
/// [`copy_from_slice`].
///
/// # Panics
///
/// This function will panic if the two slices have different lengths.
///
/// # Examples
///
/// Cloning two elements from a slice into another:
///
/// ```
/// let src = [1, 2, 3, 4];
/// let mut dst = [0, 0];
///
/// // Because the slices have to be the same length,
/// // we slice the source slice from four elements
/// // to two. It will panic if we don't do this.
/// dst.clone_from_slice(&src[2..]);
///
/// assert_eq!(src, [1, 2, 3, 4]);
/// assert_eq!(dst, [3, 4]);
/// ```
///
/// Rust enforces that there can only be one mutable reference with no
/// immutable references to a particular piece of data in a particular
/// scope. Because of this, attempting to use `clone_from_slice` on a
/// single slice will result in a compile failure:
///
/// ```compile_fail
/// let mut slice = [1, 2, 3, 4, 5];
///
/// slice[..2].clone_from_slice(&slice[3..]); // compile fail!
/// ```
///
/// To work around this, we can use [`split_at_mut`] to create two distinct
/// sub-slices from a slice:
///
/// ```
/// let mut slice = [1, 2, 3, 4, 5];
///
/// {
/// let (left, right) = slice.split_at_mut(2);
/// left.clone_from_slice(&right[1..]);
/// }
///
/// assert_eq!(slice, [4, 5, 3, 4, 5]);
/// ```
///
/// [`copy_from_slice`]: #method.copy_from_slice
/// [`split_at_mut`]: #method.split_at_mut
#[stable(feature = "clone_from_slice", since = "1.7.0")]
pub fn clone_from_slice(&mut self, src: &[T])
where
T: Clone,
{
assert!(self.len() == src.len(), "destination and source slices have different lengths");
// NOTE: We need to explicitly slice them to the same length
// for bounds checking to be elided, and the optimizer will
// generate memcpy for simple cases (for example T = u8).
let len = self.len();
let src = &src[..len];
for i in 0..len {
self[i].clone_from(&src[i]);
}
}
/// Copies all elements from `src` into `self`, using a memcpy.
///
/// The length of `src` must be the same as `self`.
///
/// If `T` does not implement `Copy`, use [`clone_from_slice`].
///
/// # Panics
///
/// This function will panic if the two slices have different lengths.
///
/// # Examples
///
/// Copying two elements from a slice into another:
///
/// ```
/// let src = [1, 2, 3, 4];
/// let mut dst = [0, 0];
///
/// // Because the slices have to be the same length,
/// // we slice the source slice from four elements
/// // to two. It will panic if we don't do this.
/// dst.copy_from_slice(&src[2..]);
///
/// assert_eq!(src, [1, 2, 3, 4]);
/// assert_eq!(dst, [3, 4]);
/// ```
///
/// Rust enforces that there can only be one mutable reference with no
/// immutable references to a particular piece of data in a particular
/// scope. Because of this, attempting to use `copy_from_slice` on a
/// single slice will result in a compile failure:
///
/// ```compile_fail
/// let mut slice = [1, 2, 3, 4, 5];
///
/// slice[..2].copy_from_slice(&slice[3..]); // compile fail!
/// ```
///
/// To work around this, we can use [`split_at_mut`] to create two distinct
/// sub-slices from a slice:
///
/// ```
/// let mut slice = [1, 2, 3, 4, 5];
///
/// {
/// let (left, right) = slice.split_at_mut(2);
/// left.copy_from_slice(&right[1..]);
/// }
///
/// assert_eq!(slice, [4, 5, 3, 4, 5]);
/// ```
///
/// [`clone_from_slice`]: #method.clone_from_slice
/// [`split_at_mut`]: #method.split_at_mut
#[stable(feature = "copy_from_slice", since = "1.9.0")]
pub fn copy_from_slice(&mut self, src: &[T])
where
T: Copy,
{
assert_eq!(self.len(), src.len(), "destination and source slices have different lengths");
unsafe {
ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr(), self.len());
}
}
/// Copies elements from one part of the slice to another part of itself,
/// using a memmove.
///
/// `src` is the range within `self` to copy from. `dest` is the starting
/// index of the range within `self` to copy to, which will have the same
/// length as `src`. The two ranges may overlap. The ends of the two ranges
/// must be less than or equal to `self.len()`.
///
/// # Panics
///
/// This function will panic if either range exceeds the end of the slice,
/// or if the end of `src` is before the start.
///
/// # Examples
///
/// Copying four bytes within a slice:
///
/// ```
/// let mut bytes = *b"Hello, World!";
///
/// bytes.copy_within(1..5, 8);
///
/// assert_eq!(&bytes, b"Hello, Wello!");
/// ```
#[stable(feature = "copy_within", since = "1.37.0")]
#[track_caller]
pub fn copy_within<R: ops::RangeBounds<usize>>(&mut self, src: R, dest: usize)
where
T: Copy,
{
let src_start = match src.start_bound() {
ops::Bound::Included(&n) => n,
ops::Bound::Excluded(&n) => {
n.checked_add(1).unwrap_or_else(|| slice_index_overflow_fail())
}
ops::Bound::Unbounded => 0,
};
let src_end = match src.end_bound() {
ops::Bound::Included(&n) => {
n.checked_add(1).unwrap_or_else(|| slice_index_overflow_fail())
}
ops::Bound::Excluded(&n) => n,
ops::Bound::Unbounded => self.len(),
};
assert!(src_start <= src_end, "src end is before src start");
assert!(src_end <= self.len(), "src is out of bounds");
let count = src_end - src_start;
assert!(dest <= self.len() - count, "dest is out of bounds");
unsafe {
ptr::copy(self.as_ptr().add(src_start), self.as_mut_ptr().add(dest), count);
}
}
/// Swaps all elements in `self` with those in `other`.
///
/// The length of `other` must be the same as `self`.
///
/// # Panics
///
/// This function will panic if the two slices have different lengths.
///
/// # Example
///
/// Swapping two elements across slices:
///
/// ```
/// let mut slice1 = [0, 0];
/// let mut slice2 = [1, 2, 3, 4];
///
/// slice1.swap_with_slice(&mut slice2[2..]);
///
/// assert_eq!(slice1, [3, 4]);
/// assert_eq!(slice2, [1, 2, 0, 0]);
/// ```
///
/// Rust enforces that there can only be one mutable reference to a
/// particular piece of data in a particular scope. Because of this,
/// attempting to use `swap_with_slice` on a single slice will result in
/// a compile failure:
///
/// ```compile_fail
/// let mut slice = [1, 2, 3, 4, 5];
/// slice[..2].swap_with_slice(&mut slice[3..]); // compile fail!
/// ```
///
/// To work around this, we can use [`split_at_mut`] to create two distinct
/// mutable sub-slices from a slice:
///
/// ```
/// let mut slice = [1, 2, 3, 4, 5];
///
/// {
/// let (left, right) = slice.split_at_mut(2);
/// left.swap_with_slice(&mut right[1..]);
/// }
///
/// assert_eq!(slice, [4, 5, 3, 1, 2]);
/// ```
///
/// [`split_at_mut`]: #method.split_at_mut
#[stable(feature = "swap_with_slice", since = "1.27.0")]
pub fn swap_with_slice(&mut self, other: &mut [T]) {
assert!(self.len() == other.len(), "destination and source slices have different lengths");
unsafe {
ptr::swap_nonoverlapping(self.as_mut_ptr(), other.as_mut_ptr(), self.len());
}
}
/// Function to calculate lengths of the middle and trailing slice for `align_to{,_mut}`.
fn align_to_offsets<U>(&self) -> (usize, usize) {
// What we are going to do with `rest` is figure out how many `U`s we can fit into
// the lowest possible number of `T`s, and how many `T`s each such "multiple" requires.
//
// Consider for example T=u8 U=u16. Then we can put 1 U in 2 Ts. Simple. Now, consider
// for example a case where size_of::<T> = 16, size_of::<U> = 24. We can put 2 Us in
// place of every 3 Ts in the `rest` slice. A bit more complicated.
//
// Formula to calculate this is:
//
// Us = lcm(size_of::<T>, size_of::<U>) / size_of::<U>
// Ts = lcm(size_of::<T>, size_of::<U>) / size_of::<T>
//
// Expanded and simplified:
//
// Us = size_of::<T> / gcd(size_of::<T>, size_of::<U>)
// Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
//
// Luckily, since all of this is constant-evaluated, performance is not a concern here.
#[inline]
fn gcd(a: usize, b: usize) -> usize {
use crate::intrinsics;
// Iterative Stein's algorithm (binary GCD).
// We should still make this `const fn` (and revert to the recursive algorithm if we do),
// because relying on LLVM to const-evaluate all of this is… well, it makes me uncomfortable.
let (ctz_a, mut ctz_b) = unsafe {
if a == 0 {
return b;
}
if b == 0 {
return a;
}
(intrinsics::cttz_nonzero(a), intrinsics::cttz_nonzero(b))
};
let k = ctz_a.min(ctz_b);
let mut a = a >> ctz_a;
let mut b = b;
loop {
// remove all factors of 2 from b
b >>= ctz_b;
if a > b {
mem::swap(&mut a, &mut b);
}
b = b - a;
unsafe {
if b == 0 {
break;
}
ctz_b = intrinsics::cttz_nonzero(b);
}
}
a << k
}
let gcd: usize = gcd(mem::size_of::<T>(), mem::size_of::<U>());
let ts: usize = mem::size_of::<U>() / gcd;
let us: usize = mem::size_of::<T>() / gcd;
// Armed with this knowledge, we can find how many `U`s we can fit!
let us_len = self.len() / ts * us;
// And how many `T`s will be in the trailing slice!
let ts_len = self.len() % ts;
(us_len, ts_len)
}
/// Transmute the slice to a slice of another type, ensuring alignment of the types is
/// maintained.
///
/// This method splits the slice into three distinct slices: prefix, correctly aligned middle
/// slice of a new type, and the suffix slice. The method may make the middle slice the greatest
/// length possible for a given type and input slice, but only your algorithm's performance
/// should depend on that, not its correctness. It is permissible for all of the input data to
/// be returned as the prefix or suffix slice.
///
/// This method has no purpose when either input element `T` or output element `U` are
/// zero-sized and will return the original slice without splitting anything.
///
/// # Safety
///
/// This method is essentially a `transmute` with respect to the elements in the returned
/// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// unsafe {
/// let bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
/// let (prefix, shorts, suffix) = bytes.align_to::<u16>();
/// // less_efficient_algorithm_for_bytes(prefix);
/// // more_efficient_algorithm_for_aligned_shorts(shorts);
/// // less_efficient_algorithm_for_bytes(suffix);
/// }
/// ```
#[stable(feature = "slice_align_to", since = "1.30.0")]
pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
// Note that most of this function will be constant-evaluated.
if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
// Handle ZSTs specially, which is to say: don't handle them at all.
return (self, &[], &[]);
}
// First, find the point at which to split between the first and second slice. This is
// easy with `ptr.align_offset`.
let ptr = self.as_ptr();
let offset = crate::ptr::align_offset(ptr, mem::align_of::<U>());
if offset > self.len() {
(self, &[], &[])
} else {
let (left, rest) = self.split_at(offset);
// now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
let (us_len, ts_len) = rest.align_to_offsets::<U>();
(
left,
from_raw_parts(rest.as_ptr() as *const U, us_len),
from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len),
)
}
}
/// Transmute the slice to a slice of another type, ensuring alignment of the types is
/// maintained.
///
/// This method splits the slice into three distinct slices: prefix, correctly aligned middle
/// slice of a new type, and the suffix slice. The method may make the middle slice the greatest
/// length possible for a given type and input slice, but only your algorithm's performance
/// should depend on that, not its correctness. It is permissible for all of the input data to
/// be returned as the prefix or suffix slice.
///
/// This method has no purpose when either input element `T` or output element `U` are
/// zero-sized and will return the original slice without splitting anything.
///
/// # Safety
///
/// This method is essentially a `transmute` with respect to the elements in the returned
/// middle slice, so all the usual caveats pertaining to `transmute::<T, U>` also apply here.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// unsafe {
/// let mut bytes: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
/// let (prefix, shorts, suffix) = bytes.align_to_mut::<u16>();
/// // less_efficient_algorithm_for_bytes(prefix);
/// // more_efficient_algorithm_for_aligned_shorts(shorts);
/// // less_efficient_algorithm_for_bytes(suffix);
/// }
/// ```
#[stable(feature = "slice_align_to", since = "1.30.0")]
pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
// Note that most of this function will be constant-evaluated.
if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
// Handle ZSTs specially, which is to say: don't handle them at all.
return (self, &mut [], &mut []);
}
// First, find the point at which to split between the first and second slice. This is
// easy with `ptr.align_offset`.
let ptr = self.as_ptr();
let offset = crate::ptr::align_offset(ptr, mem::align_of::<U>());
if offset > self.len() {
(self, &mut [], &mut [])
} else {
let (left, rest) = self.split_at_mut(offset);
// now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay
let (us_len, ts_len) = rest.align_to_offsets::<U>();
let rest_len = rest.len();
let mut_ptr = rest.as_mut_ptr();
// We can't use `rest` again after this, that would invalidate its alias `mut_ptr`!
(
left,
from_raw_parts_mut(mut_ptr as *mut U, us_len),
from_raw_parts_mut(mut_ptr.add(rest_len - ts_len), ts_len),
)
}
}
/// Checks if the elements of this slice are sorted.
///
/// That is, for each element `a` and its following element `b`, `a <= b` must hold. If the
/// slice yields exactly zero or one element, `true` is returned.
///
/// Note that if `Self::Item` is only `PartialOrd`, but not `Ord`, the above definition
/// implies that this function returns `false` if any two consecutive items are not
/// comparable.
///
/// # Examples
///
/// ```
/// #![feature(is_sorted)]
/// let empty: [i32; 0] = [];
///
/// assert!([1, 2, 2, 9].is_sorted());
/// assert!(![1, 3, 2, 4].is_sorted());
/// assert!([0].is_sorted());
/// assert!(empty.is_sorted());
/// assert!(![0.0, 1.0, f32::NAN].is_sorted());
/// ```
#[inline]
#[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
pub fn is_sorted(&self) -> bool
where
T: PartialOrd,
{
self.is_sorted_by(|a, b| a.partial_cmp(b))
}
/// Checks if the elements of this slice are sorted using the given comparator function.
///
/// Instead of using `PartialOrd::partial_cmp`, this function uses the given `compare`
/// function to determine the ordering of two elements. Apart from that, it's equivalent to
/// [`is_sorted`]; see its documentation for more information.
///
/// [`is_sorted`]: #method.is_sorted
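///
/// # Examples
///
/// A sketch of checking both ascending and descending order with `partial_cmp` as the
/// comparator:
///
/// ```
/// #![feature(is_sorted)]
///
/// assert!([1, 2, 2, 9].is_sorted_by(|a, b| a.partial_cmp(b)));
/// assert!(![1, 3, 2, 4].is_sorted_by(|a, b| a.partial_cmp(b)));
///
/// // Reversing the comparator's arguments checks for descending order.
/// assert!([4, 3, 2, 1].is_sorted_by(|a, b| b.partial_cmp(a)));
/// ```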
#[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
pub fn is_sorted_by<F>(&self, mut compare: F) -> bool
where
F: FnMut(&T, &T) -> Option<Ordering>,
{
self.iter().is_sorted_by(|a, b| compare(*a, *b))
}
/// Checks if the elements of this slice are sorted using the given key extraction function.
///
/// Instead of comparing the slice's elements directly, this function compares the keys of the
/// elements, as determined by `f`. Apart from that, it's equivalent to [`is_sorted`]; see its
/// documentation for more information.
///
/// [`is_sorted`]: #method.is_sorted
///
/// # Examples
///
/// ```
/// #![feature(is_sorted)]
///
/// assert!(["c", "bb", "aaa"].is_sorted_by_key(|s| s.len()));
/// assert!(![-2i32, -1, 0, 3].is_sorted_by_key(|n| n.abs()));
/// ```
#[inline]
#[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
pub fn is_sorted_by_key<F, K>(&self, f: F) -> bool
where
F: FnMut(&T) -> K,
K: PartialOrd,
{
self.iter().is_sorted_by_key(f)
}
}
#[lang = "slice_u8"]
#[cfg(not(test))]
impl [u8] {
/// Checks if all bytes in this slice are within the ASCII range.
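///
/// # Examples
///
/// A quick check on an ASCII and a non-ASCII byte string:
///
/// ```
/// assert!(b"hello, world!".is_ascii());
/// assert!(!"Grüße".as_bytes().is_ascii());
/// ```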
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
pub fn is_ascii(&self) -> bool {
self.iter().all(|b| b.is_ascii())
}
/// Checks that two slices are an ASCII case-insensitive match.
///
/// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
/// but without allocating and copying temporaries.
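///
/// # Examples
///
/// Comparing byte strings that differ only in ASCII case:
///
/// ```
/// assert!(b"Ferris".eq_ignore_ascii_case(b"FERRIS"));
/// assert!(!b"Ferris".eq_ignore_ascii_case(b"Ferrous"));
/// ```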
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a.eq_ignore_ascii_case(b))
}
/// Converts this slice to its ASCII upper case equivalent in-place.
///
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
/// but non-ASCII letters are unchanged.
///
/// To return a new uppercased value without modifying the existing one, use
/// [`to_ascii_uppercase`].
///
/// [`to_ascii_uppercase`]: #method.to_ascii_uppercase
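///
/// # Examples
///
/// Uppercasing a byte string in place:
///
/// ```
/// let mut bytes = *b"hello, world!";
/// bytes.make_ascii_uppercase();
/// assert_eq!(&bytes, b"HELLO, WORLD!");
/// ```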
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
pub fn make_ascii_uppercase(&mut self) {
for byte in self {
byte.make_ascii_uppercase();
}
}
/// Converts this slice to its ASCII lower case equivalent in-place.
///
/// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
/// but non-ASCII letters are unchanged.
///
/// To return a new lowercased value without modifying the existing one, use
/// [`to_ascii_lowercase`].
///
/// [`to_ascii_lowercase`]: #method.to_ascii_lowercase
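///
/// # Examples
///
/// Lowercasing a byte string in place:
///
/// ```
/// let mut bytes = *b"HELLO, World!";
/// bytes.make_ascii_lowercase();
/// assert_eq!(&bytes, b"hello, world!");
/// ```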
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
pub fn make_ascii_lowercase(&mut self) {
for byte in self {
byte.make_ascii_lowercase();
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, I> ops::Index<I> for [T]
where
I: SliceIndex<[T]>,
{
type Output = I::Output;
#[inline]
fn index(&self, index: I) -> &I::Output {
index.index(self)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, I> ops::IndexMut<I> for [T]
where
I: SliceIndex<[T]>,
{
#[inline]
fn index_mut(&mut self, index: I) -> &mut I::Output {
index.index_mut(self)
}
}
#[inline(never)]
#[cold]
#[track_caller]
fn slice_index_len_fail(index: usize, len: usize) -> ! {
panic!("index {} out of range for slice of length {}", index, len);
}
#[inline(never)]
#[cold]
#[track_caller]
fn slice_index_order_fail(index: usize, end: usize) -> ! {
panic!("slice index starts at {} but ends at {}", index, end);
}
#[inline(never)]
#[cold]
#[track_caller]
fn slice_index_overflow_fail() -> ! {
panic!("attempted to index slice up to maximum usize");
}
mod private_slice_index {
use super::ops;
#[stable(feature = "slice_get_slice", since = "1.28.0")]
pub trait Sealed {}
#[stable(feature = "slice_get_slice", since = "1.28.0")]
impl Sealed for usize {}
#[stable(feature = "slice_get_slice", since = "1.28.0")]
impl Sealed for ops::Range<usize> {}
#[stable(feature = "slice_get_slice", since = "1.28.0")]
impl Sealed for ops::RangeTo<usize> {}
#[stable(feature = "slice_get_slice", since = "1.28.0")]
impl Sealed for ops::RangeFrom<usize> {}
#[stable(feature = "slice_get_slice", since = "1.28.0")]
impl Sealed for ops::RangeFull {}
#[stable(feature = "slice_get_slice", since = "1.28.0")]
impl Sealed for ops::RangeInclusive<usize> {}
#[stable(feature = "slice_get_slice", since = "1.28.0")]
impl Sealed for ops::RangeToInclusive<usize> {}
}
/// A helper trait used for indexing operations.
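///
/// # Examples
///
/// The trait is normally used indirectly, through slice methods such as `get`
/// (the values below are arbitrary):
///
/// ```
/// let v = [10, 40, 30];
/// assert_eq!(v.get(1), Some(&40));
/// assert_eq!(v.get(1..3), Some(&[40, 30][..]));
/// assert_eq!(v.get(3), None);
/// ```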
#[stable(feature = "slice_get_slice", since = "1.28.0")]
#[rustc_on_unimplemented(
on(T = "str", label = "string indices are ranges of `usize`",),
on(
all(any(T = "str", T = "&str", T = "std::string::String"), _Self = "{integer}"),
note = "you can use `.chars().nth()` or `.bytes().nth()`
see chapter in The Book <https://doc.rust-lang.org/book/ch08-02-strings.html#indexing-into-strings>"
),
message = "the type `{T}` cannot be indexed by `{Self}`",
label = "slice indices are of type `usize` or ranges of `usize`"
)]
pub trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
/// The output type returned by methods.
#[stable(feature = "slice_get_slice", since = "1.28.0")]
type Output: ?Sized;
/// Returns a shared reference to the output at this location, if in
/// bounds.
#[unstable(feature = "slice_index_methods", issue = "none")]
fn get(self, slice: &T) -> Option<&Self::Output>;
/// Returns a mutable reference to the output at this location, if in
/// bounds.
#[unstable(feature = "slice_index_methods", issue = "none")]
fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;
/// Returns a shared reference to the output at this location, without
/// performing any bounds checking.
/// Calling this method with an out-of-bounds index is *[undefined behavior]*
/// even if the resulting reference is not used.
///
/// [undefined behavior]: ../../reference/behavior-considered-undefined.html
#[unstable(feature = "slice_index_methods", issue = "none")]
unsafe fn get_unchecked(self, slice: &T) -> &Self::Output;
/// Returns a mutable reference to the output at this location, without
/// performing any bounds checking.
/// Calling this method with an out-of-bounds index is *[undefined behavior]*
/// even if the resulting reference is not used.
///
/// [undefined behavior]: ../../reference/behavior-considered-undefined.html
#[unstable(feature = "slice_index_methods", issue = "none")]
unsafe fn get_unchecked_mut(self, slice: &mut T) -> &mut Self::Output;
/// Returns a shared reference to the output at this location, panicking
/// if out of bounds.
#[unstable(feature = "slice_index_methods", issue = "none")]
#[track_caller]
fn index(self, slice: &T) -> &Self::Output;
/// Returns a mutable reference to the output at this location, panicking
/// if out of bounds.
#[unstable(feature = "slice_index_methods", issue = "none")]
#[track_caller]
fn index_mut(self, slice: &mut T) -> &mut Self::Output;
}
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
impl<T> SliceIndex<[T]> for usize {
type Output = T;
#[inline]
fn get(self, slice: &[T]) -> Option<&T> {
if self < slice.len() { unsafe { Some(self.get_unchecked(slice)) } } else { None }
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
if self < slice.len() { unsafe { Some(self.get_unchecked_mut(slice)) } } else { None }
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &T {
&*slice.as_ptr().add(self)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
&mut *slice.as_mut_ptr().add(self)
}
#[inline]
fn index(self, slice: &[T]) -> &T {
// N.B., use intrinsic indexing
&(*slice)[self]
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut T {
// N.B., use intrinsic indexing
&mut (*slice)[self]
}
}
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
impl<T> SliceIndex<[T]> for ops::Range<usize> {
type Output = [T];
#[inline]
fn get(self, slice: &[T]) -> Option<&[T]> {
if self.start > self.end || self.end > slice.len() {
None
} else {
unsafe { Some(self.get_unchecked(slice)) }
}
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
if self.start > self.end || self.end > slice.len() {
None
} else {
unsafe { Some(self.get_unchecked_mut(slice)) }
}
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
if self.start > self.end {
slice_index_order_fail(self.start, self.end);
} else if self.end > slice.len() {
slice_index_len_fail(self.end, slice.len());
}
unsafe { self.get_unchecked(slice) }
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
if self.start > self.end {
slice_index_order_fail(self.start, self.end);
} else if self.end > slice.len() {
slice_index_len_fail(self.end, slice.len());
}
unsafe { self.get_unchecked_mut(slice) }
}
}
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
type Output = [T];
#[inline]
fn get(self, slice: &[T]) -> Option<&[T]> {
(0..self.end).get(slice)
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
(0..self.end).get_mut(slice)
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
(0..self.end).get_unchecked(slice)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
(0..self.end).get_unchecked_mut(slice)
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
(0..self.end).index(slice)
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
(0..self.end).index_mut(slice)
}
}
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
type Output = [T];
#[inline]
fn get(self, slice: &[T]) -> Option<&[T]> {
(self.start..slice.len()).get(slice)
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
(self.start..slice.len()).get_mut(slice)
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
(self.start..slice.len()).get_unchecked(slice)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
(self.start..slice.len()).get_unchecked_mut(slice)
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
(self.start..slice.len()).index(slice)
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
(self.start..slice.len()).index_mut(slice)
}
}
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
impl<T> SliceIndex<[T]> for ops::RangeFull {
type Output = [T];
#[inline]
fn get(self, slice: &[T]) -> Option<&[T]> {
Some(slice)
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
Some(slice)
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
slice
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
slice
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
slice
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
slice
}
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
type Output = [T];
#[inline]
fn get(self, slice: &[T]) -> Option<&[T]> {
if *self.end() == usize::MAX { None } else { (*self.start()..self.end() + 1).get(slice) }
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
if *self.end() == usize::MAX {
None
} else {
(*self.start()..self.end() + 1).get_mut(slice)
}
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
(*self.start()..self.end() + 1).get_unchecked(slice)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
(*self.start()..self.end() + 1).get_unchecked_mut(slice)
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
if *self.end() == usize::MAX {
slice_index_overflow_fail();
}
(*self.start()..self.end() + 1).index(slice)
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
if *self.end() == usize::MAX {
slice_index_overflow_fail();
}
(*self.start()..self.end() + 1).index_mut(slice)
}
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
type Output = [T];
#[inline]
fn get(self, slice: &[T]) -> Option<&[T]> {
(0..=self.end).get(slice)
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
(0..=self.end).get_mut(slice)
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
(0..=self.end).get_unchecked(slice)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
(0..=self.end).get_unchecked_mut(slice)
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
(0..=self.end).index(slice)
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
(0..=self.end).index_mut(slice)
}
}
////////////////////////////////////////////////////////////////////////////////
// Common traits
////////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for &[T] {
/// Creates an empty slice.
fn default() -> Self {
&[]
}
}
#[stable(feature = "mut_slice_default", since = "1.5.0")]
impl<T> Default for &mut [T] {
/// Creates a mutable empty slice.
fn default() -> Self {
&mut []
}
}
//
// Iterators
//
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a [T] {
type Item = &'a T;
type IntoIter = Iter<'a, T>;
fn into_iter(self) -> Iter<'a, T> {
self.iter()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a mut [T] {
type Item = &'a mut T;
type IntoIter = IterMut<'a, T>;
fn into_iter(self) -> IterMut<'a, T> {
self.iter_mut()
}
}
// Macro helper functions
#[inline(always)]
fn size_from_ptr<T>(_: *const T) -> usize {
mem::size_of::<T>()
}
// Inlining is_empty and len makes a huge performance difference
macro_rules! is_empty {
// The way we encode the length of a ZST iterator, this works both for ZST
// and non-ZST.
($self: ident) => {
$self.ptr.as_ptr() as *const T == $self.end
};
}
// To get rid of some bounds checks (see `position`), we compute the length in a somewhat
// unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
macro_rules! len {
($self: ident) => {{
#![allow(unused_unsafe)] // we're sometimes used within an unsafe block
let start = $self.ptr;
let size = size_from_ptr(start.as_ptr());
if size == 0 {
// This _cannot_ use `unchecked_sub` because we depend on wrapping
// to represent the length of long ZST slice iterators.
($self.end as usize).wrapping_sub(start.as_ptr() as usize)
} else {
// We know that `start <= end`, so we can do better than `offset_from`,
// which needs to deal in signed values. By setting appropriate flags here
// we can tell LLVM this, which helps it remove bounds checks.
// SAFETY: By the type invariant, `start <= end`
let diff = unsafe { unchecked_sub($self.end as usize, start.as_ptr() as usize) };
// By also telling LLVM that the pointers are apart by an exact
// multiple of the type size, it can optimize `len() == 0` down to
// `start == end` instead of `(end - start) < size`.
// SAFETY: By the type invariant, the pointers are aligned so the
// distance between them must be a multiple of pointee size
unsafe { exact_div(diff, size) }
}
}};
}
// The shared definition of the `Iter` and `IterMut` iterators
macro_rules! iterator {
(
struct $name:ident -> $ptr:ty,
$elem:ty,
$raw_mut:tt,
{$( $mut_:tt )*},
{$($extra:tt)*}
) => {
// Returns the first element and moves the start of the iterator forwards by 1.
// Greatly improves performance compared to an inlined function. The iterator
// must not be empty.
macro_rules! next_unchecked {
($self: ident) => {& $( $mut_ )* *$self.post_inc_start(1)}
}
// Returns the last element and moves the end of the iterator backwards by 1.
// Greatly improves performance compared to an inlined function. The iterator
// must not be empty.
macro_rules! next_back_unchecked {
($self: ident) => {& $( $mut_ )* *$self.pre_dec_end(1)}
}
// Shrinks the iterator when T is a ZST, by moving the end of the iterator
// backwards by `n`. `n` must not exceed `self.len()`.
macro_rules! zst_shrink {
($self: ident, $n: ident) => {
$self.end = ($self.end as * $raw_mut u8).wrapping_offset(-$n) as * $raw_mut T;
}
}
impl<'a, T> $name<'a, T> {
// Helper function for creating a slice from the iterator.
#[inline(always)]
fn make_slice(&self) -> &'a [T] {
unsafe { from_raw_parts(self.ptr.as_ptr(), len!(self)) }
}
// Helper function for moving the start of the iterator forwards by `offset` elements,
// returning the old start.
// Unsafe because the offset must not exceed `self.len()`.
#[inline(always)]
unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
if mem::size_of::<T>() == 0 {
zst_shrink!(self, offset);
self.ptr.as_ptr()
} else {
let old = self.ptr.as_ptr();
self.ptr = NonNull::new_unchecked(self.ptr.as_ptr().offset(offset));
old
}
}
// Helper function for moving the end of the iterator backwards by `offset` elements,
// returning the new end.
// Unsafe because the offset must not exceed `self.len()`.
#[inline(always)]
unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
if mem::size_of::<T>() == 0 {
zst_shrink!(self, offset);
self.ptr.as_ptr()
} else {
self.end = self.end.offset(-offset);
self.end
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for $name<'_, T> {
#[inline(always)]
fn len(&self) -> usize {
len!(self)
}
#[inline(always)]
fn is_empty(&self) -> bool {
is_empty!(self)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for $name<'a, T> {
type Item = $elem;
#[inline]
fn next(&mut self) -> Option<$elem> {
// could be implemented with slices, but this avoids bounds checks
unsafe {
assume(!self.ptr.as_ptr().is_null());
if mem::size_of::<T>() != 0 {
assume(!self.end.is_null());
}
if is_empty!(self) {
None
} else {
Some(next_unchecked!(self))
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let exact = len!(self);
(exact, Some(exact))
}
#[inline]
fn count(self) -> usize {
len!(self)
}
#[inline]
fn nth(&mut self, n: usize) -> Option<$elem> {
if n >= len!(self) {
// This iterator is now empty.
if mem::size_of::<T>() == 0 {
// We have to do it this way as `ptr` may never be 0, but `end`
// could be (due to wrapping).
self.end = self.ptr.as_ptr();
} else {
unsafe {
// End can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr
self.ptr = NonNull::new_unchecked(self.end as *mut T);
}
}
return None;
}
// We are in bounds. `post_inc_start` does the right thing even for ZSTs.
unsafe {
self.post_inc_start(n as isize);
Some(next_unchecked!(self))
}
}
#[inline]
fn last(mut self) -> Option<$elem> {
self.next_back()
}
// We override the default implementation, which uses `try_fold`,
// because this simple implementation generates less LLVM IR and is
// faster to compile.
#[inline]
fn for_each<F>(mut self, mut f: F)
where
Self: Sized,
F: FnMut(Self::Item),
{
while let Some(x) = self.next() {
f(x);
}
}
// We override the default implementation, which uses `try_fold`,
// because this simple implementation generates less LLVM IR and is
// faster to compile.
#[inline]
fn all<F>(&mut self, mut f: F) -> bool
where
Self: Sized,
F: FnMut(Self::Item) -> bool,
{
while let Some(x) = self.next() {
if !f(x) {
return false;
}
}
true
}
// We override the default implementation, which uses `try_fold`,
// because this simple implementation generates less LLVM IR and is
// faster to compile.
#[inline]
fn any<F>(&mut self, mut f: F) -> bool
where
Self: Sized,
F: FnMut(Self::Item) -> bool,
{
while let Some(x) = self.next() {
if f(x) {
return true;
}
}
false
}
// We override the default implementation, which uses `try_fold`,
// because this simple implementation generates less LLVM IR and is
// faster to compile.
#[inline]
fn find<P>(&mut self, mut predicate: P) -> Option<Self::Item>
where
Self: Sized,
P: FnMut(&Self::Item) -> bool,
{
while let Some(x) = self.next() {
if predicate(&x) {
return Some(x);
}
}
None
}
// We override the default implementation, which uses `try_fold`,
// because this simple implementation generates less LLVM IR and is
// faster to compile.
#[inline]
fn find_map<B, F>(&mut self, mut f: F) -> Option<B>
where
Self: Sized,
F: FnMut(Self::Item) -> Option<B>,
{
while let Some(x) = self.next() {
if let Some(y) = f(x) {
return Some(y);
}
}
None
}
// We override the default implementation, which uses `try_fold`,
// because this simple implementation generates less LLVM IR and is
// faster to compile. Also, the `assume` avoids a bounds check.
#[inline]
#[rustc_inherit_overflow_checks]
fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
Self: Sized,
P: FnMut(Self::Item) -> bool,
{
let n = len!(self);
let mut i = 0;
while let Some(x) = self.next() {
if predicate(x) {
unsafe { assume(i < n) };
return Some(i);
}
i += 1;
}
None
}
// We override the default implementation, which uses `try_fold`,
// because this simple implementation generates less LLVM IR and is
// faster to compile. Also, the `assume` avoids a bounds check.
#[inline]
fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
P: FnMut(Self::Item) -> bool,
Self: Sized + ExactSizeIterator + DoubleEndedIterator
{
let n = len!(self);
let mut i = n;
while let Some(x) = self.next_back() {
i -= 1;
if predicate(x) {
unsafe { assume(i < n) };
return Some(i);
}
}
None
}
$($extra)*
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for $name<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<$elem> {
// could be implemented with slices, but this avoids bounds checks
unsafe {
assume(!self.ptr.as_ptr().is_null());
if mem::size_of::<T>() != 0 {
assume(!self.end.is_null());
}
if is_empty!(self) {
None
} else {
Some(next_back_unchecked!(self))
}
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<$elem> {
if n >= len!(self) {
// This iterator is now empty.
self.end = self.ptr.as_ptr();
return None;
}
// We are in bounds. `pre_dec_end` does the right thing even for ZSTs.
unsafe {
self.pre_dec_end(n as isize);
Some(next_back_unchecked!(self))
}
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for $name<'_, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for $name<'_, T> {}
}
}
/// Immutable slice iterator
///
/// This struct is created by the [`iter`] method on [slices].
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // First, we declare a type which has the `iter` method to get the `Iter` struct (a `&[usize]` here):
/// let slice = &[1, 2, 3];
///
/// // Then, we iterate over it:
/// for element in slice.iter() {
/// println!("{}", element);
/// }
/// ```
///
/// [`iter`]: ../../std/primitive.slice.html#method.iter
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
ptr: NonNull<T>,
end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
// ptr == end is a quick test for the Iterator being empty, that works
// for both ZST and non-ZST.
_marker: marker::PhantomData<&'a T>,
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("Iter").field(&self.as_slice()).finish()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for Iter<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Send for Iter<'_, T> {}
impl<'a, T> Iter<'a, T> {
/// Views the underlying data as a subslice of the original data.
///
/// This has the same lifetime as the original slice, and so the
/// iterator can continue to be used while this exists.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // First, we declare a type which has the `iter` method to get the `Iter`
/// // struct (a `&[usize]` here):
/// let slice = &[1, 2, 3];
///
/// // Then, we get the iterator:
/// let mut iter = slice.iter();
/// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
/// println!("{:?}", iter.as_slice());
///
/// // Next, we move to the second element of the slice:
/// iter.next();
/// // Now `as_slice` returns "[2, 3]":
/// println!("{:?}", iter.as_slice());
/// ```
#[stable(feature = "iter_to_slice", since = "1.4.0")]
pub fn as_slice(&self) -> &'a [T] {
self.make_slice()
}
}
iterator! {struct Iter -> *const T, &'a T, const, {/* no mut */}, {
fn is_sorted_by<F>(self, mut compare: F) -> bool
where
Self: Sized,
F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>,
{
self.as_slice().windows(2).all(|w| {
compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false)
})
}
}}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Iter<'_, T> {
fn clone(&self) -> Self {
Iter { ptr: self.ptr, end: self.end, _marker: self._marker }
}
}
#[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
impl<T> AsRef<[T]> for Iter<'_, T> {
fn as_ref(&self) -> &[T] {
self.as_slice()
}
}
/// Mutable slice iterator.
///
/// This struct is created by the [`iter_mut`] method on [slices].
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // First, we declare a type which has the `iter_mut` method to get the `IterMut`
/// // struct (a `&[usize]` here):
/// let mut slice = &mut [1, 2, 3];
///
/// // Then, we iterate over it and increment each element value:
/// for element in slice.iter_mut() {
/// *element += 1;
/// }
///
/// // We now have "[2, 3, 4]":
/// println!("{:?}", slice);
/// ```
///
/// [`iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, T: 'a> {
ptr: NonNull<T>,
end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
// ptr == end is a quick test for the Iterator being empty, that works
// for both ZST and non-ZST.
_marker: marker::PhantomData<&'a mut T>,
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("IterMut").field(&self.make_slice()).finish()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send> Send for IterMut<'_, T> {}
impl<'a, T> IterMut<'a, T> {
/// Views the underlying data as a subslice of the original data.
///
/// To avoid creating `&mut` references that alias, this is forced
/// to consume the iterator.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // First, we declare a type which has the `iter_mut` method to get the `IterMut`
/// // struct (a `&[usize]` here):
/// let mut slice = &mut [1, 2, 3];
///
/// {
/// // Then, we get the iterator:
/// let mut iter = slice.iter_mut();
/// // We move to next element:
/// iter.next();
/// // So if we print what `into_slice` method returns here, we have "[2, 3]":
/// println!("{:?}", iter.into_slice());
/// }
///
/// // Now let's modify a value of the slice:
/// {
/// // First we get back the iterator:
/// let mut iter = slice.iter_mut();
/// // We change the value of the first element of the slice returned by the `next` method:
/// *iter.next().unwrap() += 1;
/// }
/// // Now slice is "[2, 2, 3]":
/// println!("{:?}", slice);
/// ```
#[stable(feature = "iter_to_slice", since = "1.4.0")]
pub fn into_slice(self) -> &'a mut [T] {
unsafe { from_raw_parts_mut(self.ptr.as_ptr(), len!(self)) }
}
/// Views the underlying data as a subslice of the original data.
///
/// To avoid creating `&mut [T]` references that alias, the returned slice
/// borrows its lifetime from the iterator the method is applied on.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// # #![feature(slice_iter_mut_as_slice)]
/// let mut slice: &mut [usize] = &mut [1, 2, 3];
///
/// // First, we get the iterator:
/// let mut iter = slice.iter_mut();
/// // So if we check what the `as_slice` method returns here, we have "[1, 2, 3]":
/// assert_eq!(iter.as_slice(), &[1, 2, 3]);
///
/// // Next, we move to the second element of the slice:
/// iter.next();
/// // Now `as_slice` returns "[2, 3]":
/// assert_eq!(iter.as_slice(), &[2, 3]);
/// ```
#[unstable(feature = "slice_iter_mut_as_slice", reason = "recently added", issue = "58957")]
pub fn as_slice(&self) -> &[T] {
self.make_slice()
}
}
iterator! {struct IterMut -> *mut T, &'a mut T, mut, {mut}, {}}
/// An internal abstraction over the splitting iterators, so that
/// splitn, splitn_mut etc can be implemented once.
#[doc(hidden)]
trait SplitIter: DoubleEndedIterator {
/// Marks the underlying iterator as complete, extracting the remaining
/// portion of the slice.
fn finish(&mut self) -> Option<Self::Item>;
}
/// An iterator over subslices separated by elements that match a predicate
/// function.
///
/// This struct is created by the [`split`] method on [slices].
///
/// [`split`]: ../../std/primitive.slice.html#method.split
/// [slices]: ../../std/primitive.slice.html
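///
/// # Examples
///
/// Splitting on multiples of three (an arbitrary predicate, chosen for illustration):
///
/// ```
/// let slice = [10, 40, 33, 20];
/// let mut iter = slice.split(|num| num % 3 == 0);
///
/// assert_eq!(iter.next(), Some(&[10, 40][..]));
/// assert_eq!(iter.next(), Some(&[20][..]));
/// assert_eq!(iter.next(), None);
/// ```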
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Split<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
v: &'a [T],
pred: P,
finished: bool,
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for Split<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Split").field("v", &self.v).field("finished", &self.finished).finish()
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, P> Clone for Split<'_, T, P>
where
P: Clone + FnMut(&T) -> bool,
{
fn clone(&self) -> Self {
Split { v: self.v, pred: self.pred.clone(), finished: self.finished }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> Iterator for Split<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.finished {
return None;
}
match self.v.iter().position(|x| (self.pred)(x)) {
None => self.finish(),
Some(idx) => {
let ret = Some(&self.v[..idx]);
self.v = &self.v[idx + 1..];
ret
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.finished {
return None;
}
match self.v.iter().rposition(|x| (self.pred)(x)) {
None => self.finish(),
Some(idx) => {
let ret = Some(&self.v[idx + 1..]);
self.v = &self.v[..idx];
ret
}
}
}
}
impl<'a, T, P> SplitIter for Split<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn finish(&mut self) -> Option<&'a [T]> {
if self.finished {
None
} else {
self.finished = true;
Some(self.v)
}
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over subslices separated by elements that match a predicate
/// function. Unlike `Split`, it contains the matched part as a terminator
/// of the subslice.
///
/// This struct is created by the [`split_inclusive`] method on [slices].
///
/// [`split_inclusive`]: ../../std/primitive.slice.html#method.split_inclusive
/// [slices]: ../../std/primitive.slice.html
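///
/// # Examples
///
/// Here the terminator (the multiple of three) stays at the end of its chunk:
///
/// ```
/// #![feature(split_inclusive)]
///
/// let slice = [10, 40, 33, 20];
/// let mut iter = slice.split_inclusive(|num| num % 3 == 0);
///
/// assert_eq!(iter.next(), Some(&[10, 40, 33][..]));
/// assert_eq!(iter.next(), Some(&[20][..]));
/// assert_eq!(iter.next(), None);
/// ```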
#[unstable(feature = "split_inclusive", issue = "72360")]
pub struct SplitInclusive<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
v: &'a [T],
pred: P,
finished: bool,
}
#[unstable(feature = "split_inclusive", issue = "72360")]
impl<T: fmt::Debug, P> fmt::Debug for SplitInclusive<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitInclusive")
.field("v", &self.v)
.field("finished", &self.finished)
.finish()
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[unstable(feature = "split_inclusive", issue = "72360")]
impl<T, P> Clone for SplitInclusive<'_, T, P>
where
P: Clone + FnMut(&T) -> bool,
{
fn clone(&self) -> Self {
SplitInclusive { v: self.v, pred: self.pred.clone(), finished: self.finished }
}
}
#[unstable(feature = "split_inclusive", issue = "72360")]
impl<'a, T, P> Iterator for SplitInclusive<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.finished {
return None;
}
let idx =
self.v.iter().position(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(self.v.len());
if idx == self.v.len() {
self.finished = true;
}
let ret = Some(&self.v[..idx]);
self.v = &self.v[idx..];
ret
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) }
}
}
#[unstable(feature = "split_inclusive", issue = "72360")]
impl<'a, T, P> DoubleEndedIterator for SplitInclusive<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.finished {
return None;
}
// The last index of self.v is already checked and found to match
// by the last iteration, so we start searching for a new match
// one index to the left.
let remainder = if self.v.is_empty() { &[] } else { &self.v[..(self.v.len() - 1)] };
let idx = remainder.iter().rposition(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(0);
if idx == 0 {
self.finished = true;
}
let ret = Some(&self.v[idx..]);
self.v = &self.v[..idx];
ret
}
}
#[unstable(feature = "split_inclusive", issue = "72360")]
impl<T, P> FusedIterator for SplitInclusive<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the mutable subslices of the slice which are separated
/// by elements that match `pred`.
///
/// This struct is created by the [`split_mut`] method on [slices].
///
/// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
/// [slices]: ../../std/primitive.slice.html
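///
/// # Examples
///
/// Mutating the first element of each group (the values are arbitrary):
///
/// ```
/// let mut v = [10, 40, 30, 20, 60, 50];
///
/// for group in v.split_mut(|num| *num % 3 == 0) {
///     group[0] = 1;
/// }
/// assert_eq!(v, [1, 40, 30, 1, 60, 1]);
/// ```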
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitMut<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
v: &'a mut [T],
pred: P,
finished: bool,
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for SplitMut<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitMut").field("v", &self.v).field("finished", &self.finished).finish()
}
}
impl<'a, T, P> SplitIter for SplitMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn finish(&mut self) -> Option<&'a mut [T]> {
if self.finished {
None
} else {
self.finished = true;
Some(mem::replace(&mut self.v, &mut []))
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> Iterator for SplitMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.finished {
return None;
}
let idx_opt = {
// work around borrowck limitations
let pred = &mut self.pred;
self.v.iter().position(|x| (*pred)(x))
};
match idx_opt {
None => self.finish(),
Some(idx) => {
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(idx);
self.v = &mut tail[1..];
Some(head)
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.finished {
(0, Some(0))
} else {
// if the predicate doesn't match anything, we yield one slice;
// if it matches every element, we yield len+1 empty slices.
(1, Some(self.v.len() + 1))
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.finished {
return None;
}
let idx_opt = {
// work around borrowck limitations
let pred = &mut self.pred;
self.v.iter().rposition(|x| (*pred)(x))
};
match idx_opt {
None => self.finish(),
Some(idx) => {
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(idx);
self.v = head;
Some(&mut tail[1..])
}
}
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the mutable subslices of the slice which are separated
/// by elements that match `pred`. Unlike `SplitMut`, it retains the matched
/// parts at the ends of the subslices.
///
/// This struct is created by the [`split_inclusive_mut`] method on [slices].
///
/// [`split_inclusive_mut`]: ../../std/primitive.slice.html#method.split_inclusive_mut
/// [slices]: ../../std/primitive.slice.html
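///
/// # Examples
///
/// Each group (except possibly the last) ends with its matching element, which is
/// overwritten here:
///
/// ```
/// #![feature(split_inclusive)]
///
/// let mut v = [10, 40, 30, 20, 60, 50];
///
/// for group in v.split_inclusive_mut(|num| *num % 3 == 0) {
///     let terminator_idx = group.len() - 1;
///     group[terminator_idx] = 1;
/// }
/// assert_eq!(v, [10, 40, 1, 20, 1, 1]);
/// ```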
#[unstable(feature = "split_inclusive", issue = "72360")]
pub struct SplitInclusiveMut<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
v: &'a mut [T],
pred: P,
finished: bool,
}
#[unstable(feature = "split_inclusive", issue = "72360")]
impl<T: fmt::Debug, P> fmt::Debug for SplitInclusiveMut<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitInclusiveMut")
.field("v", &self.v)
.field("finished", &self.finished)
.finish()
}
}
#[unstable(feature = "split_inclusive", issue = "72360")]
impl<'a, T, P> Iterator for SplitInclusiveMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.finished {
return None;
}
let idx_opt = {
// work around borrowck limitations
let pred = &mut self.pred;
self.v.iter().position(|x| (*pred)(x))
};
let idx = idx_opt.map(|idx| idx + 1).unwrap_or(self.v.len());
if idx == self.v.len() {
self.finished = true;
}
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(idx);
self.v = tail;
Some(head)
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.finished {
(0, Some(0))
} else {
// if the predicate doesn't match anything, we yield one slice;
// if it matches every element, we yield len+1 empty slices.
(1, Some(self.v.len() + 1))
}
}
}
#[unstable(feature = "split_inclusive", issue = "72360")]
impl<'a, T, P> DoubleEndedIterator for SplitInclusiveMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.finished {
return None;
}
let idx_opt = if self.v.is_empty() {
None
} else {
// work around borrowck limitations
let pred = &mut self.pred;
// The last index of self.v is already checked and found to match
// by the last iteration, so we start searching for a new match
// one index to the left.
let remainder = &self.v[..(self.v.len() - 1)];
remainder.iter().rposition(|x| (*pred)(x))
};
let idx = idx_opt.map(|idx| idx + 1).unwrap_or(0);
if idx == 0 {
self.finished = true;
}
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(idx);
self.v = head;
Some(tail)
}
}
#[unstable(feature = "split_inclusive", issue = "72360")]
impl<T, P> FusedIterator for SplitInclusiveMut<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over subslices separated by elements that match a predicate
/// function, starting from the end of the slice.
///
/// This struct is created by the [`rsplit`] method on [slices].
///
/// [`rsplit`]: ../../std/primitive.slice.html#method.rsplit
/// [slices]: ../../std/primitive.slice.html
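///
/// # Examples
///
/// Splitting around zero, starting from the end of the slice (the values are arbitrary):
///
/// ```
/// let slice = [11, 22, 33, 0, 44, 55];
/// let mut iter = slice.rsplit(|num| *num == 0);
///
/// assert_eq!(iter.next(), Some(&[44, 55][..]));
/// assert_eq!(iter.next(), Some(&[11, 22, 33][..]));
/// assert_eq!(iter.next(), None);
/// ```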
#[stable(feature = "slice_rsplit", since = "1.27.0")]
#[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
pub struct RSplit<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
inner: Split<'a, T, P>,
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplit<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RSplit")
.field("v", &self.inner.v)
.field("finished", &self.inner.finished)
.finish()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> Iterator for RSplit<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
self.inner.next_back()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
self.inner.next()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> SplitIter for RSplit<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn finish(&mut self) -> Option<&'a [T]> {
self.inner.finish()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T, P> FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the mutable subslices of a slice which are separated
/// by elements that match `pred`, starting from the end of the slice.
///
/// This struct is created by the [`rsplit_mut`] method on [slices].
///
/// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "slice_rsplit", since = "1.27.0")]
pub struct RSplitMut<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
inner: SplitMut<'a, T, P>,
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplitMut<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RSplitMut")
.field("v", &self.inner.v)
.field("finished", &self.inner.finished)
.finish()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> SplitIter for RSplitMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn finish(&mut self) -> Option<&'a mut [T]> {
self.inner.finish()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> Iterator for RSplitMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
self.inner.next_back()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
self.inner.next()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T, P> FusedIterator for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
/// A private iterator over subslices separated by elements that
/// match a predicate function, splitting at most a fixed number of
/// times.
#[derive(Debug)]
struct GenericSplitN<I> {
iter: I,
count: usize,
}
impl<T, I: SplitIter<Item = T>> Iterator for GenericSplitN<I> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
match self.count {
0 => None,
1 => {
self.count -= 1;
self.iter.finish()
}
_ => {
self.count -= 1;
self.iter.next()
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (lower, upper_opt) = self.iter.size_hint();
(lower, upper_opt.map(|upper| cmp::min(self.count, upper)))
}
}
/// An iterator over subslices separated by elements that match a predicate
/// function, limited to a given number of splits.
///
/// This struct is created by the [`splitn`] method on [slices].
///
/// [`splitn`]: ../../std/primitive.slice.html#method.splitn
/// [slices]: ../../std/primitive.slice.html
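///
/// # Examples
///
/// A short usage sketch (added for illustration): once the split limit is
/// reached, the rest of the slice is returned as the final, unsplit item.
///
/// ```
/// let slice = [10, 40, 30, 20, 60, 50];
/// let mut iter = slice.splitn(2, |num| *num % 3 == 0);
/// assert_eq!(iter.next().unwrap(), &[10, 40]);
/// assert_eq!(iter.next().unwrap(), &[20, 60, 50]);
/// assert!(iter.next().is_none());
/// ```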
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitN<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
inner: GenericSplitN<Split<'a, T, P>>,
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for SplitN<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitN").field("inner", &self.inner).finish()
}
}
/// An iterator over subslices separated by elements that match a
/// predicate function, limited to a given number of splits, starting
/// from the end of the slice.
///
/// This struct is created by the [`rsplitn`] method on [slices].
///
/// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
/// [slices]: ../../std/primitive.slice.html
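///
/// # Examples
///
/// A short usage sketch (added for illustration): splitting starts from the
/// back, and the final item holds the unsplit front of the slice.
///
/// ```
/// let slice = [10, 40, 30, 20, 60, 50];
/// let mut iter = slice.rsplitn(2, |num| *num % 3 == 0);
/// assert_eq!(iter.next().unwrap(), &[50]);
/// assert_eq!(iter.next().unwrap(), &[10, 40, 30, 20]);
/// assert!(iter.next().is_none());
/// ```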
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RSplitN<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
inner: GenericSplitN<RSplit<'a, T, P>>,
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplitN<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RSplitN").field("inner", &self.inner).finish()
}
}
/// An iterator over mutable subslices separated by elements that match a
/// predicate function, limited to a given number of splits.
///
/// This struct is created by the [`splitn_mut`] method on [slices].
///
/// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitNMut<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
inner: GenericSplitN<SplitMut<'a, T, P>>,
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for SplitNMut<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitNMut").field("inner", &self.inner).finish()
}
}
/// An iterator over mutable subslices separated by elements that match a
/// predicate function, limited to a given number of splits, starting
/// from the end of the slice.
///
/// This struct is created by the [`rsplitn_mut`] method on [slices].
///
/// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RSplitNMut<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
inner: GenericSplitN<RSplitMut<'a, T, P>>,
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplitNMut<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RSplitNMut").field("inner", &self.inner).finish()
}
}
macro_rules! forward_iterator {
($name:ident: $elem:ident, $iter_of:ty) => {
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, $elem, P> Iterator for $name<'a, $elem, P>
where
P: FnMut(&T) -> bool,
{
type Item = $iter_of;
#[inline]
fn next(&mut self) -> Option<$iter_of> {
self.inner.next()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P> where P: FnMut(&T) -> bool {}
};
}
forward_iterator! { SplitN: T, &'a [T] }
forward_iterator! { RSplitN: T, &'a [T] }
forward_iterator! { SplitNMut: T, &'a mut [T] }
forward_iterator! { RSplitNMut: T, &'a mut [T] }
/// An iterator over overlapping subslices of length `size`.
///
/// This struct is created by the [`windows`] method on [slices].
///
/// [`windows`]: ../../std/primitive.slice.html#method.windows
/// [slices]: ../../std/primitive.slice.html
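///
/// # Examples
///
/// A short usage sketch (added for illustration): successive windows overlap by
/// `size - 1` elements.
///
/// ```
/// let slice = ['r', 'u', 's', 't'];
/// let mut iter = slice.windows(2);
/// assert_eq!(iter.next().unwrap(), &['r', 'u']);
/// assert_eq!(iter.next().unwrap(), &['u', 's']);
/// assert_eq!(iter.next().unwrap(), &['s', 't']);
/// assert!(iter.next().is_none());
/// ```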
#[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Windows<'a, T: 'a> {
v: &'a [T],
size: usize,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Windows<'_, T> {
fn clone(&self) -> Self {
Windows { v: self.v, size: self.size }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Windows<'a, T> {
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.size > self.v.len() {
None
} else {
let ret = Some(&self.v[..self.size]);
self.v = &self.v[1..];
ret
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.size > self.v.len() {
(0, Some(0))
} else {
let size = self.v.len() - self.size + 1;
(size, Some(size))
}
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
let (end, overflow) = self.size.overflowing_add(n);
if end > self.v.len() || overflow {
self.v = &[];
None
} else {
let nth = &self.v[n..end];
self.v = &self.v[n + 1..];
Some(nth)
}
}
#[inline]
fn last(self) -> Option<Self::Item> {
if self.size > self.v.len() {
None
} else {
let start = self.v.len() - self.size;
Some(&self.v[start..])
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.size > self.v.len() {
None
} else {
let ret = Some(&self.v[self.v.len() - self.size..]);
self.v = &self.v[..self.v.len() - 1];
ret
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let (end, overflow) = self.v.len().overflowing_sub(n);
if end < self.size || overflow {
self.v = &[];
None
} else {
let ret = &self.v[end - self.size..end];
self.v = &self.v[..end - 1];
Some(ret)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Windows<'_, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for Windows<'_, T> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Windows<'_, T> {}
#[doc(hidden)]
unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {
unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
from_raw_parts(self.v.as_ptr().add(i), self.size)
}
fn may_have_side_effect() -> bool {
false
}
}
/// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
/// time), starting at the beginning of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
///
/// This struct is created by the [`chunks`] method on [slices].
///
/// [`chunks`]: ../../std/primitive.slice.html#method.chunks
/// [slices]: ../../std/primitive.slice.html
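///
/// # Examples
///
/// A short usage sketch (added for illustration): the final chunk carries the
/// remainder when the length is not a multiple of `chunk_size`.
///
/// ```
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let mut iter = slice.chunks(2);
/// assert_eq!(iter.next().unwrap(), &['l', 'o']);
/// assert_eq!(iter.next().unwrap(), &['r', 'e']);
/// assert_eq!(iter.next().unwrap(), &['m']);
/// assert!(iter.next().is_none());
/// ```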
#[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Chunks<'a, T: 'a> {
v: &'a [T],
chunk_size: usize,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Chunks<'_, T> {
fn clone(&self) -> Self {
Chunks { v: self.v, chunk_size: self.chunk_size }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Chunks<'a, T> {
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.v.is_empty() {
None
} else {
let chunksz = cmp::min(self.v.len(), self.chunk_size);
let (fst, snd) = self.v.split_at(chunksz);
self.v = snd;
Some(fst)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.v.is_empty() {
(0, Some(0))
} else {
let n = self.v.len() / self.chunk_size;
let rem = self.v.len() % self.chunk_size;
let n = if rem > 0 { n + 1 } else { n };
(n, Some(n))
}
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
let (start, overflow) = n.overflowing_mul(self.chunk_size);
if start >= self.v.len() || overflow {
self.v = &[];
None
} else {
let end = match start.checked_add(self.chunk_size) {
Some(sum) => cmp::min(self.v.len(), sum),
None => self.v.len(),
};
let nth = &self.v[start..end];
self.v = &self.v[end..];
Some(nth)
}
}
#[inline]
fn last(self) -> Option<Self::Item> {
if self.v.is_empty() {
None
} else {
let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
Some(&self.v[start..])
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.v.is_empty() {
None
} else {
let remainder = self.v.len() % self.chunk_size;
let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
self.v = fst;
Some(snd)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &[];
None
} else {
let start = (len - 1 - n) * self.chunk_size;
let end = match start.checked_add(self.chunk_size) {
Some(res) => cmp::min(res, self.v.len()),
None => self.v.len(),
};
let nth_back = &self.v[start..end];
self.v = &self.v[..start];
Some(nth_back)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Chunks<'_, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for Chunks<'_, T> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Chunks<'_, T> {}
#[doc(hidden)]
unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {
unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
let start = i * self.chunk_size;
let end = match start.checked_add(self.chunk_size) {
None => self.v.len(),
Some(end) => cmp::min(end, self.v.len()),
};
from_raw_parts(self.v.as_ptr().add(start), end - start)
}
fn may_have_side_effect() -> bool {
false
}
}
/// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
/// elements at a time), starting at the beginning of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
///
/// This struct is created by the [`chunks_mut`] method on [slices].
///
/// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
/// [slices]: ../../std/primitive.slice.html
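///
/// # Examples
///
/// A short usage sketch (added for illustration): each chunk can be modified in
/// place.
///
/// ```
/// let v = &mut [0, 0, 0, 0, 0];
/// let mut count = 1;
///
/// for chunk in v.chunks_mut(2) {
///     for elem in chunk.iter_mut() {
///         *elem += count;
///     }
///     count += 1;
/// }
/// assert_eq!(v, &[1, 1, 2, 2, 3]);
/// ```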
#[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ChunksMut<'a, T: 'a> {
v: &'a mut [T],
chunk_size: usize,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for ChunksMut<'a, T> {
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.v.is_empty() {
None
} else {
let sz = cmp::min(self.v.len(), self.chunk_size);
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(sz);
self.v = tail;
Some(head)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.v.is_empty() {
(0, Some(0))
} else {
let n = self.v.len() / self.chunk_size;
let rem = self.v.len() % self.chunk_size;
let n = if rem > 0 { n + 1 } else { n };
(n, Some(n))
}
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
let (start, overflow) = n.overflowing_mul(self.chunk_size);
if start >= self.v.len() || overflow {
self.v = &mut [];
None
} else {
let end = match start.checked_add(self.chunk_size) {
Some(sum) => cmp::min(self.v.len(), sum),
None => self.v.len(),
};
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(end);
let (_, nth) = head.split_at_mut(start);
self.v = tail;
Some(nth)
}
}
#[inline]
fn last(self) -> Option<Self::Item> {
if self.v.is_empty() {
None
} else {
let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
Some(&mut self.v[start..])
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.v.is_empty() {
None
} else {
let remainder = self.v.len() % self.chunk_size;
let sz = if remainder != 0 { remainder } else { self.chunk_size };
let tmp = mem::replace(&mut self.v, &mut []);
let tmp_len = tmp.len();
let (head, tail) = tmp.split_at_mut(tmp_len - sz);
self.v = head;
Some(tail)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &mut [];
None
} else {
let start = (len - 1 - n) * self.chunk_size;
let end = match start.checked_add(self.chunk_size) {
Some(res) => cmp::min(res, self.v.len()),
None => self.v.len(),
};
let (temp, _tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
let (head, nth_back) = temp.split_at_mut(start);
self.v = head;
Some(nth_back)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for ChunksMut<'_, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for ChunksMut<'_, T> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for ChunksMut<'_, T> {}
#[doc(hidden)]
unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {
unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
let start = i * self.chunk_size;
let end = match start.checked_add(self.chunk_size) {
None => self.v.len(),
Some(end) => cmp::min(end, self.v.len()),
};
from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
}
fn may_have_side_effect() -> bool {
false
}
}
/// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
/// time), starting at the beginning of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last
/// up to `chunk_size-1` elements will be omitted but can be retrieved from
/// the [`remainder`] function of the iterator.
///
/// This struct is created by the [`chunks_exact`] method on [slices].
///
/// [`chunks_exact`]: ../../std/primitive.slice.html#method.chunks_exact
/// [`remainder`]: ../../std/slice/struct.ChunksExact.html#method.remainder
/// [slices]: ../../std/primitive.slice.html
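///
/// # Examples
///
/// A short usage sketch (added for illustration): the leftover element is not
/// yielded but remains available through [`remainder`].
///
/// ```
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let mut iter = slice.chunks_exact(2);
/// assert_eq!(iter.next().unwrap(), &['l', 'o']);
/// assert_eq!(iter.next().unwrap(), &['r', 'e']);
/// assert!(iter.next().is_none());
/// assert_eq!(iter.remainder(), &['m']);
/// ```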
#[derive(Debug)]
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub struct ChunksExact<'a, T: 'a> {
v: &'a [T],
rem: &'a [T],
chunk_size: usize,
}
impl<'a, T> ChunksExact<'a, T> {
/// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub fn remainder(&self) -> &'a [T] {
self.rem
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> Clone for ChunksExact<'_, T> {
fn clone(&self) -> Self {
ChunksExact { v: self.v, rem: self.rem, chunk_size: self.chunk_size }
}
}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<'a, T> Iterator for ChunksExact<'a, T> {
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let (fst, snd) = self.v.split_at(self.chunk_size);
self.v = snd;
Some(fst)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let n = self.v.len() / self.chunk_size;
(n, Some(n))
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
let (start, overflow) = n.overflowing_mul(self.chunk_size);
if start >= self.v.len() || overflow {
self.v = &[];
None
} else {
let (_, snd) = self.v.split_at(start);
self.v = snd;
self.next()
}
}
#[inline]
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
self.v = fst;
Some(snd)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &[];
None
} else {
let start = (len - 1 - n) * self.chunk_size;
let end = start + self.chunk_size;
let nth_back = &self.v[start..end];
self.v = &self.v[..start];
Some(nth_back)
}
}
}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> ExactSizeIterator for ChunksExact<'_, T> {
fn is_empty(&self) -> bool {
self.v.is_empty()
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for ChunksExact<'_, T> {}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> FusedIterator for ChunksExact<'_, T> {}
#[doc(hidden)]
#[stable(feature = "chunks_exact", since = "1.31.0")]
unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> {
unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
let start = i * self.chunk_size;
from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
}
fn may_have_side_effect() -> bool {
false
}
}
/// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
/// elements at a time), starting at the beginning of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last up to
/// `chunk_size-1` elements will be omitted but can be retrieved from the
/// [`into_remainder`] function of the iterator.
///
/// This struct is created by the [`chunks_exact_mut`] method on [slices].
///
/// [`chunks_exact_mut`]: ../../std/primitive.slice.html#method.chunks_exact_mut
/// [`into_remainder`]: ../../std/slice/struct.ChunksExactMut.html#method.into_remainder
/// [slices]: ../../std/primitive.slice.html
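///
/// # Examples
///
/// A short usage sketch (added for illustration): the trailing element is never
/// yielded and is only reachable through [`into_remainder`].
///
/// ```
/// let v = &mut [0, 0, 0, 0, 0];
/// let mut count = 1;
///
/// for chunk in v.chunks_exact_mut(2) {
///     for elem in chunk.iter_mut() {
///         *elem += count;
///     }
///     count += 1;
/// }
/// assert_eq!(v, &[1, 1, 2, 2, 0]);
/// ```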
#[derive(Debug)]
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub struct ChunksExactMut<'a, T: 'a> {
v: &'a mut [T],
rem: &'a mut [T],
chunk_size: usize,
}
impl<'a, T> ChunksExactMut<'a, T> {
/// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub fn into_remainder(self) -> &'a mut [T] {
self.rem
}
}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<'a, T> Iterator for ChunksExactMut<'a, T> {
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(self.chunk_size);
self.v = tail;
Some(head)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let n = self.v.len() / self.chunk_size;
(n, Some(n))
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
let (start, overflow) = n.overflowing_mul(self.chunk_size);
if start >= self.v.len() || overflow {
self.v = &mut [];
None
} else {
let tmp = mem::replace(&mut self.v, &mut []);
let (_, snd) = tmp.split_at_mut(start);
self.v = snd;
self.next()
}
}
#[inline]
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let tmp = mem::replace(&mut self.v, &mut []);
let tmp_len = tmp.len();
let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
self.v = head;
Some(tail)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &mut [];
None
} else {
let start = (len - 1 - n) * self.chunk_size;
let end = start + self.chunk_size;
let (temp, _tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
let (head, nth_back) = temp.split_at_mut(start);
self.v = head;
Some(nth_back)
}
}
}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> ExactSizeIterator for ChunksExactMut<'_, T> {
fn is_empty(&self) -> bool {
self.v.is_empty()
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for ChunksExactMut<'_, T> {}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> FusedIterator for ChunksExactMut<'_, T> {}
#[doc(hidden)]
#[stable(feature = "chunks_exact", since = "1.31.0")]
unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> {
unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
let start = i * self.chunk_size;
from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
}
fn may_have_side_effect() -> bool {
false
}
}
/// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
/// time), starting at the end of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
///
/// This struct is created by the [`rchunks`] method on [slices].
///
/// [`rchunks`]: ../../std/primitive.slice.html#method.rchunks
/// [slices]: ../../std/primitive.slice.html
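///
/// # Examples
///
/// A short usage sketch (added for illustration): chunks are taken from the
/// back, so the shorter remainder chunk comes last.
///
/// ```
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let mut iter = slice.rchunks(2);
/// assert_eq!(iter.next().unwrap(), &['e', 'm']);
/// assert_eq!(iter.next().unwrap(), &['o', 'r']);
/// assert_eq!(iter.next().unwrap(), &['l']);
/// assert!(iter.next().is_none());
/// ```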
#[derive(Debug)]
#[stable(feature = "rchunks", since = "1.31.0")]
pub struct RChunks<'a, T: 'a> {
v: &'a [T],
chunk_size: usize,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> Clone for RChunks<'_, T> {
fn clone(&self) -> Self {
RChunks { v: self.v, chunk_size: self.chunk_size }
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> Iterator for RChunks<'a, T> {
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.v.is_empty() {
None
} else {
let chunksz = cmp::min(self.v.len(), self.chunk_size);
let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
self.v = fst;
Some(snd)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.v.is_empty() {
(0, Some(0))
} else {
let n = self.v.len() / self.chunk_size;
let rem = self.v.len() % self.chunk_size;
let n = if rem > 0 { n + 1 } else { n };
(n, Some(n))
}
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
let (end, overflow) = n.overflowing_mul(self.chunk_size);
if end >= self.v.len() || overflow {
self.v = &[];
None
} else {
// Can't underflow because of the check above
let end = self.v.len() - end;
let start = match end.checked_sub(self.chunk_size) {
Some(sum) => sum,
None => 0,
};
let nth = &self.v[start..end];
self.v = &self.v[0..start];
Some(nth)
}
}
#[inline]
fn last(self) -> Option<Self::Item> {
if self.v.is_empty() {
None
} else {
let rem = self.v.len() % self.chunk_size;
let end = if rem == 0 { self.chunk_size } else { rem };
Some(&self.v[0..end])
}
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for RChunks<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.v.is_empty() {
None
} else {
let remainder = self.v.len() % self.chunk_size;
let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
let (fst, snd) = self.v.split_at(chunksz);
self.v = snd;
Some(fst)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &[];
None
} else {
// can't underflow because `n < len`
let offset_from_end = (len - 1 - n) * self.chunk_size;
let end = self.v.len() - offset_from_end;
let start = end.saturating_sub(self.chunk_size);
let nth_back = &self.v[start..end];
self.v = &self.v[end..];
Some(nth_back)
}
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> ExactSizeIterator for RChunks<'_, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for RChunks<'_, T> {}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> FusedIterator for RChunks<'_, T> {}
#[doc(hidden)]
#[stable(feature = "rchunks", since = "1.31.0")]
unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> {
unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
let end = self.v.len() - i * self.chunk_size;
let start = match end.checked_sub(self.chunk_size) {
None => 0,
Some(start) => start,
};
from_raw_parts(self.v.as_ptr().add(start), end - start)
}
fn may_have_side_effect() -> bool {
false
}
}
/// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
/// elements at a time), starting at the end of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
///
/// This struct is created by the [`rchunks_mut`] method on [slices].
///
/// [`rchunks_mut`]: ../../std/primitive.slice.html#method.rchunks_mut
/// [slices]: ../../std/primitive.slice.html
#[derive(Debug)]
#[stable(feature = "rchunks", since = "1.31.0")]
pub struct RChunksMut<'a, T: 'a> {
v: &'a mut [T],
chunk_size: usize,
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> Iterator for RChunksMut<'a, T> {
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.v.is_empty() {
None
} else {
let sz = cmp::min(self.v.len(), self.chunk_size);
let tmp = mem::replace(&mut self.v, &mut []);
let tmp_len = tmp.len();
let (head, tail) = tmp.split_at_mut(tmp_len - sz);
self.v = head;
Some(tail)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.v.is_empty() {
(0, Some(0))
} else {
let n = self.v.len() / self.chunk_size;
let rem = self.v.len() % self.chunk_size;
let n = if rem > 0 { n + 1 } else { n };
(n, Some(n))
}
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
let (end, overflow) = n.overflowing_mul(self.chunk_size);
if end >= self.v.len() || overflow {
self.v = &mut [];
None
} else {
// Can't underflow because of the check above
let end = self.v.len() - end;
let start = match end.checked_sub(self.chunk_size) {
Some(sum) => sum,
None => 0,
};
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(start);
let (nth, _) = tail.split_at_mut(end - start);
self.v = head;
Some(nth)
}
}
#[inline]
fn last(self) -> Option<Self::Item> {
if self.v.is_empty() {
None
} else {
let rem = self.v.len() % self.chunk_size;
let end = if rem == 0 { self.chunk_size } else { rem };
Some(&mut self.v[0..end])
}
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.v.is_empty() {
None
} else {
let remainder = self.v.len() % self.chunk_size;
let sz = if remainder != 0 { remainder } else { self.chunk_size };
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(sz);
self.v = tail;
Some(head)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &mut [];
None
} else {
// can't underflow because `n < len`
let offset_from_end = (len - 1 - n) * self.chunk_size;
let end = self.v.len() - offset_from_end;
let start = end.saturating_sub(self.chunk_size);
let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
let (_, nth_back) = tmp.split_at_mut(start);
self.v = tail;
Some(nth_back)
}
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> ExactSizeIterator for RChunksMut<'_, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for RChunksMut<'_, T> {}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> FusedIterator for RChunksMut<'_, T> {}
#[doc(hidden)]
#[stable(feature = "rchunks", since = "1.31.0")]
unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> {
unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
let end = self.v.len() - i * self.chunk_size;
let start = match end.checked_sub(self.chunk_size) {
None => 0,
Some(start) => start,
};
from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start)
}
fn may_have_side_effect() -> bool {
false
}
}
/// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
/// time), starting at the end of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last
/// up to `chunk_size-1` elements will be omitted but can be retrieved from
/// the [`remainder`] function of the iterator.
///
/// This struct is created by the [`rchunks_exact`] method on [slices].
///
/// [`rchunks_exact`]: ../../std/primitive.slice.html#method.rchunks_exact
/// [`remainder`]: ../../std/slice/struct.RChunksExact.html#method.remainder
/// [slices]: ../../std/primitive.slice.html
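///
/// # Examples
///
/// A short usage sketch (added for illustration): the leftover element at the
/// front is not yielded but remains available through [`remainder`].
///
/// ```
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let mut iter = slice.rchunks_exact(2);
/// assert_eq!(iter.next().unwrap(), &['e', 'm']);
/// assert_eq!(iter.next().unwrap(), &['o', 'r']);
/// assert!(iter.next().is_none());
/// assert_eq!(iter.remainder(), &['l']);
/// ```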
#[derive(Debug)]
#[stable(feature = "rchunks", since = "1.31.0")]
pub struct RChunksExact<'a, T: 'a> {
v: &'a [T],
rem: &'a [T],
chunk_size: usize,
}
impl<'a, T> RChunksExact<'a, T> {
/// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
#[stable(feature = "rchunks", since = "1.31.0")]
pub fn remainder(&self) -> &'a [T] {
self.rem
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> Clone for RChunksExact<'a, T> {
fn clone(&self) -> RChunksExact<'a, T> {
RChunksExact { v: self.v, rem: self.rem, chunk_size: self.chunk_size }
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> Iterator for RChunksExact<'a, T> {
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
self.v = fst;
Some(snd)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let n = self.v.len() / self.chunk_size;
(n, Some(n))
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
let (end, overflow) = n.overflowing_mul(self.chunk_size);
if end >= self.v.len() || overflow {
self.v = &[];
None
} else {
let (fst, _) = self.v.split_at(self.v.len() - end);
self.v = fst;
self.next()
}
}
#[inline]
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let (fst, snd) = self.v.split_at(self.chunk_size);
self.v = snd;
Some(fst)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &[];
None
} else {
// now that we know that `n` corresponds to a chunk,
// none of these operations can underflow/overflow
let offset = (len - n) * self.chunk_size;
let start = self.v.len() - offset;
let end = start + self.chunk_size;
let nth_back = &self.v[start..end];
self.v = &self.v[end..];
Some(nth_back)
}
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> ExactSizeIterator for RChunksExact<'a, T> {
fn is_empty(&self) -> bool {
self.v.is_empty()
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for RChunksExact<'_, T> {}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> FusedIterator for RChunksExact<'_, T> {}
#[doc(hidden)]
#[stable(feature = "rchunks", since = "1.31.0")]
unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> {
unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] {
let end = self.v.len() - i * self.chunk_size;
let start = end - self.chunk_size;
from_raw_parts(self.v.as_ptr().add(start), self.chunk_size)
}
fn may_have_side_effect() -> bool {
false
}
}
/// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
/// elements at a time), starting at the end of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last up to
/// `chunk_size-1` elements will be omitted but can be retrieved from the
/// [`into_remainder`] function of the iterator.
///
/// This struct is created by the [`rchunks_exact_mut`] method on [slices].
///
/// [`rchunks_exact_mut`]: ../../std/primitive.slice.html#method.rchunks_exact_mut
/// [`into_remainder`]: ../../std/slice/struct.RChunksExactMut.html#method.into_remainder
/// [slices]: ../../std/primitive.slice.html
#[derive(Debug)]
#[stable(feature = "rchunks", since = "1.31.0")]
pub struct RChunksExactMut<'a, T: 'a> {
v: &'a mut [T],
rem: &'a mut [T],
chunk_size: usize,
}
impl<'a, T> RChunksExactMut<'a, T> {
/// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
#[stable(feature = "rchunks", since = "1.31.0")]
pub fn into_remainder(self) -> &'a mut [T] {
self.rem
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> Iterator for RChunksExactMut<'a, T> {
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let tmp = mem::replace(&mut self.v, &mut []);
let tmp_len = tmp.len();
let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
self.v = head;
Some(tail)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let n = self.v.len() / self.chunk_size;
(n, Some(n))
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
let (end, overflow) = n.overflowing_mul(self.chunk_size);
if end >= self.v.len() || overflow {
self.v = &mut [];
None
} else {
let tmp = mem::replace(&mut self.v, &mut []);
let tmp_len = tmp.len();
let (fst, _) = tmp.split_at_mut(tmp_len - end);
self.v = fst;
self.next()
}
}
#[inline]
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(self.chunk_size);
self.v = tail;
Some(head)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &mut [];
None
} else {
// now that we know that `n` corresponds to a chunk,
// none of these operations can underflow/overflow
let offset = (len - n) * self.chunk_size;
let start = self.v.len() - offset;
let end = start + self.chunk_size;
let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
let (_, nth_back) = tmp.split_at_mut(start);
self.v = tail;
Some(nth_back)
}
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> ExactSizeIterator for RChunksExactMut<'_, T> {
fn is_empty(&self) -> bool {
self.v.is_empty()
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for RChunksExactMut<'_, T> {}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> FusedIterator for RChunksExactMut<'_, T> {}
#[doc(hidden)]
#[stable(feature = "rchunks", since = "1.31.0")]
unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> {
unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut [T] {
let end = self.v.len() - i * self.chunk_size;
let start = end - self.chunk_size;
from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size)
}
fn may_have_side_effect() -> bool {
false
}
}
//
// Free functions
//
/// Forms a slice from a pointer and a length.
///
/// The `len` argument is the number of **elements**, not the number of bytes.
///
/// # Safety
///
/// Behavior is undefined if any of the following conditions are violated:
///
/// * `data` must be [valid] for reads for `len * mem::size_of::<T>()` many bytes,
/// and it must be properly aligned. This means in particular:
///
/// * The entire memory range of this slice must be contained within a single allocated object!
/// Slices can never span across multiple allocated objects. See [below](#incorrect-usage)
/// for an example incorrectly not taking this into account.
/// * `data` must be non-null and aligned even for zero-length slices. One
/// reason for this is that enum layout optimizations may rely on references
/// (including slices of any length) being aligned and non-null to distinguish
/// them from other data. You can obtain a pointer that is usable as `data`
/// for zero-length slices using [`NonNull::dangling()`].
///
/// * The memory referenced by the returned slice must not be mutated for the duration
/// of lifetime `'a`, except inside an `UnsafeCell`.
///
/// * The total size `len * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
/// See the safety documentation of [`pointer::offset`].
///
/// # Caveat
///
/// The lifetime for the returned slice is inferred from its usage. To
/// prevent accidental misuse, it's suggested to tie the lifetime to whichever
/// source lifetime is safe in the context, such as by providing a helper
/// function taking the lifetime of a host value for the slice, or by explicit
/// annotation.
///
/// # Examples
///
/// ```
/// use std::slice;
///
/// // manifest a slice for a single element
/// let x = 42;
/// let ptr = &x as *const _;
/// let slice = unsafe { slice::from_raw_parts(ptr, 1) };
/// assert_eq!(slice[0], 42);
/// ```
///
/// ### Incorrect usage
///
/// The following `join_slices` function is **unsound** ⚠️
///
/// ```rust,no_run
/// use std::slice;
///
/// fn join_slices<'a, T>(fst: &'a [T], snd: &'a [T]) -> &'a [T] {
/// let fst_end = fst.as_ptr().wrapping_add(fst.len());
/// let snd_start = snd.as_ptr();
/// assert_eq!(fst_end, snd_start, "Slices must be contiguous!");
/// unsafe {
/// // The assertion above ensures `fst` and `snd` are contiguous, but they might
/// // still be contained within _different allocated objects_, in which case
/// // creating this slice is undefined behavior.
/// slice::from_raw_parts(fst.as_ptr(), fst.len() + snd.len())
/// }
/// }
///
/// fn main() {
/// // `a` and `b` are different allocated objects...
/// let a = 42;
/// let b = 27;
/// // ... which may nevertheless be laid out contiguously in memory: | a | b |
/// let _ = join_slices(slice::from_ref(&a), slice::from_ref(&b)); // UB
/// }
/// ```
///
/// [valid]: ../../std/ptr/index.html#safety
/// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
/// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
debug_assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice");
debug_assert!(
mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
"attempt to create slice covering at least half the address space"
);
&*ptr::slice_from_raw_parts(data, len)
}
/// Performs the same functionality as [`from_raw_parts`], except that a
/// mutable slice is returned.
///
/// # Safety
///
/// Behavior is undefined if any of the following conditions are violated:
///
/// * `data` must be [valid] for writes for `len * mem::size_of::<T>()` many bytes,
/// and it must be properly aligned. This means in particular:
///
/// * The entire memory range of this slice must be contained within a single allocated object!
/// Slices can never span across multiple allocated objects.
/// * `data` must be non-null and aligned even for zero-length slices. One
/// reason for this is that enum layout optimizations may rely on references
/// (including slices of any length) being aligned and non-null to distinguish
/// them from other data. You can obtain a pointer that is usable as `data`
/// for zero-length slices using [`NonNull::dangling()`].
///
/// * The memory referenced by the returned slice must not be accessed through any other pointer
/// (not derived from the return value) for the duration of lifetime `'a`.
/// Both read and write accesses are forbidden.
///
/// * The total size `len * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
/// See the safety documentation of [`pointer::offset`].
///
/// [valid]: ../../std/ptr/index.html#safety
/// [`NonNull::dangling()`]: ../../std/ptr/struct.NonNull.html#method.dangling
/// [`pointer::offset`]: ../../std/primitive.pointer.html#method.offset
/// [`from_raw_parts`]: ../../std/slice/fn.from_raw_parts.html
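///
/// # Examples
///
/// A brief sketch mirroring the [`from_raw_parts`] example: manifest a mutable
/// slice for a single element and write through it.
///
/// ```
/// use std::slice;
///
/// let mut x = 42;
/// let ptr = &mut x as *mut _;
/// let slice = unsafe { slice::from_raw_parts_mut(ptr, 1) };
/// slice[0] += 1;
/// assert_eq!(slice[0], 43);
/// ```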
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
debug_assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice");
debug_assert!(
mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
"attempt to create slice covering at least half the address space"
);
&mut *ptr::slice_from_raw_parts_mut(data, len)
}
/// Converts a reference to T into a slice of length 1 (without copying).
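///
/// # Examples
///
/// A minimal sketch:
///
/// ```
/// use std::slice;
///
/// let x = 7;
/// assert_eq!(slice::from_ref(&x), &[7]);
/// ```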
#[stable(feature = "from_ref", since = "1.28.0")]
pub fn from_ref<T>(s: &T) -> &[T] {
unsafe { from_raw_parts(s, 1) }
}
/// Converts a mutable reference to T into a slice of length 1 (without copying).
#[stable(feature = "from_ref", since = "1.28.0")]
pub fn from_mut<T>(s: &mut T) -> &mut [T] {
unsafe { from_raw_parts_mut(s, 1) }
}
// This function is public only because there is no other way to unit test heapsort.
#[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "none")]
#[doc(hidden)]
pub fn heapsort<T, F>(v: &mut [T], mut is_less: F)
where
F: FnMut(&T, &T) -> bool,
{
sort::heapsort(v, &mut is_less);
}
//
// Comparison traits
//
extern "C" {
/// Calls implementation provided memcmp.
///
/// Interprets the data as u8.
///
/// Returns 0 for equal, < 0 for less than and > 0 for greater
/// than.
// FIXME(#32610): Return type should be c_int
fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, B> PartialEq<[B]> for [A]
where
A: PartialEq<B>,
{
fn eq(&self, other: &[B]) -> bool {
SlicePartialEq::equal(self, other)
}
fn ne(&self, other: &[B]) -> bool {
SlicePartialEq::not_equal(self, other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq> Eq for [T] {}
/// Implements comparison of slices lexicographically.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for [T] {
fn cmp(&self, other: &[T]) -> Ordering {
SliceOrd::compare(self, other)
}
}
/// Implements comparison of slices lexicographically.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for [T] {
fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
SlicePartialOrd::partial_compare(self, other)
}
}
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
trait SlicePartialEq<B> {
fn equal(&self, other: &[B]) -> bool;
fn not_equal(&self, other: &[B]) -> bool {
!self.equal(other)
}
}
// Generic slice equality
impl<A, B> SlicePartialEq<B> for [A]
where
A: PartialEq<B>,
{
default fn equal(&self, other: &[B]) -> bool {
if self.len() != other.len() {
return false;
}
self.iter().zip(other.iter()).all(|(x, y)| x == y)
}
}
// Use an equal-pointer optimization when types are `Eq`
impl<A> SlicePartialEq<A> for [A]
where
A: PartialEq<A> + Eq,
{
default fn equal(&self, other: &[A]) -> bool {
if self.len() != other.len() {
return false;
}
if self.as_ptr() == other.as_ptr() {
return true;
}
self.iter().zip(other.iter()).all(|(x, y)| x == y)
}
}
// Use memcmp for bytewise equality when the types allow
impl<A> SlicePartialEq<A> for [A]
where
A: PartialEq<A> + BytewiseEquality,
{
fn equal(&self, other: &[A]) -> bool {
if self.len() != other.len() {
return false;
}
if self.as_ptr() == other.as_ptr() {
return true;
}
unsafe {
let size = mem::size_of_val(self);
memcmp(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
}
}
}
#[doc(hidden)]
// intermediate trait for specialization of slice's PartialOrd
trait SlicePartialOrd: Sized {
fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}
impl<A: PartialOrd> SlicePartialOrd for A {
default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
let l = cmp::min(left.len(), right.len());
// Slice to the loop iteration range to enable bound check
// elimination in the compiler
let lhs = &left[..l];
let rhs = &right[..l];
for i in 0..l {
match lhs[i].partial_cmp(&rhs[i]) {
Some(Ordering::Equal) => (),
non_eq => return non_eq,
}
}
left.len().partial_cmp(&right.len())
}
}
// This is the impl that we would like to have. Unfortunately it's not sound.
// See `partial_ord_slice.rs`.
/*
impl<A> SlicePartialOrd for A
where
A: Ord,
{
default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
Some(SliceOrd::compare(left, right))
}
}
*/
impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
Some(SliceOrd::compare(left, right))
}
}
trait AlwaysApplicableOrd: SliceOrd + Ord {}
macro_rules! always_applicable_ord {
($([$($p:tt)*] $t:ty,)*) => {
$(impl<$($p)*> AlwaysApplicableOrd for $t {})*
}
}
always_applicable_ord! {
[] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
[] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
[] bool, [] char,
[T: ?Sized] *const T, [T: ?Sized] *mut T,
[T: AlwaysApplicableOrd] &T,
[T: AlwaysApplicableOrd] &mut T,
[T: AlwaysApplicableOrd] Option<T>,
}
#[doc(hidden)]
// intermediate trait for specialization of slice's Ord
trait SliceOrd: Sized {
fn compare(left: &[Self], right: &[Self]) -> Ordering;
}
impl<A: Ord> SliceOrd for A {
default fn compare(left: &[Self], right: &[Self]) -> Ordering {
let l = cmp::min(left.len(), right.len());
// Slice to the loop iteration range to enable bound check
// elimination in the compiler
let lhs = &left[..l];
let rhs = &right[..l];
for i in 0..l {
match lhs[i].cmp(&rhs[i]) {
Ordering::Equal => (),
non_eq => return non_eq,
}
}
left.len().cmp(&right.len())
}
}
// memcmp compares a sequence of unsigned bytes lexicographically.
// this matches the order we want for [u8], but no others (not even [i8]).
impl SliceOrd for u8 {
#[inline]
fn compare(left: &[Self], right: &[Self]) -> Ordering {
let order =
unsafe { memcmp(left.as_ptr(), right.as_ptr(), cmp::min(left.len(), right.len())) };
if order == 0 {
left.len().cmp(&right.len())
} else if order < 0 {
Less
} else {
Greater
}
}
}
#[doc(hidden)]
/// Trait implemented for types that can be compared for equality using
/// their bytewise representation
trait BytewiseEquality: Eq + Copy {}
macro_rules! impl_marker_for {
($traitname:ident, $($ty:ty)*) => {
$(
impl $traitname for $ty { }
)*
}
}
impl_marker_for!(BytewiseEquality,
u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize char bool);
#[doc(hidden)]
unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
unsafe fn get_unchecked(&mut self, i: usize) -> &'a T {
&*self.ptr.as_ptr().add(i)
}
fn may_have_side_effect() -> bool {
false
}
}
#[doc(hidden)]
unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {
unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T {
&mut *self.ptr.as_ptr().add(i)
}
fn may_have_side_effect() -> bool {
false
}
}
trait SliceContains: Sized {
fn slice_contains(&self, x: &[Self]) -> bool;
}
impl<T> SliceContains for T
where
T: PartialEq,
{
default fn slice_contains(&self, x: &[Self]) -> bool {
x.iter().any(|y| *y == *self)
}
}
impl SliceContains for u8 {
fn slice_contains(&self, x: &[Self]) -> bool {
memchr::memchr(*self, x).is_some()
}
}
impl SliceContains for i8 {
fn slice_contains(&self, x: &[Self]) -> bool {
let byte = *self as u8;
let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
memchr::memchr(byte, bytes).is_some()
}
}
| 32.462788 | 104 | 0.532932 |
0335d2646e71550794e310d5ae4c398e5416faf6
| 2,161 |
use std::ptr::{drop_in_place, NonNull};
use crate::traits::Class;
/// An owning wrapper around a raw pointer. PhysX object creation returns pointers
/// that would normally be wrapped in a `Box`, but `Box` cannot be used here because
/// it has specific allocation requirements that the C++ code does not fulfill.
#[repr(transparent)]
pub struct Owner<T> {
ptr: NonNull<T>,
}
impl<T> Owner<T> {
/// Create a new owner from a raw pointer. Use the `from_raw` method on the type!
///
/// # Safety
/// Only one of these may be constructed per pointer. In particular,
/// this must not be called on a pointer attained via Class::as_mut_ptr. The
/// intended pattern is to wrap the raw FFI constructors in this call, so that
/// the amount of time spent dealing with raw pointers is as little as possible.
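    ///
    /// A hypothetical sketch of that pattern (`Thing` and `ffi_create_thing` are
    /// illustrative placeholders, not real bindings from this crate):
    ///
    /// ```ignore
    /// pub fn create_thing() -> Option<Owner<Thing>> {
    ///     // Wrap the raw constructor immediately so the pointer is owned from birth.
    ///     unsafe { Owner::from_raw(ffi_create_thing()) }
    /// }
    /// ```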
pub(crate) unsafe fn from_raw(ptr: *mut T) -> Option<Self> {
Some(Self {
ptr: NonNull::new(ptr)?,
})
}
/// Consumes the Owner without calling Drop and returns the raw pointer it was wrapping.
#[allow(clippy::mem_forget)]
pub fn into_ptr<S>(mut self) -> *mut S
where
T: Class<S>,
{
let ptr = self.as_mut_ptr();
std::mem::forget(self);
ptr
}
}
unsafe impl<T, S> Class<S> for Owner<T>
where
T: Class<S>,
{
fn as_ptr(&self) -> *const S {
self.as_ref().as_ptr()
}
fn as_mut_ptr(&mut self) -> *mut S {
self.as_mut().as_mut_ptr()
}
}
unsafe impl<T: Send> Send for Owner<T> {}
unsafe impl<T: Sync> Sync for Owner<T> {}
impl<T> Drop for Owner<T> {
fn drop(&mut self) {
unsafe { drop_in_place(self.ptr.as_ptr()) }
}
}
impl<T> AsRef<T> for Owner<T> {
fn as_ref(&self) -> &T {
unsafe { self.ptr.as_ref() }
}
}
impl<T> AsMut<T> for Owner<T> {
fn as_mut(&mut self) -> &mut T {
unsafe { self.ptr.as_mut() }
}
}
impl<T> std::ops::Deref for Owner<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
self.as_ref()
}
}
impl<T> std::ops::DerefMut for Owner<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.as_mut()
}
}
| 25.127907 | 92 | 0.597409 |
5d9bc81a87b2e4e5347bcebabf9fcb15788031c5
| 4,559 |
use crate::{
common::{DesktopEntry, ExecMode, UserPath},
error::Result,
CONFIG,
};
use once_cell::sync::Lazy;
use regex::RegexSet;
use serde::{Deserialize, Serialize};
use std::{
collections::HashMap,
ffi::OsString,
hash::{Hash, Hasher},
};
pub static REGEX_APPS: Lazy<RegexApps> = Lazy::new(RegexApps::populate);
// used for deserializing from config file
#[derive(Debug, Deserialize, Serialize)]
pub struct ConfigHandler {
exec: String,
#[serde(default)]
terminal: bool,
regexes: Vec<String>,
}
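// A hedged sketch of the TOML this struct is meant to deserialize from. The field
// names come from the struct above; the `[[handlers]]` table name is an assumption
// based on the `CONFIG.handlers` access in `RegexApps::populate` below.
//
// [[handlers]]
// exec = "freetube %u"
// terminal = false
// regexes = ['(https://)?(www\.)?youtu(be\.com|\.be)/*']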
impl ConfigHandler {
// convert to RegexHandler
fn compile_regex(&self) -> Result<RegexHandler> {
Ok(RegexHandler {
exec: self.exec.clone(),
terminal: self.terminal,
regexes: HandlerRegexSet::new(self.regexes.clone())?,
})
}
}
// wrapping RegexSet in a struct and implementing Eq and Hash for it
// saves us from having to implement them for RegexHandler as a whole
// although it complicates method calls a bit
#[derive(Debug, Clone)]
struct HandlerRegexSet(RegexSet);
impl HandlerRegexSet {
fn new<I, S>(exprs: I) -> Result<HandlerRegexSet>
where
S: AsRef<str>,
I: IntoIterator<Item = S>,
{
Ok(HandlerRegexSet(RegexSet::new(exprs)?))
}
fn is_match(&self, text: &str) -> bool {
self.0.is_match(text)
}
}
impl PartialEq for HandlerRegexSet {
fn eq(&self, other: &Self) -> bool {
self.0.patterns() == other.0.patterns()
}
}
impl Eq for HandlerRegexSet {}
impl Hash for HandlerRegexSet {
fn hash<H: Hasher>(&self, state: &mut H) {
self.0.patterns().hash(state);
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RegexHandler {
exec: String,
terminal: bool,
regexes: HandlerRegexSet,
}
impl RegexHandler {
// kludge together a fake DesktopEntry
// there's probably a better way to avoid reinventing the wheel with the program execution code
fn get_entry(&self) -> DesktopEntry {
DesktopEntry {
name: String::from(""),
exec: self.exec.clone(),
file_name: OsString::from(""),
terminal: self.terminal,
mimes: Vec::new(),
categories: HashMap::new(),
}
}
// open the given paths with handler
pub fn open(&self, args: Vec<String>) -> Result<()> {
self.get_entry().exec(ExecMode::Open, args)
}
fn is_match(&self, path: &str) -> bool {
self.regexes.is_match(path)
}
}
#[derive(Debug)]
pub struct RegexApps(Vec<RegexHandler>);
impl RegexApps {
// convert Config's ConfigHandlers
pub fn populate() -> Self {
RegexApps(
CONFIG
.handlers
.iter()
.filter_map(|handler| handler.compile_regex().ok())
.collect(),
)
}
// get matching handler
pub fn get_handler(&self, path: &UserPath) -> Option<RegexHandler> {
Some(
self.0
.iter()
.find(|app| app.is_match(&path.to_string()))?
.clone(),
)
}
}
#[cfg(test)]
mod tests {
use super::*;
use url::Url;
#[test]
fn regex_handlers() -> Result<()> {
let exec: &str = "freetube %u";
let regexes: &[String] =
&[String::from(r"(https://)?(www\.)?youtu(be\.com|\.be)/*")];
let config_handler = ConfigHandler {
exec: String::from(exec),
terminal: false,
regexes: regexes.to_owned(),
};
let regex_handler = config_handler
.compile_regex()
.expect("ConfigHandler::compile_regex() returned Err");
let expected_regex_handler = RegexHandler {
exec: String::from(exec),
terminal: false,
regexes: HandlerRegexSet::new(regexes)
.expect("Test regex is invalid"),
};
assert_eq!(regex_handler, expected_regex_handler);
let regex_apps = RegexApps(vec![regex_handler]);
assert_eq!(
regex_apps
.get_handler(&UserPath::Url(
Url::parse("https://youtu.be/dQw4w9WgXcQ").unwrap()
))
.expect("RegexApps::get_handler() returned None"),
expected_regex_handler
);
assert_eq!(
regex_apps.get_handler(&UserPath::Url(
Url::parse("https://en.wikipedia.org").unwrap()
)),
None
);
Ok(())
}
}
| 25.469274 | 99 | 0.563939 |
1d0643de2976809d58091207080000c0afd19009
| 7,395 |
use pir_8_emu::binutils::pir_8_emu::ExecutionConfig;
use std::env::temp_dir;
use std::fs;
#[test]
fn nonexistant() {
let root = temp_dir().join("pir_8_emu-test").join("binutils-pir_8_emu-ExecutionConfig-read_from_config_dir-ok-nonexistant");
assert_eq!(ExecutionConfig::read_from_config_dir(root.clone()).unwrap(), None);
}
#[test]
fn with_general_purpose_register_letters_with_auto_load_next_instruction_with_execute_full_instructions() {
let root = temp_dir()
.join("pir_8_emu-test")
.join("binutils-pir_8_emu-ExecutionConfig-read_from_config_dir-ok-\
with_general_purpose_register_letters_with_auto_load_next_instruction_with_execute_full_instructions");
fs::create_dir_all(&root).unwrap();
fs::write(root.join("exec_cfg.toml"),
r#"auto_load_next_instruction = true
execute_full_instructions = true
hewwo = "uwu"
general_purpose_register_letters = ['H', 'e', 'w', 'w', 'o', 'U', 'w', 'U']
mew = 123"#
.as_bytes())
.unwrap();
assert_eq!(ExecutionConfig::read_from_config_dir(root.clone()).unwrap(),
Some(ExecutionConfig {
auto_load_next_instruction: true,
execute_full_instructions: true,
general_purpose_register_letters: ['H', 'e', 'w', 'w', 'o', 'U', 'w', 'U'],
..ExecutionConfig::new()
}));
}
#[test]
fn with_general_purpose_register_letters_with_auto_load_next_instruction_without_execute_full_instructions() {
let root = temp_dir()
.join("pir_8_emu-test")
.join("binutils-pir_8_emu-ExecutionConfig-read_from_config_dir-ok-\
with_general_purpose_register_letters_with_auto_load_next_instruction_without_execute_full_instructions");
fs::create_dir_all(&root).unwrap();
fs::write(root.join("exec_cfg.toml"),
r#"auto_load_next_instruction = true
hewwo = "uwu"
general_purpose_register_letters = ['H', 'e', 'w', 'w', 'o', 'U', 'w', 'U']
mew = 123"#
.as_bytes())
.unwrap();
assert_eq!(ExecutionConfig::read_from_config_dir(root.clone()).unwrap(),
Some(ExecutionConfig {
auto_load_next_instruction: true,
general_purpose_register_letters: ['H', 'e', 'w', 'w', 'o', 'U', 'w', 'U'],
..ExecutionConfig::new()
}));
}
#[test]
fn with_general_purpose_register_letters_without_auto_load_next_instruction_with_execute_full_instructions() {
let root = temp_dir()
.join("pir_8_emu-test")
.join("binutils-pir_8_emu-ExecutionConfig-read_from_config_dir-ok-\
with_general_purpose_register_letters_without_auto_load_next_instruction_with_execute_full_instructions");
fs::create_dir_all(&root).unwrap();
fs::write(root.join("exec_cfg.toml"),
r#"execute_full_instructions = true
hewwo = "uwu"
general_purpose_register_letters = ['H', 'e', 'w', 'w', 'o', 'U', 'w', 'U']"#
.as_bytes())
.unwrap();
assert_eq!(ExecutionConfig::read_from_config_dir(root.clone()).unwrap(),
Some(ExecutionConfig {
execute_full_instructions: true,
general_purpose_register_letters: ['H', 'e', 'w', 'w', 'o', 'U', 'w', 'U'],
..ExecutionConfig::new()
}));
}
#[test]
fn with_general_purpose_register_letters_without_auto_load_next_instruction_without_execute_full_instructions() {
let root = temp_dir().join("pir_8_emu-test").join("binutils-pir_8_emu-ExecutionConfig-read_from_config_dir-ok-\
with_general_purpose_register_letters_without_auto_load_next_instruction_without_execute_full_instructions");
fs::create_dir_all(&root).unwrap();
fs::write(root.join("exec_cfg.toml"),
r#"hewwo = "uwu"
general_purpose_register_letters = ['H', 'e', 'w', 'w', 'o', 'U', 'w', 'U']
mew = 123"#
.as_bytes())
.unwrap();
assert_eq!(ExecutionConfig::read_from_config_dir(root.clone()).unwrap(),
Some(ExecutionConfig { general_purpose_register_letters: ['H', 'e', 'w', 'w', 'o', 'U', 'w', 'U'], ..ExecutionConfig::new() }));
}
#[test]
fn without_general_purpose_register_letters_with_auto_load_next_instruction_with_execute_full_instructions() {
let root = temp_dir()
.join("pir_8_emu-test")
.join("binutils-pir_8_emu-ExecutionConfig-read_from_config_dir-ok-\
without_general_purpose_register_letters_with_auto_load_next_instruction_with_execute_full_instructions");
fs::create_dir_all(&root).unwrap();
fs::write(root.join("exec_cfg.toml"),
r#"auto_load_next_instruction = true
execute_full_instructions = true
hewwo = "uwu"
mew = 123"#
.as_bytes())
.unwrap();
assert_eq!(ExecutionConfig::read_from_config_dir(root.clone()).unwrap(),
Some(ExecutionConfig {
auto_load_next_instruction: true,
execute_full_instructions: true,
..ExecutionConfig::new()
}));
}
#[test]
fn without_general_purpose_register_letters_with_auto_load_next_instruction_without_execute_full_instructions() {
let root = temp_dir()
.join("pir_8_emu-test")
.join("binutils-pir_8_emu-ExecutionConfig-read_from_config_dir-ok-\
without_general_purpose_register_letters_with_auto_load_next_instruction_without_execute_full_instructions");
fs::create_dir_all(&root).unwrap();
fs::write(root.join("exec_cfg.toml"),
r#"auto_load_next_instruction = true
hewwo = "uwu"
mew = 123"#
.as_bytes())
.unwrap();
assert_eq!(ExecutionConfig::read_from_config_dir(root.clone()).unwrap(),
Some(ExecutionConfig { auto_load_next_instruction: true, ..ExecutionConfig::new() }));
}
#[test]
fn without_general_purpose_register_letters_without_auto_load_next_instruction_with_execute_full_instructions() {
let root = temp_dir()
.join("pir_8_emu-test")
.join("binutils-pir_8_emu-ExecutionConfig-read_from_config_dir-ok-\
without_general_purpose_register_letters_without_auto_load_next_instruction_with_execute_full_instructions");
fs::create_dir_all(&root).unwrap();
fs::write(root.join("exec_cfg.toml"),
r#"execute_full_instructions = true
hewwo = "uwu""#
.as_bytes())
.unwrap();
assert_eq!(ExecutionConfig::read_from_config_dir(root.clone()).unwrap(),
Some(ExecutionConfig { execute_full_instructions: true, ..ExecutionConfig::new() }));
}
#[test]
fn empty() {
let root = temp_dir().join("pir_8_emu-test").join("binutils-pir_8_emu-ExecutionConfig-read_from_config_dir-ok-empty");
fs::create_dir_all(&root).unwrap();
fs::write(root.join("exec_cfg.toml"),
r#"hewwo = "uwu"
mew = 123"#
.as_bytes())
.unwrap();
assert_eq!(ExecutionConfig::read_from_config_dir(root.clone()).unwrap(), Some(ExecutionConfig::new()));
}
| 41.544944 | 143 | 0.631643 |
9c621e6121169551dccaf64dd687193482e650c7
| 31,271 |
extern crate nameless_clap as clap;
mod utils;
use clap::{App, Arg, ArgGroup, ErrorKind};
static REQUIRE_EQUALS: &str = "error: The following required arguments were not provided:
--opt=<FILE>
USAGE:
clap-test --opt=<FILE>
For more information try --help";
static REQUIRE_EQUALS_FILTERED: &str = "error: The following required arguments were not provided:
--opt=<FILE>
USAGE:
clap-test --opt=<FILE> --foo=<FILE>
For more information try --help";
static REQUIRE_EQUALS_FILTERED_GROUP: &str =
"error: The following required arguments were not provided:
--opt=<FILE>
USAGE:
clap-test --opt=<FILE> --foo=<FILE> <--g1=<FILE>|--g2=<FILE>>
For more information try --help";
static MISSING_REQ: &str = "error: The following required arguments were not provided:
--long-option-2 <option2>
<positional2>
USAGE:
clap-test --long-option-2 <option2> -F <positional2>
For more information try --help";
static COND_REQ_IN_USAGE: &str = "error: The following required arguments were not provided:
--output <output>
USAGE:
test --target <target> --input <input> --output <output>
For more information try --help";
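// The static strings above hold the error output expected from clap; the tests below compare
// them against the actual output via utils::compare_output().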
#[test]
fn flag_required() {
let result = App::new("flag_required")
.arg(Arg::from("-f, --flag 'some flag'").requires("color"))
.arg(Arg::from("-c, --color 'third flag'"))
.try_get_matches_from(vec!["", "-f"]);
assert!(result.is_err());
let err = result.err().unwrap();
assert_eq!(err.kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn flag_required_2() {
let m = App::new("flag_required")
.arg(Arg::from("-f, --flag 'some flag'").requires("color"))
.arg(Arg::from("-c, --color 'third flag'"))
.get_matches_from(vec!["", "-f", "-c"]);
assert!(m.is_present("color"));
assert!(m.is_present("flag"));
}
#[test]
fn option_required() {
let result = App::new("option_required")
.arg(Arg::from("-f [flag] 'some flag'").requires("c"))
.arg(Arg::from("-c [color] 'third flag'"))
.try_get_matches_from(vec!["", "-f", "val"]);
assert!(result.is_err());
let err = result.err().unwrap();
assert_eq!(err.kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn option_required_2() {
let m = App::new("option_required")
.arg(Arg::from("-f [flag] 'some flag'").requires("c"))
.arg(Arg::from("-c [color] 'third flag'"))
.get_matches_from(vec!["", "-f", "val", "-c", "other_val"]);
assert!(m.is_present("c"));
assert_eq!(m.value_of("c").unwrap(), "other_val");
assert!(m.is_present("f"));
assert_eq!(m.value_of("f").unwrap(), "val");
}
#[test]
fn positional_required() {
let result = App::new("positional_required")
.arg(Arg::new("flag").index(1).required(true))
.try_get_matches_from(vec![""]);
assert!(result.is_err());
let err = result.err().unwrap();
assert_eq!(err.kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn positional_required_2() {
let m = App::new("positional_required")
.arg(Arg::new("flag").index(1).required(true))
.get_matches_from(vec!["", "someval"]);
assert!(m.is_present("flag"));
assert_eq!(m.value_of("flag").unwrap(), "someval");
}
#[test]
fn group_required() {
let result = App::new("group_required")
.arg(Arg::from("-f, --flag 'some flag'"))
.group(ArgGroup::new("gr").required(true).arg("some").arg("other"))
.arg(Arg::from("--some 'some arg'"))
.arg(Arg::from("--other 'other arg'"))
.try_get_matches_from(vec!["", "-f"]);
assert!(result.is_err());
let err = result.err().unwrap();
assert_eq!(err.kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn group_required_2() {
let m = App::new("group_required")
.arg(Arg::from("-f, --flag 'some flag'"))
.group(ArgGroup::new("gr").required(true).arg("some").arg("other"))
.arg(Arg::from("--some 'some arg'"))
.arg(Arg::from("--other 'other arg'"))
.get_matches_from(vec!["", "-f", "--some"]);
assert!(m.is_present("some"));
assert!(!m.is_present("other"));
assert!(m.is_present("flag"));
}
#[test]
fn group_required_3() {
let m = App::new("group_required")
.arg(Arg::from("-f, --flag 'some flag'"))
.group(ArgGroup::new("gr").required(true).arg("some").arg("other"))
.arg(Arg::from("--some 'some arg'"))
.arg(Arg::from("--other 'other arg'"))
.get_matches_from(vec!["", "-f", "--other"]);
assert!(!m.is_present("some"));
assert!(m.is_present("other"));
assert!(m.is_present("flag"));
}
#[test]
fn arg_require_group() {
let result = App::new("arg_require_group")
.arg(Arg::from("-f, --flag 'some flag'").requires("gr"))
.group(ArgGroup::new("gr").arg("some").arg("other"))
.arg(Arg::from("--some 'some arg'"))
.arg(Arg::from("--other 'other arg'"))
.try_get_matches_from(vec!["", "-f"]);
assert!(result.is_err());
let err = result.err().unwrap();
assert_eq!(err.kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn arg_require_group_2() {
let res = App::new("arg_require_group")
.arg(Arg::from("-f, --flag 'some flag'").requires("gr"))
.group(ArgGroup::new("gr").arg("some").arg("other"))
.arg(Arg::from("--some 'some arg'"))
.arg(Arg::from("--other 'other arg'"))
.try_get_matches_from(vec!["", "-f", "--some"]);
assert!(res.is_ok());
let m = res.unwrap();
assert!(m.is_present("some"));
assert!(!m.is_present("other"));
assert!(m.is_present("flag"));
}
#[test]
fn arg_require_group_3() {
let res = App::new("arg_require_group")
.arg(Arg::from("-f, --flag 'some flag'").requires("gr"))
.group(ArgGroup::new("gr").arg("some").arg("other"))
.arg(Arg::from("--some 'some arg'"))
.arg(Arg::from("--other 'other arg'"))
.try_get_matches_from(vec!["", "-f", "--other"]);
assert!(res.is_ok());
let m = res.unwrap();
assert!(!m.is_present("some"));
assert!(m.is_present("other"));
assert!(m.is_present("flag"));
}
// REQUIRED_UNLESS
#[test]
fn issue_753() {
let m = App::new("test")
.arg(Arg::from(
"-l, --list 'List available interfaces (and stop there)'",
))
.arg(
Arg::from("-i, --iface=[INTERFACE] 'Ethernet interface for fetching NTP packets'")
.required_unless_present("list"),
)
.arg(
Arg::from("-f, --file=[TESTFILE] 'Fetch NTP packets from pcap file'")
.conflicts_with("iface")
.required_unless_present("list"),
)
.arg(
Arg::from("-s, --server=[SERVER_IP] 'NTP server IP address'")
.required_unless_present("list"),
)
.arg(Arg::from("-p, --port=[SERVER_PORT] 'NTP server port'").default_value("123"))
.try_get_matches_from(vec!["test", "--list"]);
assert!(m.is_ok());
}
#[test]
fn required_unless_present() {
let res = App::new("unlesstest")
.arg(
Arg::new("cfg")
.required_unless_present("dbg")
.takes_value(true)
.long("config"),
)
.arg(Arg::new("dbg").long("debug"))
.try_get_matches_from(vec!["unlesstest", "--debug"]);
assert!(res.is_ok());
let m = res.unwrap();
assert!(m.is_present("dbg"));
assert!(!m.is_present("cfg"));
}
#[test]
fn required_unless_err() {
let res = App::new("unlesstest")
.arg(
Arg::new("cfg")
.required_unless_present("dbg")
.takes_value(true)
.long("config"),
)
.arg(Arg::new("dbg").long("debug"))
.try_get_matches_from(vec!["unlesstest"]);
assert!(res.is_err());
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
// REQUIRED_UNLESS_ALL
#[test]
fn required_unless_present_all() {
let res = App::new("unlessall")
.arg(
Arg::new("cfg")
.required_unless_present_all(&["dbg", "infile"])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("dbg").long("debug"))
.arg(Arg::new("infile").short('i').takes_value(true))
.try_get_matches_from(vec!["unlessall", "--debug", "-i", "file"]);
assert!(res.is_ok());
let m = res.unwrap();
assert!(m.is_present("dbg"));
assert!(m.is_present("infile"));
assert!(!m.is_present("cfg"));
}
#[test]
fn required_unless_all_err() {
let res = App::new("unlessall")
.arg(
Arg::new("cfg")
.required_unless_present_all(&["dbg", "infile"])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("dbg").long("debug"))
.arg(Arg::new("infile").short('i').takes_value(true))
.try_get_matches_from(vec!["unlessall", "--debug"]);
assert!(res.is_err());
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
// REQUIRED_UNLESS_ONE
#[test]
fn required_unless_present_any() {
let res = App::new("unlessone")
.arg(
Arg::new("cfg")
.required_unless_present_any(&["dbg", "infile"])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("dbg").long("debug"))
.arg(Arg::new("infile").short('i').takes_value(true))
.try_get_matches_from(vec!["unlessone", "--debug"]);
assert!(res.is_ok());
let m = res.unwrap();
assert!(m.is_present("dbg"));
assert!(!m.is_present("cfg"));
}
#[test]
fn required_unless_any_2() {
    // This tests that required_unless_present_any works when the second arg in the array is used
    // instead of the first.
let res = App::new("unlessone")
.arg(
Arg::new("cfg")
.required_unless_present_any(&["dbg", "infile"])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("dbg").long("debug"))
.arg(Arg::new("infile").short('i').takes_value(true))
.try_get_matches_from(vec!["unlessone", "-i", "file"]);
assert!(res.is_ok());
let m = res.unwrap();
assert!(m.is_present("infile"));
assert!(!m.is_present("cfg"));
}
#[test]
fn required_unless_any_works_with_short() {
// GitHub issue: https://github.com/kbknapp/clap-rs/issues/1135
let res = App::new("unlessone")
.arg(Arg::new("a").conflicts_with("b").short('a'))
.arg(Arg::new("b").short('b'))
.arg(
Arg::new("x")
.short('x')
.required_unless_present_any(&["a", "b"]),
)
.try_get_matches_from(vec!["unlessone", "-a"]);
assert!(res.is_ok());
}
#[test]
fn required_unless_any_works_with_short_err() {
let res = App::new("unlessone")
.arg(Arg::new("a").conflicts_with("b").short('a'))
.arg(Arg::new("b").short('b'))
.arg(
Arg::new("x")
.short('x')
.required_unless_present_any(&["a", "b"]),
)
.try_get_matches_from(vec!["unlessone"]);
assert!(!res.is_ok());
}
#[test]
fn required_unless_any_works_without() {
let res = App::new("unlessone")
.arg(Arg::new("a").conflicts_with("b").short('a'))
.arg(Arg::new("b").short('b'))
.arg(Arg::new("x").required_unless_present_any(&["a", "b"]))
.try_get_matches_from(vec!["unlessone", "-a"]);
assert!(res.is_ok());
}
#[test]
fn required_unless_any_works_with_long() {
let res = App::new("unlessone")
.arg(Arg::new("a").conflicts_with("b").short('a'))
.arg(Arg::new("b").short('b'))
.arg(
Arg::new("x")
.long("x_is_the_option")
.required_unless_present_any(&["a", "b"]),
)
.try_get_matches_from(vec!["unlessone", "-a"]);
assert!(res.is_ok());
}
#[test]
fn required_unless_any_1() {
let res = App::new("unlessone")
.arg(
Arg::new("cfg")
.required_unless_present_any(&["dbg", "infile"])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("dbg").long("debug"))
.arg(Arg::new("infile").short('i').takes_value(true))
.try_get_matches_from(vec!["unlessone", "--debug"]);
assert!(res.is_ok());
let m = res.unwrap();
assert!(!m.is_present("infile"));
assert!(!m.is_present("cfg"));
assert!(m.is_present("dbg"));
}
#[test]
fn required_unless_any_err() {
let res = App::new("unlessone")
.arg(
Arg::new("cfg")
.required_unless_present_any(&["dbg", "infile"])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("dbg").long("debug"))
.arg(Arg::new("infile").short('i').takes_value(true))
.try_get_matches_from(vec!["unlessone"]);
assert!(res.is_err());
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn missing_required_output() {
assert!(utils::compare_output(
utils::complex_app(),
"clap-test -F",
MISSING_REQ,
true
));
}
// Conditional external requirements
#[test]
fn requires_if_present_val() {
let res = App::new("unlessone")
.arg(
Arg::new("cfg")
.requires_if("my.cfg", "extra")
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").long("extra"))
.try_get_matches_from(vec!["unlessone", "--config=my.cfg"]);
assert!(res.is_err());
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn requires_if_present_mult() {
let res = App::new("unlessone")
.arg(
Arg::new("cfg")
.requires_ifs(&[("my.cfg", "extra"), ("other.cfg", "other")])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").long("extra"))
.arg(Arg::new("other").long("other"))
.try_get_matches_from(vec!["unlessone", "--config=other.cfg"]);
assert!(res.is_err());
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn requires_if_present_mult_pass() {
let res = App::new("unlessone")
.arg(
Arg::new("cfg")
.requires_ifs(&[("my.cfg", "extra"), ("other.cfg", "other")])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").long("extra"))
.arg(Arg::new("other").long("other"))
.try_get_matches_from(vec!["unlessone", "--config=some.cfg"]);
assert!(res.is_ok());
}
#[test]
fn requires_if_present_val_no_present_pass() {
let res = App::new("unlessone")
.arg(
Arg::new("cfg")
.requires_if("my.cfg", "extra")
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").long("extra"))
.try_get_matches_from(vec!["unlessone"]);
assert!(res.is_ok());
}
// Conditionally required
#[test]
fn required_if_val_present_pass() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq("extra", "val")
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").takes_value(true).long("extra"))
.try_get_matches_from(vec!["ri", "--extra", "val", "--config", "my.cfg"]);
assert!(res.is_ok());
}
#[test]
fn required_if_val_present_fail() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq("extra", "val")
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").takes_value(true).long("extra"))
.try_get_matches_from(vec!["ri", "--extra", "val"]);
assert!(res.is_err());
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn required_if_val_present_case_insensitive_pass() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq("extra", "Val")
.takes_value(true)
.long("config"),
)
.arg(
Arg::new("extra")
.takes_value(true)
.long("extra")
.case_insensitive(true),
)
.try_get_matches_from(vec!["ri", "--extra", "vaL", "--config", "my.cfg"]);
assert!(res.is_ok());
}
#[test]
fn required_if_val_present_case_insensitive_fail() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq("extra", "Val")
.takes_value(true)
.long("config"),
)
.arg(
Arg::new("extra")
.takes_value(true)
.long("extra")
.case_insensitive(true),
)
.try_get_matches_from(vec!["ri", "--extra", "vaL"]);
assert!(res.is_err());
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn required_if_all_values_present_pass() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq_all(&[("extra", "val"), ("option", "spec")])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").takes_value(true).long("extra"))
.arg(Arg::new("option").takes_value(true).long("option"))
.try_get_matches_from(vec![
"ri", "--extra", "val", "--option", "spec", "--config", "my.cfg",
]);
assert!(res.is_ok());
}
#[test]
fn required_if_some_values_present_pass() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq_all(&[("extra", "val"), ("option", "spec")])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").takes_value(true).long("extra"))
.arg(Arg::new("option").takes_value(true).long("option"))
.try_get_matches_from(vec!["ri", "--extra", "val"]);
assert!(res.is_ok());
}
#[test]
fn required_if_all_values_present_fail() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq_all(&[("extra", "val"), ("option", "spec")])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").takes_value(true).long("extra"))
.arg(Arg::new("option").takes_value(true).long("option"))
.try_get_matches_from(vec!["ri", "--extra", "val", "--option", "spec"]);
assert!(res.is_err());
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn required_if_any_all_values_present_pass() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq_all(&[("extra", "val"), ("option", "spec")])
.required_if_eq_any(&[("extra", "val2"), ("option", "spec2")])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").takes_value(true).long("extra"))
.arg(Arg::new("option").takes_value(true).long("option"))
.try_get_matches_from(vec![
"ri", "--extra", "val", "--option", "spec", "--config", "my.cfg",
]);
assert!(res.is_ok());
}
#[test]
fn required_if_any_all_values_present_fail() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq_all(&[("extra", "val"), ("option", "spec")])
.required_if_eq_any(&[("extra", "val2"), ("option", "spec2")])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").takes_value(true).long("extra"))
.arg(Arg::new("option").takes_value(true).long("option"))
.try_get_matches_from(vec!["ri", "--extra", "val", "--option", "spec"]);
assert!(res.is_err());
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn list_correct_required_args() {
let app = App::new("Test app")
.version("1.0")
.author("F0x06")
.about("Arg test")
.arg(
Arg::new("target")
.takes_value(true)
.required(true)
.possible_values(&["file", "stdout"])
.long("target"),
)
.arg(
Arg::new("input")
.takes_value(true)
.required(true)
.long("input"),
)
.arg(
Arg::new("output")
.takes_value(true)
.required(true)
.long("output"),
);
assert!(utils::compare_output(
app,
"test --input somepath --target file",
COND_REQ_IN_USAGE,
true
));
}
#[test]
fn required_if_val_present_fail_error_output() {
let app = App::new("Test app")
.version("1.0")
.author("F0x06")
.about("Arg test")
.arg(
Arg::new("target")
.takes_value(true)
.required(true)
.possible_values(&["file", "stdout"])
.long("target"),
)
.arg(
Arg::new("input")
.takes_value(true)
.required(true)
.long("input"),
)
.arg(
Arg::new("output")
.takes_value(true)
.required_if_eq("target", "file")
.long("output"),
);
assert!(utils::compare_output(
app,
"test --input somepath --target file",
COND_REQ_IN_USAGE,
true
));
}
#[test]
fn required_if_wrong_val() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq("extra", "val")
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").takes_value(true).long("extra"))
.try_get_matches_from(vec!["ri", "--extra", "other"]);
assert!(res.is_ok());
}
#[test]
fn required_ifs_val_present_pass() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq_any(&[("extra", "val"), ("option", "spec")])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("option").takes_value(true).long("option"))
.arg(Arg::new("extra").takes_value(true).long("extra"))
.try_get_matches_from(vec!["ri", "--option", "spec", "--config", "my.cfg"]);
assert!(res.is_ok());
}
#[test]
fn required_ifs_val_present_fail() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq_any(&[("extra", "val"), ("option", "spec")])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").takes_value(true).long("extra"))
.arg(Arg::new("option").takes_value(true).long("option"))
.try_get_matches_from(vec!["ri", "--option", "spec"]);
assert!(res.is_err());
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn required_ifs_wrong_val() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq_any(&[("extra", "val"), ("option", "spec")])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").takes_value(true).long("extra"))
.arg(Arg::new("option").takes_value(true).long("option"))
.try_get_matches_from(vec!["ri", "--option", "other"]);
assert!(res.is_ok());
}
#[test]
fn required_ifs_wrong_val_mult_fail() {
let res = App::new("ri")
.arg(
Arg::new("cfg")
.required_if_eq_any(&[("extra", "val"), ("option", "spec")])
.takes_value(true)
.long("config"),
)
.arg(Arg::new("extra").takes_value(true).long("extra"))
.arg(Arg::new("option").takes_value(true).long("option"))
.try_get_matches_from(vec!["ri", "--extra", "other", "--option", "spec"]);
assert!(res.is_err());
assert_eq!(res.unwrap_err().kind, ErrorKind::MissingRequiredArgument);
}
#[test]
fn require_eq() {
let app = App::new("clap-test").version("v1.4.8").arg(
Arg::new("opt")
.long("opt")
.short('o')
.required(true)
.require_equals(true)
.value_name("FILE")
.about("some"),
);
assert!(utils::compare_output(
app,
"clap-test",
REQUIRE_EQUALS,
true
));
}
#[test]
fn require_eq_filtered() {
let app = App::new("clap-test")
.version("v1.4.8")
.arg(
Arg::new("opt")
.long("opt")
.short('o')
.required(true)
.require_equals(true)
.value_name("FILE")
.about("some"),
)
.arg(
Arg::new("foo")
.long("foo")
.short('f')
.required(true)
.require_equals(true)
.value_name("FILE")
.about("some other arg"),
);
assert!(utils::compare_output(
app,
"clap-test -f=blah",
REQUIRE_EQUALS_FILTERED,
true
));
}
#[test]
fn require_eq_filtered_group() {
let app = App::new("clap-test")
.version("v1.4.8")
.arg(
Arg::new("opt")
.long("opt")
.short('o')
.required(true)
.require_equals(true)
.value_name("FILE")
.about("some"),
)
.arg(
Arg::new("foo")
.long("foo")
.short('f')
.required(true)
.require_equals(true)
.value_name("FILE")
.about("some other arg"),
)
.arg(
Arg::new("g1")
.long("g1")
.require_equals(true)
.value_name("FILE"),
)
.arg(
Arg::new("g2")
.long("g2")
.require_equals(true)
.value_name("FILE"),
)
.group(
ArgGroup::new("test_group")
.args(&["g1", "g2"])
.required(true),
);
assert!(utils::compare_output(
app,
"clap-test -f=blah --g1=blah",
REQUIRE_EQUALS_FILTERED_GROUP,
true
));
}
static ISSUE_1158: &str = "error: The following required arguments were not provided:
-x <X>
-y <Y>
-z <Z>
USAGE:
example -x <X> -y <Y> -z <Z> <ID>
For more information try --help";
fn issue_1158_app() -> App<'static> {
App::new("example")
.arg(
Arg::from("-c, --config [FILE] 'Custom config file.'")
.required_unless_present("ID")
.conflicts_with("ID"),
)
.arg(
Arg::from("[ID] 'ID'")
.required_unless_present("config")
.conflicts_with("config")
.requires_all(&["x", "y", "z"]),
)
.arg(Arg::from("-x [X] 'X'"))
.arg(Arg::from("-y [Y] 'Y'"))
.arg(Arg::from("-z [Z] 'Z'"))
}
#[test]
fn multiple_required_unless_usage_printing() {
    static MULTIPLE_REQUIRED_UNLESS_USAGE: &str =
"error: The following required arguments were not provided:
--a <a>
--b <b>
USAGE:
test --c <c> --a <a> --b <b>
For more information try --help";
let app = App::new("test")
.arg(
Arg::new("a")
.long("a")
.takes_value(true)
.required_unless_present("b")
.conflicts_with("b"),
)
.arg(
Arg::new("b")
.long("b")
.takes_value(true)
.required_unless_present("a")
.conflicts_with("a"),
)
.arg(
Arg::new("c")
.long("c")
.takes_value(true)
.required_unless_present("d")
.conflicts_with("d"),
)
.arg(
Arg::new("d")
.long("d")
.takes_value(true)
.required_unless_present("c")
.conflicts_with("c"),
);
assert!(utils::compare_output(
app,
"test --c asd",
MULTIPLE_REQUIRED_UNLESS_USAGE,
true
));
}
#[test]
fn issue_1158_conflicting_requirements() {
let app = issue_1158_app();
assert!(utils::compare_output(app, "example id", ISSUE_1158, true));
}
#[test]
fn issue_1158_conflicting_requirements_rev() {
let res = issue_1158_app().try_get_matches_from(&["", "--config", "some.conf"]);
assert!(res.is_ok());
}
#[test]
fn issue_1643_args_mutually_require_each_other() {
use clap::*;
let app = App::new("test")
.arg(
Arg::new("relation_id")
.about("The relation id to get the data from")
.long("relation-id")
.short('r')
.takes_value(true)
.requires("remote_unit_name"),
)
.arg(
Arg::new("remote_unit_name")
.about("The name of the remote unit to get data from")
.long("remote-unit")
.short('u')
.takes_value(true)
.requires("relation_id"),
);
app.get_matches_from(&["test", "-u", "hello", "-r", "farewell"]);
}
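// The remaining tests run only in debug builds, where clap panics if a `requires*`,
// `required_if_eq*`, or `required_unless*` rule references an argument or group that
// does not exist (see the expected panic messages below).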
#[cfg(debug_assertions)]
#[test]
#[should_panic = "Argument or group 'extra' specified in 'requires*' for 'config' does not exist"]
fn requires_invalid_arg() {
let _ = App::new("prog")
.arg(Arg::new("config").requires("extra").long("config"))
.try_get_matches_from(vec!["", "--config"]);
}
#[cfg(debug_assertions)]
#[test]
#[should_panic = "Argument or group 'extra' specified in 'requires*' for 'config' does not exist"]
fn requires_if_invalid_arg() {
let _ = App::new("prog")
.arg(
Arg::new("config")
.requires_if("val", "extra")
.long("config"),
)
.try_get_matches_from(vec!["", "--config"]);
}
#[cfg(debug_assertions)]
#[test]
#[should_panic = "Argument or group 'extra' specified in 'required_if_eq*' for 'config' does not exist"]
fn required_if_invalid_arg() {
let _ = App::new("prog")
.arg(
Arg::new("config")
.required_if_eq("extra", "val")
.long("config"),
)
.try_get_matches_from(vec!["", "--config"]);
}
#[cfg(debug_assertions)]
#[test]
#[should_panic = "Argument or group 'extra' specified in 'required_unless*' for 'config' does not exist"]
fn required_unless_invalid_arg() {
let _ = App::new("prog")
.arg(
Arg::new("config")
.required_unless_present("extra")
.long("config"),
)
.try_get_matches_from(vec![""]);
}
| 29.143523 | 105 | 0.517732 |
22d263783f30394e76cb28296521f5e763b8e169
| 3,498 |
use std::{error::Error, net::IpAddr, path::PathBuf, time::Instant};
use clap::{AppSettings, Clap};
use mimalloc::MiMalloc;
use quic_test::{setup_logging, QUIC_PROTO};
use quinn::{Certificate, ClientConfigBuilder, Endpoint};
use tokio::{fs::File, io::AsyncReadExt};
use tracing::{info, trace};
#[global_allocator]
static GLOBAL: MiMalloc = MiMalloc;
#[tokio::main(flavor = "current_thread")]
async fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
let options = CliOpt::parse();
setup_logging(options.verbose)?;
let cert = setup_cert(options.cert).await?;
let mut client_config = ClientConfigBuilder::default();
client_config
.add_certificate_authority(cert)?
.protocols(QUIC_PROTO);
if options.keylog {
client_config.enable_keylog();
}
let mut endpoint = Endpoint::builder();
endpoint.default_client_config(client_config.build());
let (endpoint, _) = endpoint.bind(&"[::]:0".parse()?)?;
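    // The server's QUIC port is not known in advance, so it is fetched over plain HTTP first.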
trace!("Getting port from http://[{}]:{}/", options.ip, options.port);
let resp = reqwest::get(format!("http://[{}]:{}/", options.ip, options.port)).await?.text().await?.parse()?;
trace!("Got port {}", resp);
let new_conn = endpoint
.connect(&(options.ip, resp).into(), "localhost")?
.await?;
let mut send = new_conn.connection.open_uni().await?;
let mut file = File::open(&options.file).await?;
let name = options
.transfer_name
.as_ref()
.map(|s| s as &str)
.unwrap_or_else(|| {
options
.file
.file_name()
.expect("No file name")
.to_str()
.expect("Failed to convert file name")
});
let len = file.metadata().await?.len();
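    // Wire format sent over the unidirectional stream:
    // [name length: u64 LE][name bytes][file length: u64 LE][raw file contents]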
{
let file_name_len = (name.len() as u64).to_le_bytes();
send.write_all(&file_name_len).await?;
}
{
send.write_all(name.as_bytes()).await?;
}
{
let file_len = len.to_le_bytes();
send.write_all(&file_len).await?;
}
let len = len as usize;
let start = Instant::now();
let mut buf = [0; 4096];
loop {
let len = file.read(&mut buf).await?;
if len == 0 {
break;
}
send.write_all(&buf[0..len]).await?;
}
let end = start.elapsed();
send.finish().await?;
info!(
"Finished transfering in {:?}, average speed: {} MiB/s",
end,
(len as f64 / (1024.0 * 1024.0)) / end.as_secs_f64()
);
Ok(())
}
async fn setup_cert(
cert_path: PathBuf,
) -> Result<Certificate, Box<dyn Error + Send + Sync + 'static>> {
Ok(Certificate::from_der(&tokio::fs::read(cert_path).await?)?)
}
/// QUICCtest client CLI options
#[derive(Clap, Clone)]
#[clap(version = "0.1", author = "Dherse <[email protected]>")]
#[clap(setting = AppSettings::ColoredHelp)]
pub struct CliOpt {
/// A level of verbosity (not present = error only, -v = warnings, -vv = info, -vvv = debug, -vvvv = trace)
#[clap(short, long, parse(from_occurrences))]
pub verbose: u8,
/// Keylog the keys of the server
#[clap(long, short = 'l')]
pub keylog: bool,
/// TLS certificate key in PEM format
pub cert: PathBuf,
/// Path to the file to be sent
pub file: PathBuf,
/// IP address to send to
pub ip: IpAddr,
/// The port of the server
pub port: u16,
/// The name of the transfered file
pub transfer_name: Option<String>,
}
| 25.532847 | 112 | 0.584334 |
ab70f72dc613f02d93f7f9acf4045c289bc5806b
| 7,401 |
use crate::back::write::create_informational_target_machine;
use crate::llvm;
use libc::c_int;
use rustc_codegen_ssa::target_features::supported_target_features;
use rustc_data_structures::fx::FxHashSet;
use rustc_feature::UnstableFeatures;
use rustc_middle::bug;
use rustc_session::config::PrintRequest;
use rustc_session::Session;
use rustc_span::symbol::Symbol;
use rustc_target::spec::{MergeFunctions, PanicStrategy};
use std::ffi::CString;
use std::slice;
use std::str;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Once;
static POISONED: AtomicBool = AtomicBool::new(false);
static INIT: Once = Once::new();
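// `init` must run once before any other LLVM call; if enabling multithreading fails, the
// `POISONED` flag makes subsequent entry points abort instead of running single-threaded.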
pub(crate) fn init(sess: &Session) {
unsafe {
// Before we touch LLVM, make sure that multithreading is enabled.
INIT.call_once(|| {
if llvm::LLVMStartMultithreaded() != 1 {
// use an extra bool to make sure that all future usage of LLVM
// cannot proceed despite the Once not running more than once.
POISONED.store(true, Ordering::SeqCst);
}
configure_llvm(sess);
});
if POISONED.load(Ordering::SeqCst) {
bug!("couldn't enable multi-threaded LLVM");
}
}
}
fn require_inited() {
INIT.call_once(|| bug!("llvm is not initialized"));
if POISONED.load(Ordering::SeqCst) {
bug!("couldn't enable multi-threaded LLVM");
}
}
unsafe fn configure_llvm(sess: &Session) {
let n_args = sess.opts.cg.llvm_args.len() + sess.target.llvm_args.len();
let mut llvm_c_strs = Vec::with_capacity(n_args + 1);
let mut llvm_args = Vec::with_capacity(n_args + 1);
llvm::LLVMRustInstallFatalErrorHandler();
fn llvm_arg_to_arg_name(full_arg: &str) -> &str {
full_arg.trim().split(|c: char| c == '=' || c.is_whitespace()).next().unwrap_or("")
}
let cg_opts = sess.opts.cg.llvm_args.iter();
let tg_opts = sess.target.llvm_args.iter();
let sess_args = cg_opts.chain(tg_opts);
let user_specified_args: FxHashSet<_> =
sess_args.clone().map(|s| llvm_arg_to_arg_name(s)).filter(|s| !s.is_empty()).collect();
{
// This adds the given argument to LLVM. Unless `force` is true
// user specified arguments are *not* overridden.
let mut add = |arg: &str, force: bool| {
if force || !user_specified_args.contains(llvm_arg_to_arg_name(arg)) {
let s = CString::new(arg).unwrap();
llvm_args.push(s.as_ptr());
llvm_c_strs.push(s);
}
};
// Set the llvm "program name" to make usage and invalid argument messages more clear.
add("rustc -Cllvm-args=\"...\" with", true);
if sess.time_llvm_passes() {
add("-time-passes", false);
}
if sess.print_llvm_passes() {
add("-debug-pass=Structure", false);
}
if !sess.opts.debugging_opts.no_generate_arange_section {
add("-generate-arange-section", false);
}
match sess.opts.debugging_opts.merge_functions.unwrap_or(sess.target.merge_functions) {
MergeFunctions::Disabled | MergeFunctions::Trampolines => {}
MergeFunctions::Aliases => {
add("-mergefunc-use-aliases", false);
}
}
if sess.target.os == "emscripten" && sess.panic_strategy() == PanicStrategy::Unwind {
add("-enable-emscripten-cxx-exceptions", false);
}
// HACK(eddyb) LLVM inserts `llvm.assume` calls to preserve align attributes
// during inlining. Unfortunately these may block other optimizations.
add("-preserve-alignment-assumptions-during-inlining=false", false);
for arg in sess_args {
add(&(*arg), true);
}
}
if sess.opts.debugging_opts.llvm_time_trace && get_major_version() >= 9 {
// time-trace is not thread safe and running it in parallel will cause seg faults.
if !sess.opts.debugging_opts.no_parallel_llvm {
            bug!("`-Z llvm-time-trace` requires `-Z no-parallel-llvm`")
}
llvm::LLVMTimeTraceProfilerInitialize();
}
llvm::LLVMInitializePasses();
rustc_llvm::initialize_available_targets();
llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int, llvm_args.as_ptr());
}
pub fn time_trace_profiler_finish(file_name: &str) {
unsafe {
if get_major_version() >= 9 {
let file_name = CString::new(file_name).unwrap();
llvm::LLVMTimeTraceProfilerFinish(file_name.as_ptr());
}
}
}
// WARNING: the features after applying `to_llvm_feature` must be known
// to LLVM or the feature detection code will walk past the end of the feature
// array, leading to crashes.
pub fn to_llvm_feature<'a>(sess: &Session, s: &'a str) -> &'a str {
let arch = if sess.target.arch == "x86_64" { "x86" } else { &*sess.target.arch };
match (arch, s) {
("x86", "pclmulqdq") => "pclmul",
("x86", "rdrand") => "rdrnd",
("x86", "bmi1") => "bmi",
("x86", "cmpxchg16b") => "cx16",
("aarch64", "fp") => "fp-armv8",
("aarch64", "fp16") => "fullfp16",
(_, s) => s,
}
}
pub fn target_features(sess: &Session) -> Vec<Symbol> {
let target_machine = create_informational_target_machine(sess);
supported_target_features(sess)
.iter()
.filter_map(|&(feature, gate)| {
if UnstableFeatures::from_environment().is_nightly_build() || gate.is_none() {
Some(feature)
} else {
None
}
})
.filter(|feature| {
let llvm_feature = to_llvm_feature(sess, feature);
let cstr = CString::new(llvm_feature).unwrap();
unsafe { llvm::LLVMRustHasFeature(target_machine, cstr.as_ptr()) }
})
.map(|feature| Symbol::intern(feature))
.collect()
}
pub fn print_version() {
// Can be called without initializing LLVM
unsafe {
println!("LLVM version: {}.{}", llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());
}
}
pub fn get_major_version() -> u32 {
unsafe { llvm::LLVMRustVersionMajor() }
}
pub fn print_passes() {
// Can be called without initializing LLVM
unsafe {
llvm::LLVMRustPrintPasses();
}
}
pub(crate) fn print(req: PrintRequest, sess: &Session) {
require_inited();
let tm = create_informational_target_machine(sess);
unsafe {
match req {
PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm),
PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm),
_ => bug!("rustc_codegen_llvm can't handle print request: {:?}", req),
}
}
}
fn handle_native(name: &str) -> &str {
if name != "native" {
return name;
}
unsafe {
let mut len = 0;
let ptr = llvm::LLVMRustGetHostCPUName(&mut len);
str::from_utf8(slice::from_raw_parts(ptr as *const u8, len)).unwrap()
}
}
pub fn target_cpu(sess: &Session) -> &str {
let name = match sess.opts.cg.target_cpu {
Some(ref s) => &**s,
None => &*sess.target.cpu,
};
handle_native(name)
}
pub fn tune_cpu(sess: &Session) -> Option<&str> {
match sess.opts.debugging_opts.tune_cpu {
Some(ref s) => Some(handle_native(&**s)),
None => None,
}
}
| 32.893333 | 100 | 0.609918 |
f40442f1cabe9ae9d832e6748385e926b9b36be7
| 44,995 |
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
smoke_test_environment::SmokeTestEnvironment,
test_utils::{
diem_swarm_utils::{
get_json_rpc_diem_interface, get_op_tool, load_backend_storage, load_diem_root_storage,
load_node_config,
},
wait_for_transaction_on_all_nodes, write_key_to_file_bcs_format,
write_key_to_file_hex_format,
},
};
use diem_config::{
config::{PeerRole, SecureBackend},
network_id::NetworkId,
};
use diem_crypto::{
ed25519::{Ed25519PrivateKey, Ed25519PublicKey},
x25519, HashValue, PrivateKey, Uniform, ValidCryptoMaterialStringExt,
};
use diem_global_constants::{
CONSENSUS_KEY, FULLNODE_NETWORK_KEY, GENESIS_WAYPOINT, OPERATOR_ACCOUNT, OPERATOR_KEY,
OWNER_ACCOUNT, OWNER_KEY, VALIDATOR_NETWORK_ADDRESS_KEYS, VALIDATOR_NETWORK_KEY, WAYPOINT,
};
use diem_key_manager::diem_interface::DiemInterface;
use diem_management::storage::to_x25519;
use diem_operational_tool::{
keys::{EncodingType, KeyType},
test_helper::OperationalTool,
};
use diem_sdk::client::views::VMStatusView;
use diem_secure_storage::{CryptoStorage, KVStorage, Storage};
use diem_temppath::TempPath;
use diem_types::{
account_address::{from_identity_public_key, AccountAddress},
block_info::BlockInfo,
ledger_info::LedgerInfo,
network_address::NetworkAddress,
transaction::authenticator::AuthenticationKey,
waypoint::Waypoint,
};
use rand::rngs::OsRng;
use std::{
collections::HashSet,
convert::{TryFrom, TryInto},
fs,
path::PathBuf,
str::FromStr,
};
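// Each test spins up a local validator swarm through launch_swarm_with_op_tool_and_backend(),
// which (as used below) yields the swarm environment, an operational tool for one validator,
// that validator's secure-storage backend, and a handle to the storage itself.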
#[test]
fn test_account_resource() {
let (_env, op_tool, _, storage) = launch_swarm_with_op_tool_and_backend(1, 0);
// Fetch the owner account resource
let owner_account = storage.get::<AccountAddress>(OWNER_ACCOUNT).unwrap().value;
let account_resource = op_tool.account_resource(owner_account).unwrap();
assert_eq!(owner_account, account_resource.account);
assert_eq!(0, account_resource.sequence_number);
// Fetch the operator account resource
let operator_account = storage
.get::<AccountAddress>(OPERATOR_ACCOUNT)
.unwrap()
.value;
let account_resource = op_tool.account_resource(operator_account).unwrap();
assert_eq!(operator_account, account_resource.account);
assert_eq!(0, account_resource.sequence_number);
// Verify operator key
let on_chain_operator_key = hex::decode(account_resource.authentication_key).unwrap();
let operator_key = storage.get_public_key(OPERATOR_KEY).unwrap().public_key;
assert_eq!(
AuthenticationKey::ed25519(&operator_key),
AuthenticationKey::try_from(on_chain_operator_key).unwrap()
);
}
#[test]
fn test_auto_validate_options() {
let (env, op_tool, backend, _) = launch_swarm_with_op_tool_and_backend(1, 0);
// Rotate the operator key with a really low timeout to prevent validation
let (txn_ctx, _) = op_tool
.rotate_operator_key_with_custom_validation(&backend, false, Some(1), Some(0))
.unwrap();
assert!(txn_ctx.execution_result.is_none());
// Now wait for transaction execution
let client = env.get_validator_client(0, None);
client
.wait_for_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
// Verify that the transaction was executed correctly
let txn_ctx = op_tool
.validate_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
    // Rotate the operator key with a custom timeout of 1 minute and a custom sleep interval
let (txn_ctx, _) = op_tool
.rotate_operator_key_with_custom_validation(&backend, false, Some(2), Some(60))
.unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
}
#[test]
fn test_consensus_key_rotation() {
let (_env, op_tool, backend, mut storage) = launch_swarm_with_op_tool_and_backend(1, 0);
// Rotate the consensus key
let (txn_ctx, new_consensus_key) = op_tool.rotate_consensus_key(&backend, false).unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Verify that the config has been updated correctly with the new consensus key
let validator_account = storage.get::<AccountAddress>(OWNER_ACCOUNT).unwrap().value;
let config_consensus_key = op_tool
.validator_config(validator_account, Some(&backend))
.unwrap()
.consensus_public_key;
assert_eq!(new_consensus_key, config_consensus_key);
// Verify that the validator set info contains the new consensus key
let info_consensus_key = op_tool
.validator_set(Some(validator_account), Some(&backend))
.unwrap()[0]
.consensus_public_key
.clone();
assert_eq!(new_consensus_key, info_consensus_key);
// Rotate the consensus key in storage manually and perform another rotation using the op_tool.
    // Here, we expect the op_tool to see that the consensus key in storage doesn't match the one
// on-chain, and thus it should simply forward a transaction to the blockchain.
let rotated_consensus_key = storage.rotate_key(CONSENSUS_KEY).unwrap();
let (txn_ctx, new_consensus_key) = op_tool.rotate_consensus_key(&backend, true).unwrap();
assert!(txn_ctx.execution_result.is_none());
assert_eq!(rotated_consensus_key, new_consensus_key);
}
#[test]
fn test_create_operator_hex_file() {
create_operator_with_file_writer(write_key_to_file_hex_format);
}
#[test]
fn test_create_operator_bcs_file() {
create_operator_with_file_writer(write_key_to_file_bcs_format);
}
#[test]
fn test_create_validator_hex_file() {
create_validator_with_file_writer(write_key_to_file_hex_format);
}
#[test]
fn test_create_validator_bcs_file() {
create_validator_with_file_writer(write_key_to_file_bcs_format);
}
#[test]
fn test_disable_address_validation() {
let num_nodes = 1;
let (_env, op_tool, backend, _) = launch_swarm_with_op_tool_and_backend(num_nodes, 0);
// Try to set the validator config with a bad address and verify failure
let bad_network_address = NetworkAddress::from_str("/dns4/127.0.0.1/tcp/1234").unwrap();
op_tool
.set_validator_config(
Some(bad_network_address.clone()),
None,
&backend,
false,
false,
)
.unwrap_err();
// Now disable address verification to set the validator config with a bad network address
let txn_ctx = op_tool
.set_validator_config(Some(bad_network_address), None, &backend, false, true)
.unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Rotate the consensus key and verify that it isn't blocked by a bad network address
let _ = op_tool.rotate_consensus_key(&backend, false).unwrap();
// Rotate the validator network key and verify that it isn't blocked by a bad network address
let _ = op_tool
.rotate_validator_network_key(&backend, false)
.unwrap();
// Rotate the fullnode network key and verify that it isn't blocked by a bad network address
let _ = op_tool
.rotate_fullnode_network_key(&backend, false)
.unwrap();
// Rotate the operator key and verify that it isn't blocked by a bad network address
let _ = op_tool.rotate_operator_key(&backend, false).unwrap();
// Update the validator network address with a valid address
let new_network_address = NetworkAddress::from_str("/ip4/10.0.0.16/tcp/80").unwrap();
let _ = op_tool
.set_validator_config(Some(new_network_address), None, &backend, false, false)
.unwrap();
}
#[test]
fn test_set_operator_and_add_new_validator() {
let status = set_operator_and_add_new_validator_helper();
assert_ne!(VMStatusView::Executed, status);
}
#[test]
fn test_move_explain_operational_tooling() {
let status = set_operator_and_add_new_validator_helper();
// Make sure this was the error that we were expecting and that the error is explained
// correctly.
match status {
VMStatusView::MoveAbort { explanation, .. } => {
assert!(
explanation.is_some(),
"Move abort explanation for known abort code not found"
);
let explanation = explanation.unwrap();
assert_eq!(explanation.category, "INVALID_ARGUMENT");
assert_eq!(explanation.reason, "EALREADY_A_VALIDATOR");
}
_ => panic!("Unexpected VM status when trying to double-add validator"),
}
}
#[test]
fn test_extract_private_key() {
let (env, op_tool, backend, storage) = launch_swarm_with_op_tool_and_backend(1, 0);
// Extract the operator private key to file
let (_, node_config_path) = load_node_config(&env.validator_swarm, 0);
let key_file_path = node_config_path.with_file_name(OPERATOR_KEY);
let _ = op_tool
.extract_private_key(
OPERATOR_KEY,
key_file_path.to_str().unwrap(),
KeyType::Ed25519,
EncodingType::BCS,
&backend,
)
.unwrap();
// Verify the operator private key has been written correctly
let file_contents = fs::read(key_file_path).unwrap();
let key_from_file = bcs::from_bytes(&file_contents).unwrap();
let key_from_storage = storage.export_private_key(OPERATOR_KEY).unwrap();
assert_eq!(key_from_storage, key_from_file);
}
#[test]
fn test_extract_public_key() {
let (env, op_tool, backend, storage) = launch_swarm_with_op_tool_and_backend(1, 0);
// Extract the operator public key to file
let (_, node_config_path) = load_node_config(&env.validator_swarm, 0);
let key_file_path = node_config_path.with_file_name(OPERATOR_KEY);
let _ = op_tool
.extract_public_key(
OPERATOR_KEY,
key_file_path.to_str().unwrap(),
KeyType::Ed25519,
EncodingType::BCS,
&backend,
)
.unwrap();
// Verify the operator key has been written correctly
let file_contents = fs::read(key_file_path).unwrap();
let key_from_file = bcs::from_bytes(&file_contents).unwrap();
let key_from_storage = storage.get_public_key(OPERATOR_KEY).unwrap().public_key;
assert_eq!(key_from_storage, key_from_file);
}
#[test]
fn test_extract_peer_from_storage() {
let (mut env, op_tool, backend, _) = launch_swarm_with_op_tool_and_backend(1, 0);
env.setup_vfn_swarm();
// Check Validator
let (config, _) = load_node_config(&env.validator_swarm, 0);
let map = op_tool
.extract_peer_from_storage(VALIDATOR_NETWORK_KEY, &backend)
.unwrap();
let network_config = config.validator_network.unwrap();
let expected_peer_id = network_config.peer_id();
let expected_public_key = network_config.identity_key().public_key();
let (peer_id, peer) = map.iter().next().unwrap();
assert_eq!(expected_public_key, *peer.keys.iter().next().unwrap());
assert_eq!(expected_peer_id, *peer_id);
// Check VFN now
let (config, _) = load_node_config(&env.vfn_swarm().lock(), 0);
let map = op_tool
.extract_peer_from_storage(FULLNODE_NETWORK_KEY, &backend)
.unwrap();
let network_config = config
.full_node_networks
.iter()
.find(|network| network.network_id == NetworkId::Public)
.unwrap();
let expected_peer_id = network_config.peer_id();
let expected_public_key = network_config.identity_key().public_key();
let (peer_id, peer) = map.iter().next().unwrap();
assert_eq!(expected_public_key, *peer.keys.iter().next().unwrap());
assert_eq!(expected_peer_id, *peer_id);
}
#[test]
fn test_extract_peer_from_file() {
let op_tool = OperationalTool::test();
let path = TempPath::new();
path.create_as_file().unwrap();
let key = op_tool
.generate_key(KeyType::X25519, path.as_ref(), EncodingType::Hex)
.unwrap();
let peer = op_tool
.extract_peer_from_file(path.as_ref(), EncodingType::Hex)
.unwrap();
assert_eq!(1, peer.len());
let (peer_id, peer) = peer.iter().next().unwrap();
let public_key = key.public_key();
assert_eq!(public_key, *peer.keys.iter().next().unwrap());
assert_eq!(from_identity_public_key(public_key), *peer_id);
}
#[test]
fn test_extract_peers_from_keys() {
let op_tool = OperationalTool::test();
let output_path = TempPath::new();
output_path.create_as_file().unwrap();
let mut keys = HashSet::new();
for _ in 1..10 {
let key_path = TempPath::new();
key_path.create_as_file().unwrap();
keys.insert(
op_tool
.generate_key(KeyType::X25519, key_path.as_ref(), EncodingType::Hex)
.unwrap()
.public_key(),
);
}
let peers = op_tool
.extract_peers_from_keys(keys.clone(), output_path.as_ref())
.unwrap();
assert_eq!(keys.len(), peers.len());
for key in keys {
let address = from_identity_public_key(key);
let peer = peers.get(&address).unwrap();
let keys = &peer.keys;
assert_eq!(1, keys.len());
assert!(keys.contains(&key));
assert_eq!(PeerRole::Downstream, peer.role);
assert!(peer.addresses.is_empty());
}
}
#[test]
fn test_generate_key() {
let op_tool = OperationalTool::test();
let path = TempPath::new();
path.create_as_file().unwrap();
// Base64
let expected_key = op_tool
.generate_key(KeyType::X25519, path.as_ref(), EncodingType::Base64)
.unwrap();
assert_eq!(
expected_key,
x25519::PrivateKey::try_from(
base64::decode(fs::read(path.as_ref()).unwrap())
.unwrap()
.as_slice()
)
.unwrap(),
);
// Hex
let expected_key = op_tool
.generate_key(KeyType::X25519, path.as_ref(), EncodingType::Hex)
.unwrap();
assert_eq!(
expected_key,
x25519::PrivateKey::from_encoded_string(
&String::from_utf8(fs::read(path.as_ref()).unwrap()).unwrap()
)
.unwrap()
);
// BCS
let expected_key = op_tool
.generate_key(KeyType::X25519, path.as_ref(), EncodingType::BCS)
.unwrap();
assert_eq!(
expected_key,
bcs::from_bytes(&fs::read(path.as_ref()).unwrap()).unwrap()
);
}
#[test]
fn test_insert_waypoint() {
let (_env, op_tool, backend, storage) = launch_swarm_with_op_tool_and_backend(1, 0);
// Get the current waypoint from storage
let current_waypoint: Waypoint = storage.get(WAYPOINT).unwrap().value;
// Insert a new waypoint and genesis waypoint into storage
let inserted_waypoint =
Waypoint::new_any(&LedgerInfo::new(BlockInfo::empty(), HashValue::zero()));
assert_ne!(current_waypoint, inserted_waypoint);
op_tool
.insert_waypoint(inserted_waypoint, &backend, true)
.unwrap();
// Verify the waypoint has changed in storage and that genesis waypoint is now set
assert_eq!(inserted_waypoint, storage.get(WAYPOINT).unwrap().value);
assert_eq!(
inserted_waypoint,
storage.get(GENESIS_WAYPOINT).unwrap().value
);
// Insert the old waypoint into storage, but skip the genesis waypoint
op_tool
.insert_waypoint(current_waypoint, &backend, false)
.unwrap();
assert_eq!(current_waypoint, storage.get(WAYPOINT).unwrap().value);
assert_eq!(
inserted_waypoint,
storage.get(GENESIS_WAYPOINT).unwrap().value
);
}
#[test]
fn test_fullnode_network_key_rotation() {
let num_nodes = 1;
let (env, op_tool, backend, storage) = launch_swarm_with_op_tool_and_backend(num_nodes, 0);
// Rotate the full node network key
let (txn_ctx, new_network_key) = op_tool.rotate_fullnode_network_key(&backend, true).unwrap();
assert!(txn_ctx.execution_result.is_none());
// Wait for transaction execution
let client = env.get_validator_client(0, None);
client
.wait_for_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
// Verify that the config has been loaded correctly with new key
let validator_account = storage.get::<AccountAddress>(OWNER_ACCOUNT).unwrap().value;
let config_network_key = op_tool
.validator_config(validator_account, Some(&backend))
.unwrap()
.fullnode_network_address
.find_noise_proto()
.unwrap();
assert_eq!(new_network_key, config_network_key);
// Verify that the validator set info contains the new network key
let info_network_key = op_tool
.validator_set(Some(validator_account), Some(&backend))
.unwrap()[0]
.fullnode_network_address
.find_noise_proto()
.unwrap();
assert_eq!(new_network_key, info_network_key);
}
#[test]
fn test_network_key_rotation() {
let num_nodes = 4;
let (mut env, op_tool, backend, storage) = launch_swarm_with_op_tool_and_backend(num_nodes, 0);
// Rotate the validator network key
let (txn_ctx, new_network_key) = op_tool
.rotate_validator_network_key(&backend, true)
.unwrap();
assert!(txn_ctx.execution_result.is_none());
// Ensure all nodes have received the transaction
wait_for_transaction_on_all_nodes(&env, num_nodes, txn_ctx.address, txn_ctx.sequence_number);
// Verify that config has been loaded correctly with new key
let validator_account = storage.get::<AccountAddress>(OWNER_ACCOUNT).unwrap().value;
let config_network_key = op_tool
.validator_config(validator_account, Some(&backend))
.unwrap()
.validator_network_address
.find_noise_proto()
.unwrap();
assert_eq!(new_network_key, config_network_key);
// Verify that the validator set info contains the new network key
let info_network_key = op_tool
.validator_set(Some(validator_account), Some(&backend))
.unwrap()[0]
.validator_network_address
.find_noise_proto()
.unwrap();
assert_eq!(new_network_key, info_network_key);
// Restart validator
// At this point, the `add_node` call ensures connectivity to all nodes
env.validator_swarm.kill_node(0);
env.validator_swarm.start_node(0).unwrap();
}
#[test]
fn test_network_key_rotation_recovery() {
let num_nodes = 4;
let (mut env, op_tool, backend, mut storage) =
launch_swarm_with_op_tool_and_backend(num_nodes, 0);
// Rotate the network key in storage manually and perform a key rotation using the op_tool.
    // Here, we expect the op_tool to see that the network key in storage doesn't match the one
// on-chain, and thus it should simply forward a transaction to the blockchain.
let rotated_network_key = storage.rotate_key(VALIDATOR_NETWORK_KEY).unwrap();
let (txn_ctx, new_network_key) = op_tool
.rotate_validator_network_key(&backend, true)
.unwrap();
assert!(txn_ctx.execution_result.is_none());
assert_eq!(new_network_key, to_x25519(rotated_network_key).unwrap());
// Ensure all nodes have received the transaction
wait_for_transaction_on_all_nodes(&env, num_nodes, txn_ctx.address, txn_ctx.sequence_number);
// Verify that config has been loaded correctly with new key
let validator_account = storage.get::<AccountAddress>(OWNER_ACCOUNT).unwrap().value;
let config_network_key = op_tool
.validator_config(validator_account, Some(&backend))
.unwrap()
.validator_network_address
.find_noise_proto()
.unwrap();
assert_eq!(new_network_key, config_network_key);
// Verify that the validator set info contains the new network key
let info_network_key = op_tool
.validator_set(Some(validator_account), Some(&backend))
.unwrap()[0]
.validator_network_address
.find_noise_proto()
.unwrap();
assert_eq!(new_network_key, info_network_key);
// Restart validator
// At this point, the `add_node` call ensures connectivity to all nodes
env.validator_swarm.kill_node(0);
env.validator_swarm.start_node(0).unwrap();
}
#[test]
fn test_operator_key_rotation() {
let (env, op_tool, backend, storage) = launch_swarm_with_op_tool_and_backend(1, 0);
let (txn_ctx, _) = op_tool.rotate_operator_key(&backend, true).unwrap();
assert!(txn_ctx.execution_result.is_none());
// Wait for transaction execution
let client = env.get_validator_client(0, None);
client
.wait_for_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
// Verify that the transaction was executed correctly
let txn_ctx = op_tool
.validate_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Rotate the consensus key to verify the operator key has been updated
let (txn_ctx, new_consensus_key) = op_tool.rotate_consensus_key(&backend, false).unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Verify that the config has been updated correctly with the new consensus key
let validator_account = storage.get::<AccountAddress>(OWNER_ACCOUNT).unwrap().value;
let config_consensus_key = op_tool
.validator_config(validator_account, Some(&backend))
.unwrap()
.consensus_public_key;
assert_eq!(new_consensus_key, config_consensus_key);
}
#[test]
fn test_operator_key_rotation_recovery() {
let (env, op_tool, backend, mut storage) = launch_swarm_with_op_tool_and_backend(1, 0);
// Rotate the operator key
let (txn_ctx, new_operator_key) = op_tool.rotate_operator_key(&backend, false).unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Verify that the transaction was executed correctly
let txn_ctx = op_tool
.validate_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Verify that the operator key was updated on-chain
let operator_account = storage
.get::<AccountAddress>(OPERATOR_ACCOUNT)
.unwrap()
.value;
let account_resource = op_tool.account_resource(operator_account).unwrap();
let on_chain_operator_key = hex::decode(account_resource.authentication_key).unwrap();
assert_eq!(
AuthenticationKey::ed25519(&new_operator_key),
AuthenticationKey::try_from(on_chain_operator_key).unwrap()
);
// Rotate the operator key in storage manually and perform another rotation using the op tool.
// Here, we expect the op_tool to see that the operator key in storage doesn't match the one
// on-chain, and thus it should simply forward a transaction to the blockchain.
let rotated_operator_key = storage.rotate_key(OPERATOR_KEY).unwrap();
let (txn_ctx, new_operator_key) = op_tool.rotate_operator_key(&backend, true).unwrap();
assert!(txn_ctx.execution_result.is_none());
assert_eq!(rotated_operator_key, new_operator_key);
// Wait for transaction execution
let client = env.get_validator_client(0, None);
client
.wait_for_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
// Verify that the transaction was executed correctly
let txn_ctx = op_tool
.validate_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Verify that the operator key was updated on-chain
let account_resource = op_tool.account_resource(operator_account).unwrap();
let on_chain_operator_key = hex::decode(account_resource.authentication_key).unwrap();
assert_eq!(
AuthenticationKey::ed25519(&new_operator_key),
AuthenticationKey::try_from(on_chain_operator_key).unwrap()
);
}
#[test]
fn test_print_account() {
let (_env, op_tool, backend, storage) = launch_swarm_with_op_tool_and_backend(1, 0);
// Print the owner account
let op_tool_owner_account = op_tool.print_account(OWNER_ACCOUNT, &backend).unwrap();
let storage_owner_account = storage.get::<AccountAddress>(OWNER_ACCOUNT).unwrap().value;
assert_eq!(storage_owner_account, op_tool_owner_account);
// Print the operator account
let op_tool_operator_account = op_tool.print_account(OPERATOR_ACCOUNT, &backend).unwrap();
let storage_operator_account = storage
.get::<AccountAddress>(OPERATOR_ACCOUNT)
.unwrap()
.value;
assert_eq!(storage_operator_account, op_tool_operator_account);
}
#[test]
fn test_print_key() {
let (_env, op_tool, backend, storage) = launch_swarm_with_op_tool_and_backend(1, 0);
// Print the operator key
let op_tool_operator_key = op_tool.print_key(OPERATOR_KEY, &backend).unwrap();
let storage_operator_key = storage.get_public_key(OPERATOR_KEY).unwrap().public_key;
assert_eq!(storage_operator_key, op_tool_operator_key);
// Print the consensus key
let op_tool_consensus_key = op_tool.print_key(CONSENSUS_KEY, &backend).unwrap();
let storage_consensus_key = storage.get_public_key(CONSENSUS_KEY).unwrap().public_key;
assert_eq!(storage_consensus_key, op_tool_consensus_key);
}
#[test]
fn test_print_waypoints() {
let (_env, op_tool, backend, _) = launch_swarm_with_op_tool_and_backend(1, 0);
// Insert a new waypoint and genesis waypoint into storage
let inserted_waypoint =
Waypoint::new_any(&LedgerInfo::new(BlockInfo::empty(), HashValue::zero()));
op_tool
.insert_waypoint(inserted_waypoint, &backend, true)
.unwrap();
// Print the waypoint
let waypoint = op_tool.print_waypoint(WAYPOINT, &backend).unwrap();
assert_eq!(inserted_waypoint, waypoint);
// Print the genesis waypoint
let genesis_waypoint = op_tool.print_waypoint(GENESIS_WAYPOINT, &backend).unwrap();
assert_eq!(inserted_waypoint, genesis_waypoint);
}
#[test]
fn test_validate_transaction() {
let (env, op_tool, backend, _) = launch_swarm_with_op_tool_and_backend(1, 0);
// Validate an unknown transaction and verify no VM state found
let operator_account = op_tool.print_account(OPERATOR_ACCOUNT, &backend).unwrap();
assert_eq!(
None,
op_tool
.validate_transaction(operator_account, 1000)
.unwrap()
.execution_result
);
// Submit a transaction (rotate the operator key) and validate the transaction execution
let (txn_ctx, _) = op_tool.rotate_operator_key(&backend, true).unwrap();
let client = env.get_validator_client(0, None);
client
.wait_for_transaction(operator_account, txn_ctx.sequence_number)
.unwrap();
let result = op_tool
.validate_transaction(operator_account, txn_ctx.sequence_number)
.unwrap()
.execution_result;
assert_eq!(VMStatusView::Executed, result.unwrap());
// Submit a transaction with auto validation (rotate the operator key) and compare results
let (txn_ctx, _) = op_tool.rotate_operator_key(&backend, false).unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
let result = op_tool
.validate_transaction(operator_account, txn_ctx.sequence_number)
.unwrap()
.execution_result;
assert_eq!(VMStatusView::Executed, result.unwrap());
}
#[test]
fn test_validator_config() {
let (_env, op_tool, backend, mut storage) = launch_swarm_with_op_tool_and_backend(1, 0);
// Fetch the initial validator config for this operator's owner
let owner_account = storage.get::<AccountAddress>(OWNER_ACCOUNT).unwrap().value;
let consensus_key = storage.get_public_key(CONSENSUS_KEY).unwrap().public_key;
let original_validator_config = op_tool
.validator_config(owner_account, Some(&backend))
.unwrap();
assert_eq!(
consensus_key,
original_validator_config.consensus_public_key
);
// Rotate the consensus key locally and update the validator network address using the config
let new_consensus_key = storage.rotate_key(CONSENSUS_KEY).unwrap();
let new_network_address = NetworkAddress::from_str("/ip4/10.0.0.16/tcp/80").unwrap();
let txn_ctx = op_tool
.set_validator_config(
Some(new_network_address.clone()),
None,
&backend,
false,
false,
)
.unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Re-fetch the validator config and verify the changes
let new_validator_config = op_tool
.validator_config(owner_account, Some(&backend))
.unwrap();
assert_eq!(new_consensus_key, new_validator_config.consensus_public_key);
assert!(new_validator_config
.validator_network_address
.to_string()
.contains(&new_network_address.to_string()));
assert_eq!(original_validator_config.name, new_validator_config.name);
assert_eq!(
original_validator_config.fullnode_network_address,
new_validator_config.fullnode_network_address
);
}
#[test]
fn test_validator_decryption() {
let (_env, op_tool, backend, mut storage) = launch_swarm_with_op_tool_and_backend(1, 0);
// Fetch the validator config and validator info for this operator's owner
let owner_account = storage.get::<AccountAddress>(OWNER_ACCOUNT).unwrap().value;
let validator_config = op_tool
.validator_config(owner_account, Some(&backend))
.unwrap();
let validator_set_infos = op_tool
.validator_set(Some(owner_account), Some(&backend))
.unwrap();
assert_eq!(1, validator_set_infos.len());
// Ensure the validator network addresses were decrypted successfully
let failed_decryption_address = NetworkAddress::from_str("/dns4/could-not-decrypt").unwrap();
let config_network_address = validator_config.validator_network_address;
let info_network_address = validator_set_infos[0].validator_network_address.clone();
assert_eq!(config_network_address, info_network_address);
assert_ne!(failed_decryption_address, config_network_address);
// Corrupt the network address encryption key in storage
storage
.set(VALIDATOR_NETWORK_ADDRESS_KEYS, "INVALID KEY")
.unwrap();
// Verify that any failure to decrypt the address will still produce a result
for backend in &[Some(&backend), None] {
// Fetch the validator config and validator info for this operator's owner
let validator_config = op_tool.validator_config(owner_account, *backend).unwrap();
let validator_set_infos = op_tool
.validator_set(Some(owner_account), *backend)
.unwrap();
// Ensure the validator network addresses failed to decrypt, but everything else was fetched
let config_network_address = validator_config.validator_network_address;
let info_network_address = validator_set_infos[0].validator_network_address.clone();
assert_eq!(config_network_address, info_network_address);
assert_eq!(failed_decryption_address, config_network_address);
}
}
#[test]
fn test_validator_set() {
let num_nodes = 4;
let (_env, op_tool, backend, mut storage) = launch_swarm_with_op_tool_and_backend(num_nodes, 0);
// Fetch the validator config and validator info for this operator's owner
let owner_account = storage.get::<AccountAddress>(OWNER_ACCOUNT).unwrap().value;
let validator_config = op_tool
.validator_config(owner_account, Some(&backend))
.unwrap();
let validator_set_infos = op_tool
.validator_set(Some(owner_account), Some(&backend))
.unwrap();
assert_eq!(1, validator_set_infos.len());
// Compare the validator config and the validator info
let validator_info = validator_set_infos.first().unwrap();
assert_eq!(validator_info.account_address, owner_account);
assert_eq!(validator_info.name, validator_config.name);
assert_eq!(
validator_info.consensus_public_key,
validator_config.consensus_public_key
);
assert_eq!(
validator_info.validator_network_address,
validator_config.validator_network_address
);
assert_eq!(
validator_info.fullnode_network_address,
validator_config.fullnode_network_address
);
// Fetch the entire validator set and check this account is included
let validator_set_infos = op_tool.validator_set(None, Some(&backend)).unwrap();
assert_eq!(num_nodes, validator_set_infos.len());
let _ = validator_set_infos
.iter()
.find(|info| info.account_address == owner_account)
.unwrap();
// Overwrite the shared network encryption key in storage and verify that the
// validator set can still be retrieved (but unable to decrypt the validator
// network address)
let _ = storage
.set(VALIDATOR_NETWORK_ADDRESS_KEYS, "random string")
.unwrap();
let validator_set_infos = op_tool.validator_set(None, Some(&backend)).unwrap();
assert_eq!(num_nodes, validator_set_infos.len());
let validator_info = validator_set_infos
.iter()
.find(|info| info.account_address == owner_account)
.unwrap();
assert_eq!(
validator_info.fullnode_network_address,
validator_config.fullnode_network_address
);
assert_ne!(
validator_info.validator_network_address,
validator_config.validator_network_address
);
}
/// Creates a new account address and key for testing.
fn create_new_test_account() -> (Ed25519PrivateKey, AccountAddress) {
let mut rng = OsRng;
let key = Ed25519PrivateKey::generate(&mut rng);
let auth_key = AuthenticationKey::ed25519(&key.public_key());
let account = auth_key.derived_address();
(key, account)
}
/// Creates a new validator operator using the given file writer and verifies
/// the operator account is correctly initialized on-chain.
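///
/// A minimal usage sketch (illustrative only; `write_key_to_file_hex_format` is one of the
/// file-writer helpers referenced later in this file):
///
/// ```ignore
/// create_operator_with_file_writer(write_key_to_file_hex_format);
/// ```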
fn create_operator_with_file_writer(file_writer: fn(&Ed25519PublicKey, PathBuf)) {
let (env, op_tool, _, _) = launch_swarm_with_op_tool_and_backend(1, 0);
// Create a new operator key and account
let (operator_key, operator_account) = create_new_test_account();
// Verify the corresponding account doesn't exist on-chain
let diem_json_rpc = get_json_rpc_diem_interface(&env.validator_swarm, 0);
diem_json_rpc
.retrieve_account_state(operator_account)
.unwrap_err();
// Write the key to a file using the provided file writer
let key_file_path = write_key_to_file(&operator_key.public_key(), &env, file_writer);
// Create the operator account
let backend = load_diem_root_storage(&env.validator_swarm, 0);
let op_human_name = "new_operator";
let (txn_ctx, account_address) = op_tool
.create_validator_operator(
op_human_name,
key_file_path.to_str().unwrap(),
&backend,
false,
)
.unwrap();
assert_eq!(operator_account, account_address);
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Verify the operator account now exists on-chain
let account_state = diem_json_rpc
.retrieve_account_state(operator_account)
.unwrap();
let op_config_resource = account_state
.get_validator_operator_config_resource()
.unwrap()
.unwrap();
assert_eq!(op_human_name.as_bytes(), op_config_resource.human_name);
}
/// Creates a new validator using the given file writer and verifies
/// the account is correctly initialized on-chain.
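///
/// A minimal usage sketch (illustrative only; `write_key_to_file_bcs_format` is one of the
/// file-writer helpers referenced later in this file):
///
/// ```ignore
/// create_validator_with_file_writer(write_key_to_file_bcs_format);
/// ```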
fn create_validator_with_file_writer(file_writer: fn(&Ed25519PublicKey, PathBuf)) {
let (env, op_tool, _, _) = launch_swarm_with_op_tool_and_backend(1, 0);
// Create a new validator key and account
let (validator_key, validator_account) = create_new_test_account();
// Verify the corresponding account doesn't exist on-chain
let diem_json_rpc = get_json_rpc_diem_interface(&env.validator_swarm, 0);
diem_json_rpc
.retrieve_account_state(validator_account)
.unwrap_err();
// Write the key to a file using the provided file writer
let key_file_path = write_key_to_file(&validator_key.public_key(), &env, file_writer);
// Create the validator account
let backend = load_diem_root_storage(&env.validator_swarm, 0);
let val_human_name = "new_validator";
let (txn_ctx, account_address) = op_tool
.create_validator(
val_human_name,
key_file_path.to_str().unwrap(),
&backend,
true,
)
.unwrap();
assert!(txn_ctx.execution_result.is_none());
assert_eq!(validator_account, account_address);
// Wait for transaction execution
let client = env.get_validator_client(0, None);
client
.wait_for_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
// Verify that the transaction was executed
let txn_ctx = op_tool
.validate_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Verify the validator account now exists on-chain
let account_state = diem_json_rpc
.retrieve_account_state(validator_account)
.unwrap();
let val_config_resource = account_state
.get_validator_config_resource()
.unwrap()
.unwrap();
assert_eq!(val_human_name.as_bytes(), val_config_resource.human_name);
assert!(val_config_resource.delegated_account.is_none());
assert!(val_config_resource.validator_config.is_none());
}
/// Launches a validator swarm of a specified size, connects an operational
/// tool to the node at the specified index and fetches the node's secure backend.
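///
/// A minimal usage sketch mirroring the destructuring used by the tests above (the node count
/// and index are illustrative only):
///
/// ```ignore
/// // Launch a four-node swarm and attach the operational tool and storage backend of node 0.
/// let (env, op_tool, backend, storage) = launch_swarm_with_op_tool_and_backend(4, 0);
/// ```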
pub fn launch_swarm_with_op_tool_and_backend(
num_nodes: usize,
node_index: usize,
) -> (
SmokeTestEnvironment,
OperationalTool,
SecureBackend,
Storage,
) {
// Launch validator swarm
let mut env = SmokeTestEnvironment::new(num_nodes);
env.validator_swarm.launch();
// Connect the operator tool to the node's JSON RPC API
let op_tool = get_op_tool(&env.validator_swarm, node_index);
// Load validator's on disk storage
let backend = load_backend_storage(&env.validator_swarm, node_index);
let storage: Storage = (&backend).try_into().unwrap();
(env, op_tool, backend, storage)
}
/// Writes a given key to file using a specified file writer and test environment.
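///
/// A minimal usage sketch (illustrative only; assumes an `operator_key` created via
/// `create_new_test_account` and one of the file-writer helpers referenced in this file):
///
/// ```ignore
/// let key_file_path = write_key_to_file(&operator_key.public_key(), &env, write_key_to_file_hex_format);
/// ```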
fn write_key_to_file(
key: &Ed25519PublicKey,
env: &SmokeTestEnvironment,
file_writer: fn(&Ed25519PublicKey, PathBuf),
) -> PathBuf {
let (_, node_config_path) = load_node_config(&env.validator_swarm, 0);
let file_path = node_config_path.with_file_name("KEY_FILE");
file_writer(key, file_path.clone());
file_path
}
fn set_operator_and_add_new_validator_helper() -> VMStatusView {
let num_nodes = 3;
let (env, op_tool, _, _) = launch_swarm_with_op_tool_and_backend(num_nodes, 0);
// Create new validator and validator operator keys and accounts
let (validator_key, validator_account) = create_new_test_account();
let (operator_key, operator_account) = create_new_test_account();
// Write the validator key to a file and create the validator account
let validator_key_path = write_key_to_file(
&validator_key.public_key(),
&env,
write_key_to_file_hex_format,
);
let diem_backend = load_diem_root_storage(&env.validator_swarm, 0);
let val_human_name = "new_validator";
let (txn_ctx, _) = op_tool
.create_validator(
val_human_name,
validator_key_path.to_str().unwrap(),
&diem_backend,
false,
)
.unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Write the operator key to a file and create the operator account
let operator_key_path = write_key_to_file(
&operator_key.public_key(),
&env,
write_key_to_file_bcs_format,
);
let op_human_name = "new_operator";
let (txn_ctx, _) = op_tool
.create_validator_operator(
op_human_name,
operator_key_path.to_str().unwrap(),
&diem_backend,
true,
)
.unwrap();
// Wait for transaction execution
let client = env.get_validator_client(0, None);
client
.wait_for_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
// Verify that the transaction was executed
let txn_ctx = op_tool
.validate_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Overwrite the keys in storage to execute the command from the new validator's perspective
let backend = load_backend_storage(&env.validator_swarm, 0);
let mut storage: Storage = (&backend).try_into().unwrap();
storage.set(OWNER_ACCOUNT, validator_account).unwrap();
storage
.import_private_key(OWNER_KEY, validator_key)
.unwrap();
// Verify that no validator operator has been assigned yet
let diem_json_rpc = get_json_rpc_diem_interface(&env.validator_swarm, 0);
let account_state = diem_json_rpc
.retrieve_account_state(validator_account)
.unwrap();
let val_config_resource = account_state
.get_validator_config_resource()
.unwrap()
.unwrap();
assert!(val_config_resource.delegated_account.is_none());
assert!(val_config_resource.validator_config.is_none());
// Set the validator operator
let txn_ctx = op_tool
.set_validator_operator(op_human_name, operator_account, &backend, true)
.unwrap();
assert!(txn_ctx.execution_result.is_none());
// Wait for transaction execution
client
.wait_for_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
// Verify the operator has been set correctly
let account_state = diem_json_rpc
.retrieve_account_state(validator_account)
.unwrap();
let val_config_resource = account_state
.get_validator_config_resource()
.unwrap()
.unwrap();
assert_eq!(
operator_account,
val_config_resource.delegated_account.unwrap()
);
assert!(val_config_resource.validator_config.is_none());
// Overwrite the keys in storage to execute the command from the new operator's perspective
storage.set(OPERATOR_ACCOUNT, operator_account).unwrap();
storage
.import_private_key(OPERATOR_KEY, operator_key)
.unwrap();
// Set the validator config
let network_address = Some(NetworkAddress::from_str("/ip4/10.0.0.16/tcp/80").unwrap());
let txn_ctx = op_tool
.set_validator_config(
network_address.clone(),
network_address,
&backend,
true,
false,
)
.unwrap();
assert!(txn_ctx.execution_result.is_none());
// Wait for transaction execution
client
.wait_for_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
// Check the validator set size
let validator_set_infos = op_tool.validator_set(None, Some(&backend)).unwrap();
assert_eq!(num_nodes, validator_set_infos.len());
assert!(validator_set_infos
.iter()
.find(|info| info.account_address == validator_account)
.is_none());
// Add the validator to the validator set
let txn_ctx = op_tool
.add_validator(validator_account, &diem_backend, true)
.unwrap();
// Wait for transaction execution
client
.wait_for_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
// Verify that the transaction was executed
let txn_ctx = op_tool
.validate_transaction(txn_ctx.address, txn_ctx.sequence_number)
.unwrap();
assert_eq!(VMStatusView::Executed, txn_ctx.execution_result.unwrap());
// Check the new validator has been added to the set
let validator_set_infos = op_tool.validator_set(None, Some(&backend)).unwrap();
assert_eq!(num_nodes + 1, validator_set_infos.len());
let validator_info = validator_set_infos
.iter()
.find(|info| info.account_address == validator_account)
.unwrap();
assert_eq!(validator_account, validator_info.account_address);
assert_eq!(val_human_name, validator_info.name);
// Try to add the same validator again and return the execution result (expected to fail)
let txn_ctx = op_tool
.add_validator(validator_account, &diem_backend, false)
.unwrap();
txn_ctx.execution_result.unwrap()
}
| 37.621237 | 100 | 0.698278 |
def05143448b9b893ea4816d14000febda8fb214
| 442 |
macro_rules! w {
($buf:expr, $to_w:expr) => {
match $buf.write_all($to_w) {
Ok(..) => (),
Err(..) => panic!("Failed to write to generated file"),
}
};
}
#[cfg(feature = "debug")]
macro_rules! debug {
($($arg:tt)*) => {
print!("[{:>w$}] \t", module_path!(), w = 28);
println!($($arg)*)
}
}
#[cfg(not(feature = "debug"))]
macro_rules! debug {
($($arg:tt)*) => {};
}
| 20.090909 | 67 | 0.434389 |
5d23339f2900510d0d42e0f61b149c65af59584c
| 5,282 |
//! Store the event buffer to a file when sending events fails.
use std::fs::File;
use std::path::PathBuf;
use std::io;
use std::io::Write;
use std::fmt::Debug;
use retry_conf::RetryConf;
use error::FluentError;
use std::fs::OpenOptions;
use serde::ser::Serialize;
use dumpable::Dumpable;
/// Open a file with the write, create, and append options, creating it if it does not exist.
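///
/// A brief sketch of how this helper is used by `maybe_write_events` below (the path is
/// illustrative only):
///
/// ```ignore
/// let mut f = ensure_file_with_wca(PathBuf::from("/tmp/fruently-buffer"))?;
/// f.write_all(b"buffered events\n")?;
/// ```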
fn ensure_file_with_wca(path: PathBuf) -> Result<File, io::Error> {
let file = OpenOptions::new()
.write(true)
.create(true)
.append(true)
.open(path)?;
Ok(file)
}
/// Write the events buffer to a file in TSV format.
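///
/// A usage sketch following the pattern in the tests below (the store path is illustrative, and
/// `record` is assumed to be built as in those tests). Note that even a successful store returns
/// `Err(FluentError::FileStored(..))` so that callers can report where the buffer was written:
///
/// ```ignore
/// let conf = RetryConf::new().store_file(PathBuf::from("/tmp/fruently-buffer"));
/// let result = maybe_write_events(&conf, record, FluentError::Dummy("dummy".to_string()));
/// assert!(result.is_err());
/// ```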
pub fn maybe_write_events<T>(conf: &RetryConf, events: T, err: FluentError) -> Result<(), FluentError>
where
T: Serialize + Dumpable + Debug,
{
let store_needed = conf.clone().need_to_store();
let store_path = conf.clone().store_path();
if store_needed {
match ensure_file_with_wca(store_path.clone().unwrap()) {
Ok(mut f) => {
let mut w = Vec::new();
write!(&mut w, "{}", events.dump()).unwrap();
f.write_all(&w)?;
f.sync_data()?;
Err(FluentError::FileStored(format!(
"stored buffer in specified file: \
{:?}",
store_path.unwrap()
)))
},
Err(e) => Err(From::from(e)),
}
} else {
Err(err)
}
}
#[cfg(test)]
mod tests {
extern crate tempdir;
use super::*;
use time;
use std::collections::HashMap;
use self::tempdir::TempDir;
use record::Record;
use retry_conf::RetryConf;
use error::FluentError;
use forwardable::forward::Forward;
#[cfg(not(feature = "time-as-integer"))]
use event_time::EventTime;
#[cfg(not(feature = "time-as-integer"))]
use event_record::EventRecord;
#[test]
fn test_write_record() {
let tag = "fruently".to_string();
let time = time::now();
let mut obj: HashMap<String, String> = HashMap::new();
obj.insert("name".to_string(), "fruently".to_string());
let record = Record::new(tag.clone(), time, obj.clone());
let tmp = TempDir::new("fruently").unwrap().into_path().join("buffer");
let conf = RetryConf::new().store_file(tmp.clone());
assert!(maybe_write_events(&conf, record, FluentError::Dummy("dummy".to_string())).is_err());
assert!(tmp.exists())
}
#[cfg(not(feature = "time-as-integer"))]
#[test]
fn test_write_event_record() {
let tag = "fruently".to_string();
let time = time::now();
let mut obj: HashMap<String, String> = HashMap::new();
obj.insert("name".to_string(), "fruently".to_string());
let record = EventRecord::new(tag.clone(), time, obj.clone());
let tmp = TempDir::new("fruently").unwrap().into_path().join("buffer");
let conf = RetryConf::new().store_file(tmp.clone());
assert!(maybe_write_events(&conf, record, FluentError::Dummy("dummy".to_string())).is_err());
assert!(tmp.exists())
}
#[test]
fn test_write_record_2_times() {
let tag = "fruently".to_string();
let time = time::now();
let mut obj: HashMap<String, String> = HashMap::new();
obj.insert("name".to_string(), "fruently".to_string());
let record = Record::new(tag.clone(), time, obj.clone());
let tmp = TempDir::new("fruently").unwrap().into_path().join("buffer");
let conf = RetryConf::new().store_file(tmp.clone());
assert!(maybe_write_events(&conf, record, FluentError::Dummy("dummy".to_string())).is_err());
assert!(tmp.exists());
let mut obj2: HashMap<String, String> = HashMap::new();
obj2.insert("name2".to_string(), "fruently2".to_string());
let record2 = Record::new(tag.clone(), time, obj2.clone());
let conf2 = RetryConf::new().store_file(tmp.clone());
assert!(maybe_write_events(&conf2, record2, FluentError::Dummy("dummy".to_string())).is_err());
assert!(tmp.exists())
}
#[test]
fn test_write_forward_records() {
#[inline]
#[cfg(not(feature = "time-as-integer"))]
fn make_time() -> EventTime {
EventTime::new(time::now())
}
#[inline]
#[cfg(feature = "time-as-integer")]
fn make_time() -> i64 {
time::now().to_timespec().sec
}
let tag = "fruently".to_string();
let mut obj1: HashMap<String, String> = HashMap::new();
obj1.insert("hey".to_string(), "Rust with forward mode!".to_string());
let mut obj2: HashMap<String, String> = HashMap::new();
obj2.insert("yeah".to_string(), "Also sent together!".to_string());
let time = make_time();
let entry = (time.clone(), obj1);
let entry2 = (time.clone(), obj2);
let entries = vec![entry, entry2];
let forward = Forward::new(tag, entries);
let tmp = TempDir::new("fruently").unwrap().into_path().join("buffer");
let conf = RetryConf::new().store_file(tmp.clone());
assert!(maybe_write_events(&conf, forward, FluentError::Dummy("dummy".to_string())).is_err());
assert!(tmp.exists())
}
}
| 37.460993 | 103 | 0.576676 |
ff7d774934d26f6c4480d7d6cbe7dc8dce3b621c
| 28,402 |
// Copyright 2019. The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use std::{collections::HashMap, fmt, fmt::Formatter, sync::Arc};
use aes_gcm::Aes256Gcm;
use tari_common_types::{
transaction::{ImportStatus, TxId},
types::PublicKey,
};
use tari_comms::types::CommsPublicKey;
use tari_core::transactions::{
tari_amount::MicroTari,
transaction_components::{Transaction, TransactionOutput},
};
use tari_service_framework::reply_channel::SenderService;
use tari_utilities::hex::Hex;
use tokio::sync::broadcast;
use tower::Service;
use crate::{
transaction_service::{
error::TransactionServiceError,
storage::models::{
CompletedTransaction,
InboundTransaction,
OutboundTransaction,
TxCancellationReason,
WalletTransaction,
},
},
OperationId,
};
/// API Request enum
#[allow(clippy::large_enum_variant)]
pub enum TransactionServiceRequest {
GetPendingInboundTransactions,
GetPendingOutboundTransactions,
GetCompletedTransactions,
GetCancelledPendingInboundTransactions,
GetCancelledPendingOutboundTransactions,
GetCancelledCompletedTransactions,
GetCompletedTransaction(TxId),
GetAnyTransaction(TxId),
SendTransaction {
dest_pubkey: CommsPublicKey,
amount: MicroTari,
unique_id: Option<Vec<u8>>,
parent_public_key: Option<PublicKey>,
fee_per_gram: MicroTari,
message: String,
},
SendOneSidedTransaction {
dest_pubkey: CommsPublicKey,
amount: MicroTari,
unique_id: Option<Vec<u8>>,
parent_public_key: Option<PublicKey>,
fee_per_gram: MicroTari,
message: String,
},
SendShaAtomicSwapTransaction(CommsPublicKey, MicroTari, MicroTari, String),
CancelTransaction(TxId),
ImportUtxoWithStatus {
amount: MicroTari,
source_public_key: CommsPublicKey,
message: String,
maturity: Option<u64>,
import_status: ImportStatus,
tx_id: Option<TxId>,
current_height: Option<u64>,
},
SubmitTransactionToSelf(TxId, Transaction, MicroTari, MicroTari, String),
SetLowPowerMode,
SetNormalPowerMode,
ApplyEncryption(Box<Aes256Gcm>),
RemoveEncryption,
GenerateCoinbaseTransaction(MicroTari, MicroTari, u64),
RestartTransactionProtocols,
RestartBroadcastProtocols,
GetNumConfirmationsRequired,
SetNumConfirmationsRequired(u64),
ValidateTransactions,
ReValidateTransactions,
}
impl fmt::Display for TransactionServiceRequest {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::GetPendingInboundTransactions => f.write_str("GetPendingInboundTransactions"),
Self::GetPendingOutboundTransactions => f.write_str("GetPendingOutboundTransactions"),
Self::GetCompletedTransactions => f.write_str("GetCompletedTransactions"),
Self::GetCancelledPendingInboundTransactions => f.write_str("GetCancelledPendingInboundTransactions"),
Self::GetCancelledPendingOutboundTransactions => f.write_str("GetCancelledPendingOutboundTransactions"),
Self::GetCancelledCompletedTransactions => f.write_str("GetCancelledCompletedTransactions"),
Self::GetCompletedTransaction(t) => f.write_str(&format!("GetCompletedTransaction({})", t)),
Self::SendTransaction {
dest_pubkey,
amount,
message,
..
} => f.write_str(&format!(
"SendTransaction (to {}, {}, {})",
dest_pubkey.to_hex(),
amount,
message
)),
Self::SendOneSidedTransaction {
dest_pubkey,
amount,
message,
..
} => f.write_str(&format!(
"SendOneSidedTransaction (to {}, {}, {})",
dest_pubkey.to_hex(),
amount,
message
)),
Self::SendShaAtomicSwapTransaction(k, v, _, msg) => {
f.write_str(&format!("SendShaAtomicSwapTransaction (to {}, {}, {})", k, v, msg))
},
Self::CancelTransaction(t) => f.write_str(&format!("CancelTransaction ({})", t)),
Self::ImportUtxoWithStatus {
amount,
source_public_key,
message,
maturity,
import_status,
tx_id,
current_height,
} => f.write_str(&format!(
"ImportUtxo (from {}, {}, {} with maturity {} and {:?} and {:?} and {:?})",
source_public_key,
amount,
message,
maturity.unwrap_or(0),
import_status,
tx_id,
current_height,
)),
Self::SubmitTransactionToSelf(tx_id, _, _, _, _) => f.write_str(&format!("SubmitTransaction ({})", tx_id)),
Self::SetLowPowerMode => f.write_str("SetLowPowerMode "),
Self::SetNormalPowerMode => f.write_str("SetNormalPowerMode"),
Self::ApplyEncryption(_) => f.write_str("ApplyEncryption"),
Self::RemoveEncryption => f.write_str("RemoveEncryption"),
Self::GenerateCoinbaseTransaction(_, _, bh) => {
f.write_str(&format!("GenerateCoinbaseTransaction (Blockheight {})", bh))
},
Self::RestartTransactionProtocols => f.write_str("RestartTransactionProtocols"),
Self::RestartBroadcastProtocols => f.write_str("RestartBroadcastProtocols"),
Self::GetNumConfirmationsRequired => f.write_str("GetNumConfirmationsRequired"),
Self::SetNumConfirmationsRequired(_) => f.write_str("SetNumConfirmationsRequired"),
Self::GetAnyTransaction(t) => f.write_str(&format!("GetAnyTransaction({})", t)),
TransactionServiceRequest::ValidateTransactions => f.write_str("ValidateTransactions"),
TransactionServiceRequest::ReValidateTransactions => f.write_str("ReValidateTransactions"),
}
}
}
/// API Response enum
#[derive(Debug)]
pub enum TransactionServiceResponse {
TransactionSent(TxId),
TransactionCancelled,
PendingInboundTransactions(HashMap<TxId, InboundTransaction>),
PendingOutboundTransactions(HashMap<TxId, OutboundTransaction>),
CompletedTransactions(HashMap<TxId, CompletedTransaction>),
CompletedTransaction(Box<CompletedTransaction>),
BaseNodePublicKeySet,
UtxoImported(TxId),
TransactionSubmitted,
LowPowerModeSet,
NormalPowerModeSet,
EncryptionApplied,
EncryptionRemoved,
CoinbaseTransactionGenerated(Box<Transaction>),
ProtocolsRestarted,
AnyTransaction(Box<Option<WalletTransaction>>),
NumConfirmationsRequired(u64),
NumConfirmationsSet,
ValidationStarted(OperationId),
CompletedTransactionValidityChanged,
ShaAtomicSwapTransactionSent(Box<(TxId, PublicKey, TransactionOutput)>),
}
/// Events that can be published on the Transaction Service Event Stream
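///
/// A hedged sketch of consuming these events from the broadcast stream exposed by
/// `TransactionServiceHandle::get_event_stream` (defined later in this file); the handling shown
/// is illustrative only:
///
/// ```ignore
/// let mut events = handle.get_event_stream();
/// while let Ok(event) = events.recv().await {
///     match &*event {
///         TransactionEvent::TransactionBroadcast(tx_id) => println!("broadcast: {}", tx_id),
///         TransactionEvent::TransactionCancelled(tx_id, reason) => {
///             println!("cancelled: {} ({:?})", tx_id, reason)
///         },
///         _ => {},
///     }
/// }
/// ```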
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub enum TransactionEvent {
MempoolBroadcastTimedOut(TxId),
ReceivedTransaction(TxId),
ReceivedTransactionReply(TxId),
ReceivedFinalizedTransaction(TxId),
TransactionDiscoveryInProgress(TxId),
TransactionDirectSendResult(TxId, bool),
TransactionCompletedImmediately(TxId),
TransactionStoreForwardSendResult(TxId, bool),
TransactionCancelled(TxId, TxCancellationReason),
TransactionBroadcast(TxId),
TransactionImported(TxId),
FauxTransactionUnconfirmed {
tx_id: TxId,
num_confirmations: u64,
is_valid: bool,
},
FauxTransactionConfirmed {
tx_id: TxId,
is_valid: bool,
},
TransactionMined {
tx_id: TxId,
is_valid: bool,
},
TransactionMinedRequestTimedOut(TxId),
TransactionMinedUnconfirmed {
tx_id: TxId,
num_confirmations: u64,
is_valid: bool,
},
TransactionValidationStateChanged(OperationId),
TransactionValidationCompleted(OperationId),
TransactionValidationFailed(OperationId),
Error(String),
}
impl fmt::Display for TransactionEvent {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
TransactionEvent::MempoolBroadcastTimedOut(tx_id) => {
write!(f, "MempoolBroadcastTimedOut for tx:{}", tx_id)
},
TransactionEvent::ReceivedTransaction(tx) => {
write!(f, "ReceivedTransaction for {}", tx)
},
TransactionEvent::ReceivedTransactionReply(tx) => {
write!(f, "ReceivedTransactionReply for {}", tx)
},
TransactionEvent::ReceivedFinalizedTransaction(tx) => {
write!(f, "ReceivedFinalizedTransaction for {}", tx)
},
TransactionEvent::TransactionDiscoveryInProgress(tx) => {
write!(f, "TransactionDiscoveryInProgress for {}", tx)
},
TransactionEvent::TransactionDirectSendResult(tx, success) => {
write!(f, "TransactionDirectSendResult for {}: {}", tx, success)
},
TransactionEvent::TransactionCompletedImmediately(tx) => {
write!(f, "TransactionCompletedImmediately for {}", tx)
},
TransactionEvent::TransactionStoreForwardSendResult(tx, success) => {
write!(f, "TransactionStoreForwardSendResult for {}:{}", tx, success)
},
TransactionEvent::TransactionCancelled(tx, rejection) => {
write!(f, "TransactionCancelled for {}:{:?}", tx, rejection)
},
TransactionEvent::TransactionBroadcast(tx) => {
write!(f, "TransactionBroadcast for {}", tx)
},
TransactionEvent::TransactionImported(tx) => {
write!(f, "TransactionImported for {}", tx)
},
TransactionEvent::FauxTransactionUnconfirmed {
tx_id,
num_confirmations,
is_valid,
} => {
write!(
f,
"FauxTransactionUnconfirmed for {} with num confirmations: {}. is_valid: {}",
tx_id, num_confirmations, is_valid
)
},
TransactionEvent::FauxTransactionConfirmed { tx_id, is_valid } => {
write!(f, "FauxTransactionConfirmed for {}. is_valid: {}", tx_id, is_valid)
},
TransactionEvent::TransactionMined { tx_id, is_valid } => {
write!(f, "TransactionMined for {}. is_valid: {}", tx_id, is_valid)
},
TransactionEvent::TransactionMinedRequestTimedOut(tx) => {
write!(f, "TransactionMinedRequestTimedOut for {}", tx)
},
TransactionEvent::TransactionMinedUnconfirmed {
tx_id,
num_confirmations,
is_valid,
} => {
write!(
f,
"TransactionMinedUnconfirmed for {} with num confirmations: {}. is_valid: {}",
tx_id, num_confirmations, is_valid
)
},
TransactionEvent::Error(error) => {
write!(f, "Error:{}", error)
},
TransactionEvent::TransactionValidationStateChanged(operation_id) => {
write!(f, "Transaction validation state changed: {}", operation_id)
},
TransactionEvent::TransactionValidationCompleted(operation_id) => {
write!(f, "Transaction validation completed: {}", operation_id)
},
TransactionEvent::TransactionValidationFailed(operation_id) => {
write!(f, "Transaction validation failed: {}", operation_id)
},
}
}
}
pub type TransactionEventSender = broadcast::Sender<Arc<TransactionEvent>>;
pub type TransactionEventReceiver = broadcast::Receiver<Arc<TransactionEvent>>;
/// The Transaction Service Handle is a struct that contains the interfaces used to communicate with a running
/// Transaction Service
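///
/// A hedged usage sketch (assumes a `handle` and `dest_pubkey` already in scope; the amount and
/// fee values are illustrative only):
///
/// ```ignore
/// let tx_id = handle
///     .send_transaction(dest_pubkey, MicroTari::from(10_000), MicroTari::from(5), "test payment".to_string())
///     .await?;
/// let mut events = handle.get_event_stream();
/// ```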
#[derive(Clone)]
pub struct TransactionServiceHandle {
handle: SenderService<TransactionServiceRequest, Result<TransactionServiceResponse, TransactionServiceError>>,
event_stream_sender: TransactionEventSender,
}
impl TransactionServiceHandle {
pub fn new(
handle: SenderService<TransactionServiceRequest, Result<TransactionServiceResponse, TransactionServiceError>>,
event_stream_sender: TransactionEventSender,
) -> Self {
Self {
handle,
event_stream_sender,
}
}
pub fn get_event_stream(&self) -> TransactionEventReceiver {
self.event_stream_sender.subscribe()
}
pub async fn send_transaction(
&mut self,
dest_pubkey: CommsPublicKey,
amount: MicroTari,
fee_per_gram: MicroTari,
message: String,
) -> Result<TxId, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::SendTransaction {
dest_pubkey,
amount,
unique_id: None,
parent_public_key: None,
fee_per_gram,
message,
})
.await??
{
TransactionServiceResponse::TransactionSent(tx_id) => Ok(tx_id),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn send_transaction_or_token(
&mut self,
dest_pubkey: CommsPublicKey,
amount: MicroTari,
unique_id: Option<Vec<u8>>,
parent_public_key: Option<PublicKey>,
fee_per_gram: MicroTari,
message: String,
) -> Result<TxId, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::SendTransaction {
dest_pubkey,
amount,
unique_id,
parent_public_key,
fee_per_gram,
message,
})
.await??
{
TransactionServiceResponse::TransactionSent(tx_id) => Ok(tx_id),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn send_one_sided_transaction(
&mut self,
dest_pubkey: CommsPublicKey,
amount: MicroTari,
fee_per_gram: MicroTari,
message: String,
) -> Result<TxId, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::SendOneSidedTransaction {
dest_pubkey,
amount,
unique_id: None,
parent_public_key: None,
fee_per_gram,
message,
})
.await??
{
TransactionServiceResponse::TransactionSent(tx_id) => Ok(tx_id),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn send_one_sided_transaction_or_token(
&mut self,
dest_pubkey: CommsPublicKey,
amount: MicroTari,
unique_id: Option<Vec<u8>>,
parent_public_key: Option<PublicKey>,
fee_per_gram: MicroTari,
message: String,
) -> Result<TxId, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::SendOneSidedTransaction {
dest_pubkey,
amount,
unique_id,
parent_public_key,
fee_per_gram,
message,
})
.await??
{
TransactionServiceResponse::TransactionSent(tx_id) => Ok(tx_id),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn cancel_transaction(&mut self, tx_id: TxId) -> Result<(), TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::CancelTransaction(tx_id))
.await??
{
TransactionServiceResponse::TransactionCancelled => Ok(()),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn get_pending_inbound_transactions(
&mut self,
) -> Result<HashMap<TxId, InboundTransaction>, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::GetPendingInboundTransactions)
.await??
{
TransactionServiceResponse::PendingInboundTransactions(p) => Ok(p),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn get_cancelled_pending_inbound_transactions(
&mut self,
) -> Result<HashMap<TxId, InboundTransaction>, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::GetCancelledPendingInboundTransactions)
.await??
{
TransactionServiceResponse::PendingInboundTransactions(p) => Ok(p),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn get_pending_outbound_transactions(
&mut self,
) -> Result<HashMap<TxId, OutboundTransaction>, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::GetPendingOutboundTransactions)
.await??
{
TransactionServiceResponse::PendingOutboundTransactions(p) => Ok(p),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn get_cancelled_pending_outbound_transactions(
&mut self,
) -> Result<HashMap<TxId, OutboundTransaction>, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::GetCancelledPendingOutboundTransactions)
.await??
{
TransactionServiceResponse::PendingOutboundTransactions(p) => Ok(p),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn get_completed_transactions(
&mut self,
) -> Result<HashMap<TxId, CompletedTransaction>, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::GetCompletedTransactions)
.await??
{
TransactionServiceResponse::CompletedTransactions(c) => Ok(c),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn get_cancelled_completed_transactions(
&mut self,
) -> Result<HashMap<TxId, CompletedTransaction>, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::GetCancelledCompletedTransactions)
.await??
{
TransactionServiceResponse::CompletedTransactions(c) => Ok(c),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn get_completed_transaction(
&mut self,
tx_id: TxId,
) -> Result<CompletedTransaction, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::GetCompletedTransaction(tx_id))
.await??
{
TransactionServiceResponse::CompletedTransaction(t) => Ok(*t),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn get_any_transaction(
&mut self,
tx_id: TxId,
) -> Result<Option<WalletTransaction>, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::GetAnyTransaction(tx_id))
.await??
{
TransactionServiceResponse::AnyTransaction(t) => Ok(*t),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn import_utxo_with_status(
&mut self,
amount: MicroTari,
source_public_key: CommsPublicKey,
message: String,
maturity: Option<u64>,
import_status: ImportStatus,
tx_id: Option<TxId>,
current_height: Option<u64>,
) -> Result<TxId, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::ImportUtxoWithStatus {
amount,
source_public_key,
message,
maturity,
import_status,
tx_id,
current_height,
})
.await??
{
TransactionServiceResponse::UtxoImported(tx_id) => Ok(tx_id),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn submit_transaction(
&mut self,
tx_id: TxId,
tx: Transaction,
amount: MicroTari,
message: String,
) -> Result<(), TransactionServiceError> {
let fee = tx.body.get_total_fee();
match self
.handle
.call(TransactionServiceRequest::SubmitTransactionToSelf(
tx_id, tx, fee, amount, message,
))
.await??
{
TransactionServiceResponse::TransactionSubmitted => Ok(()),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn set_low_power_mode(&mut self) -> Result<(), TransactionServiceError> {
match self.handle.call(TransactionServiceRequest::SetLowPowerMode).await?? {
TransactionServiceResponse::LowPowerModeSet => Ok(()),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn revalidate_all_transactions(&mut self) -> Result<(), TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::ReValidateTransactions)
.await??
{
TransactionServiceResponse::ValidationStarted(_) => Ok(()),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn set_normal_power_mode(&mut self) -> Result<(), TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::SetNormalPowerMode)
.await??
{
TransactionServiceResponse::NormalPowerModeSet => Ok(()),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn apply_encryption(&mut self, cipher: Aes256Gcm) -> Result<(), TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::ApplyEncryption(Box::new(cipher)))
.await??
{
TransactionServiceResponse::EncryptionApplied => Ok(()),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn remove_encryption(&mut self) -> Result<(), TransactionServiceError> {
match self.handle.call(TransactionServiceRequest::RemoveEncryption).await?? {
TransactionServiceResponse::EncryptionRemoved => Ok(()),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn get_num_confirmations_required(&mut self) -> Result<u64, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::GetNumConfirmationsRequired)
.await??
{
TransactionServiceResponse::NumConfirmationsRequired(confirmations) => Ok(confirmations),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn set_num_confirmations_required(&mut self, number: u64) -> Result<(), TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::SetNumConfirmationsRequired(number))
.await??
{
TransactionServiceResponse::NumConfirmationsSet => Ok(()),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn generate_coinbase_transaction(
&mut self,
rewards: MicroTari,
fees: MicroTari,
block_height: u64,
) -> Result<Transaction, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::GenerateCoinbaseTransaction(
rewards,
fees,
block_height,
))
.await??
{
TransactionServiceResponse::CoinbaseTransactionGenerated(tx) => Ok(*tx),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn restart_transaction_protocols(&mut self) -> Result<(), TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::RestartTransactionProtocols)
.await??
{
TransactionServiceResponse::ProtocolsRestarted => Ok(()),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn restart_broadcast_protocols(&mut self) -> Result<(), TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::RestartBroadcastProtocols)
.await??
{
TransactionServiceResponse::ProtocolsRestarted => Ok(()),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn validate_transactions(&mut self) -> Result<OperationId, TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::ValidateTransactions)
.await??
{
TransactionServiceResponse::ValidationStarted(id) => Ok(id),
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
pub async fn send_sha_atomic_swap_transaction(
&mut self,
dest_pubkey: CommsPublicKey,
amount: MicroTari,
fee_per_gram: MicroTari,
message: String,
) -> Result<(TxId, PublicKey, TransactionOutput), TransactionServiceError> {
match self
.handle
.call(TransactionServiceRequest::SendShaAtomicSwapTransaction(
dest_pubkey,
amount,
fee_per_gram,
message,
))
.await??
{
TransactionServiceResponse::ShaAtomicSwapTransactionSent(boxed) => {
let (tx_id, pre_image, output) = *boxed;
Ok((tx_id, pre_image, output))
},
_ => Err(TransactionServiceError::UnexpectedApiResponse),
}
}
}
| 37.175393 | 119 | 0.61214 |
0998d3a3f6e91e1842353584a18ce04a2d9ffbfc
| 8,727 |
use core::convert::Infallible;
use super::{
ErasedPin, Floating, Input, OpenDrain, Output, PartiallyErasedPin, Pin, PullDown, PullUp,
PushPull,
};
pub use embedded_hal_one::digital::PinState;
use embedded_hal_one::digital::{
blocking::{InputPin, IoPin, OutputPin, StatefulOutputPin, ToggleableOutputPin},
ErrorType,
};
fn into_state(state: PinState) -> super::PinState {
match state {
PinState::Low => super::PinState::Low,
PinState::High => super::PinState::High,
}
}
// Implementations for `Pin`
impl<MODE, const P: char, const N: u8> ErrorType for Pin<MODE, P, N> {
type Error = Infallible;
}
impl<MODE, const P: char, const N: u8> OutputPin for Pin<Output<MODE>, P, N> {
#[inline(always)]
fn set_high(&mut self) -> Result<(), Self::Error> {
self.set_high();
Ok(())
}
#[inline(always)]
fn set_low(&mut self) -> Result<(), Self::Error> {
self.set_low();
Ok(())
}
}
impl<MODE, const P: char, const N: u8> StatefulOutputPin for Pin<Output<MODE>, P, N> {
#[inline(always)]
fn is_set_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_set_high())
}
#[inline(always)]
fn is_set_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_set_low())
}
}
impl<MODE, const P: char, const N: u8> ToggleableOutputPin for Pin<Output<MODE>, P, N> {
#[inline(always)]
fn toggle(&mut self) -> Result<(), Self::Error> {
self.toggle();
Ok(())
}
}
impl<const P: char, const N: u8> InputPin for Pin<Output<OpenDrain>, P, N> {
#[inline(always)]
fn is_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_high())
}
#[inline(always)]
fn is_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_low())
}
}
impl<MODE, const P: char, const N: u8> InputPin for Pin<Input<MODE>, P, N> {
#[inline(always)]
fn is_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_high())
}
#[inline(always)]
fn is_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_low())
}
}
impl<const P: char, const N: u8> IoPin<Self, Self> for Pin<Output<OpenDrain>, P, N> {
type Error = Infallible;
fn into_input_pin(self) -> Result<Self, Self::Error> {
Ok(self)
}
fn into_output_pin(mut self, state: PinState) -> Result<Self, Self::Error> {
self.set_state(into_state(state));
Ok(self)
}
}
impl<const P: char, const N: u8> IoPin<Pin<Input<Floating>, P, N>, Self>
for Pin<Output<OpenDrain>, P, N>
{
type Error = Infallible;
fn into_input_pin(self) -> Result<Pin<Input<Floating>, P, N>, Self::Error> {
Ok(self.into_floating_input())
}
fn into_output_pin(mut self, state: PinState) -> Result<Self, Self::Error> {
self.set_state(into_state(state));
Ok(self)
}
}
impl<const P: char, const N: u8> IoPin<Self, Pin<Output<OpenDrain>, P, N>>
for Pin<Input<Floating>, P, N>
{
type Error = Infallible;
fn into_input_pin(self) -> Result<Self, Self::Error> {
Ok(self)
}
fn into_output_pin(self, state: PinState) -> Result<Pin<Output<OpenDrain>, P, N>, Self::Error> {
Ok(self.into_open_drain_output_in_state(into_state(state)))
}
}
impl<const P: char, const N: u8> IoPin<Pin<Input<Floating>, P, N>, Self>
for Pin<Output<PushPull>, P, N>
{
type Error = Infallible;
fn into_input_pin(self) -> Result<Pin<Input<Floating>, P, N>, Self::Error> {
Ok(self.into_floating_input())
}
fn into_output_pin(mut self, state: PinState) -> Result<Self, Self::Error> {
self.set_state(into_state(state));
Ok(self)
}
}
impl<const P: char, const N: u8> IoPin<Self, Pin<Output<PushPull>, P, N>>
for Pin<Input<Floating>, P, N>
{
type Error = Infallible;
fn into_input_pin(self) -> Result<Self, Self::Error> {
Ok(self)
}
fn into_output_pin(self, state: PinState) -> Result<Pin<Output<PushPull>, P, N>, Self::Error> {
Ok(self.into_push_pull_output_in_state(into_state(state)))
}
}
impl<const P: char, const N: u8> IoPin<Pin<Input<PullUp>, P, N>, Self>
for Pin<Output<PushPull>, P, N>
{
type Error = Infallible;
fn into_input_pin(self) -> Result<Pin<Input<PullUp>, P, N>, Self::Error> {
Ok(self.into_pull_up_input())
}
fn into_output_pin(mut self, state: PinState) -> Result<Self, Self::Error> {
self.set_state(into_state(state));
Ok(self)
}
}
impl<const P: char, const N: u8> IoPin<Self, Pin<Output<PushPull>, P, N>>
for Pin<Input<PullUp>, P, N>
{
type Error = Infallible;
fn into_input_pin(self) -> Result<Self, Self::Error> {
Ok(self)
}
fn into_output_pin(self, state: PinState) -> Result<Pin<Output<PushPull>, P, N>, Self::Error> {
Ok(self.into_push_pull_output_in_state(into_state(state)))
}
}
impl<const P: char, const N: u8> IoPin<Pin<Input<PullDown>, P, N>, Self>
for Pin<Output<PushPull>, P, N>
{
type Error = Infallible;
fn into_input_pin(self) -> Result<Pin<Input<PullDown>, P, N>, Self::Error> {
Ok(self.into_pull_down_input())
}
fn into_output_pin(mut self, state: PinState) -> Result<Self, Self::Error> {
self.set_state(into_state(state));
Ok(self)
}
}
impl<const P: char, const N: u8> IoPin<Self, Pin<Output<PushPull>, P, N>>
for Pin<Input<PullDown>, P, N>
{
type Error = Infallible;
fn into_input_pin(self) -> Result<Self, Self::Error> {
Ok(self)
}
fn into_output_pin(self, state: PinState) -> Result<Pin<Output<PushPull>, P, N>, Self::Error> {
Ok(self.into_push_pull_output_in_state(into_state(state)))
}
}
// Implementations for `ErasedPin`
impl<MODE> ErrorType for ErasedPin<MODE> {
type Error = core::convert::Infallible;
}
impl<MODE> OutputPin for ErasedPin<Output<MODE>> {
#[inline(always)]
fn set_high(&mut self) -> Result<(), Self::Error> {
self.set_high();
Ok(())
}
#[inline(always)]
fn set_low(&mut self) -> Result<(), Self::Error> {
self.set_low();
Ok(())
}
}
impl<MODE> StatefulOutputPin for ErasedPin<Output<MODE>> {
#[inline(always)]
fn is_set_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_set_high())
}
#[inline(always)]
fn is_set_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_set_low())
}
}
impl<MODE> ToggleableOutputPin for ErasedPin<Output<MODE>> {
#[inline(always)]
fn toggle(&mut self) -> Result<(), Self::Error> {
self.toggle();
Ok(())
}
}
impl InputPin for ErasedPin<Output<OpenDrain>> {
#[inline(always)]
fn is_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_high())
}
#[inline(always)]
fn is_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_low())
}
}
impl<MODE> InputPin for ErasedPin<Input<MODE>> {
#[inline(always)]
fn is_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_high())
}
#[inline(always)]
fn is_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_low())
}
}
// Implementations for `PartiallyErasedPin`
impl<MODE, const P: char> ErrorType for PartiallyErasedPin<MODE, P> {
type Error = Infallible;
}
impl<MODE, const P: char> OutputPin for PartiallyErasedPin<Output<MODE>, P> {
#[inline(always)]
fn set_high(&mut self) -> Result<(), Self::Error> {
self.set_high();
Ok(())
}
#[inline(always)]
fn set_low(&mut self) -> Result<(), Self::Error> {
self.set_low();
Ok(())
}
}
impl<MODE, const P: char> StatefulOutputPin for PartiallyErasedPin<Output<MODE>, P> {
#[inline(always)]
fn is_set_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_set_high())
}
#[inline(always)]
fn is_set_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_set_low())
}
}
impl<MODE, const P: char> ToggleableOutputPin for PartiallyErasedPin<Output<MODE>, P> {
#[inline(always)]
fn toggle(&mut self) -> Result<(), Self::Error> {
self.toggle();
Ok(())
}
}
impl<const P: char> InputPin for PartiallyErasedPin<Output<OpenDrain>, P> {
#[inline(always)]
fn is_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_high())
}
#[inline(always)]
fn is_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_low())
}
}
impl<MODE, const P: char> InputPin for PartiallyErasedPin<Input<MODE>, P> {
#[inline(always)]
fn is_high(&self) -> Result<bool, Self::Error> {
Ok(self.is_high())
}
#[inline(always)]
fn is_low(&self) -> Result<bool, Self::Error> {
Ok(self.is_low())
}
}
| 27.271875 | 100 | 0.602727 |
deddade3d8c895c21d3cdabd4b33f8e7056ff145
| 668 |
use wascc_host::{PublishedEvent, ControlInterface, ControlPlaneProvider, Result};
pub struct NatsControlPlaneProvider {
control: Option<ControlInterface>,
}
impl NatsControlPlaneProvider {
pub fn new() -> NatsControlPlaneProvider {
NatsControlPlaneProvider { control: None }
}
}
impl ControlPlaneProvider for NatsControlPlaneProvider {
fn init(&mut self, controller: ControlInterface) -> Result<()> {
self.control = Some(controller);
Ok(())
}
fn close(&mut self) -> Result<()> {
unimplemented!()
}
fn emit_control_event(&self, event: PublishedEvent) -> Result<()> {
unimplemented!()
}
}
| 24.740741 | 81 | 0.664671 |
753a4e491550f0d8346437ddbe155a7a72f748b9
| 1,656 |
// Test that `async { .. }` blocks:
// 1. do not allow `break` expressions.
// 2. get targeted by `return` and not the parent function.
// 3. get targeted by `?` and not the parent function.
//
// edition:2018
// ignore-tidy-linelength
fn main() {}
use core::future::Future;
fn return_targets_async_block_not_fn() -> u8 {
//~^ ERROR mismatched types
let block = async {
return 0u8;
};
let _: &dyn Future<Output = ()> = █
//~^ ERROR type mismatch resolving `<impl std::future::Future as std::future::Future>::Output == ()`
}
async fn return_targets_async_block_not_async_fn() -> u8 {
//~^ ERROR type mismatch resolving
let block = async {
return 0u8;
};
let _: &dyn Future<Output = ()> = █
//~^ ERROR type mismatch resolving `<impl std::future::Future as std::future::Future>::Output == ()`
}
fn no_break_in_async_block() {
async {
break 0u8; //~ ERROR `break` inside of an `async` block
};
}
fn no_break_in_async_block_even_with_outer_loop() {
loop {
async {
break 0u8; //~ ERROR `break` inside of an `async` block
};
}
}
struct MyErr;
fn err() -> Result<u8, MyErr> { Err(MyErr) }
fn rethrow_targets_async_block_not_fn() -> Result<u8, MyErr> {
//~^ ERROR mismatched types
let block = async {
err()?;
Ok(())
};
let _: &dyn Future<Output = Result<(), MyErr>> = █
}
fn rethrow_targets_async_block_not_async_fn() -> Result<u8, MyErr> {
//~^ ERROR mismatched types
let block = async {
err()?;
Ok(())
};
let _: &dyn Future<Output = Result<(), MyErr>> = █
}
| 25.476923 | 104 | 0.597222 |
223f91d909b3424c2420ca91eb4dcb3a56c48f76
| 67,928 |
//! Utilities for formatting and printing strings.
// ignore-tidy-undocumented-unsafe
#![stable(feature = "rust1", since = "1.0.0")]
use crate::cell::{Cell, Ref, RefCell, RefMut, UnsafeCell};
use crate::marker::PhantomData;
use crate::mem;
use crate::num::flt2dec;
use crate::ops::Deref;
use crate::result;
use crate::slice;
use crate::str;
mod builders;
mod float;
mod num;
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
/// Possible alignments returned by `Formatter::align`
#[derive(Debug)]
pub enum Alignment {
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
/// Indication that contents should be left-aligned.
Left,
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
/// Indication that contents should be right-aligned.
Right,
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
/// Indication that contents should be center-aligned.
Center,
}
#[stable(feature = "debug_builders", since = "1.2.0")]
pub use self::builders::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple};
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
#[doc(hidden)]
pub mod rt {
pub mod v1;
}
/// The type returned by formatter methods.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// #[derive(Debug)]
/// struct Triangle {
/// a: f32,
/// b: f32,
/// c: f32
/// }
///
/// impl fmt::Display for Triangle {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// write!(f, "({}, {}, {})", self.a, self.b, self.c)
/// }
/// }
///
/// let pythagorean_triple = Triangle { a: 3.0, b: 4.0, c: 5.0 };
///
/// println!("{}", pythagorean_triple);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub type Result = result::Result<(), Error>;
/// The error type which is returned from formatting a message into a stream.
///
/// This type does not support transmission of an error other than that an error
/// occurred. Any extra information must be arranged to be transmitted through
/// some other means.
///
/// An important thing to remember is that the type `fmt::Error` should not be
/// confused with [`std::io::Error`] or [`std::error::Error`], which you may also
/// have in scope.
///
/// [`std::io::Error`]: ../../std/io/struct.Error.html
/// [`std::error::Error`]: ../../std/error/trait.Error.html
///
/// # Examples
///
/// ```rust
/// use std::fmt::{self, write};
///
/// let mut output = String::new();
/// if let Err(fmt::Error) = write(&mut output, format_args!("Hello {}!", "world")) {
/// panic!("An error occurred");
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Error;
/// A collection of methods that are required to format a message into a stream.
///
/// This trait is the type which this module requires when formatting
/// information. This is similar to the standard library's [`io::Write`] trait,
/// but it is only intended for use in libcore.
///
/// This trait should generally not be implemented by consumers of the standard
/// library. The [`write!`] macro accepts an instance of [`io::Write`], and the
/// [`io::Write`] trait is favored over implementing this trait.
///
/// [`write!`]: ../../std/macro.write.html
/// [`io::Write`]: ../../std/io/trait.Write.html
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Write {
/// Writes a string slice into this writer, returning whether the write
/// succeeded.
///
/// This method can only succeed if the entire string slice was successfully
/// written, and this method will not return until all data has been
/// written or an error occurs.
///
/// # Errors
///
/// This function will return an instance of [`Error`] on error.
///
/// [`Error`]: struct.Error.html
///
/// # Examples
///
/// ```
/// use std::fmt::{Error, Write};
///
/// fn writer<W: Write>(f: &mut W, s: &str) -> Result<(), Error> {
/// f.write_str(s)
/// }
///
/// let mut buf = String::new();
/// writer(&mut buf, "hola").unwrap();
/// assert_eq!(&buf, "hola");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn write_str(&mut self, s: &str) -> Result;
/// Writes a [`char`] into this writer, returning whether the write succeeded.
///
/// A single [`char`] may be encoded as more than one byte.
/// This method can only succeed if the entire byte sequence was successfully
/// written, and this method will not return until all data has been
/// written or an error occurs.
///
/// # Errors
///
/// This function will return an instance of [`Error`] on error.
///
/// [`char`]: ../../std/primitive.char.html
/// [`Error`]: struct.Error.html
///
/// # Examples
///
/// ```
/// use std::fmt::{Error, Write};
///
/// fn writer<W: Write>(f: &mut W, c: char) -> Result<(), Error> {
/// f.write_char(c)
/// }
///
/// let mut buf = String::new();
/// writer(&mut buf, 'a').unwrap();
/// writer(&mut buf, 'b').unwrap();
/// assert_eq!(&buf, "ab");
/// ```
#[stable(feature = "fmt_write_char", since = "1.1.0")]
fn write_char(&mut self, c: char) -> Result {
self.write_str(c.encode_utf8(&mut [0; 4]))
}
/// Glue for usage of the [`write!`] macro with implementors of this trait.
///
/// This method should generally not be invoked manually, but rather through
/// the [`write!`] macro itself.
///
/// [`write!`]: ../../std/macro.write.html
///
/// # Examples
///
/// ```
/// use std::fmt::{Error, Write};
///
/// fn writer<W: Write>(f: &mut W, s: &str) -> Result<(), Error> {
/// f.write_fmt(format_args!("{}", s))
/// }
///
/// let mut buf = String::new();
/// writer(&mut buf, "world").unwrap();
/// assert_eq!(&buf, "world");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn write_fmt(mut self: &mut Self, args: Arguments<'_>) -> Result {
write(&mut self, args)
}
}
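// Blanket implementation so that a `&mut W` can be used wherever a `Write`
// is expected, forwarding each method to the underlying writer.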
#[stable(feature = "fmt_write_blanket_impl", since = "1.4.0")]
impl<W: Write + ?Sized> Write for &mut W {
fn write_str(&mut self, s: &str) -> Result {
(**self).write_str(s)
}
fn write_char(&mut self, c: char) -> Result {
(**self).write_char(c)
}
fn write_fmt(&mut self, args: Arguments<'_>) -> Result {
(**self).write_fmt(args)
}
}
/// Configuration for formatting.
///
/// A `Formatter` represents various options related to formatting. Users do not
/// construct `Formatter`s directly; a mutable reference to one is passed to
/// the `fmt` method of all formatting traits, like [`Debug`] and [`Display`].
///
/// To interact with a `Formatter`, you'll call various methods to change the
/// various options related to formatting. For examples, please see the
/// documentation of the methods defined on `Formatter` below.
///
/// [`Debug`]: trait.Debug.html
/// [`Display`]: trait.Display.html
#[allow(missing_debug_implementations)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Formatter<'a> {
flags: u32,
fill: char,
align: rt::v1::Alignment,
width: Option<usize>,
precision: Option<usize>,
buf: &'a mut (dyn Write + 'a),
curarg: slice::Iter<'a, ArgumentV1<'a>>,
args: &'a [ArgumentV1<'a>],
}
// NB. Argument is essentially an optimized partially applied formatting function,
// equivalent to `exists T.(&T, fn(&T, &mut Formatter<'_>) -> Result)`.
struct Void {
_priv: (),
/// Erases all oibits, because `Void` erases the type of the object that
/// will be used to produce formatted output. Since we do not know what
/// oibits the real types have (and they can have any or none), we need to
/// take the most conservative approach and forbid all oibits.
///
/// It was added after #45197 showed that one could share a `!Sync`
/// object across threads by passing it into `format_args!`.
_oibit_remover: PhantomData<*mut dyn Fn()>,
}
/// This struct represents the generic "argument" which is taken by the Xprintf
/// family of functions. It contains a function to format the given value. At
/// compile time it is ensured that the function and the value have the correct
/// types, and then this struct is used to canonicalize arguments to one type.
#[derive(Copy, Clone)]
#[allow(missing_debug_implementations)]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
#[doc(hidden)]
pub struct ArgumentV1<'a> {
value: &'a Void,
formatter: fn(&Void, &mut Formatter<'_>) -> Result,
}
impl<'a> ArgumentV1<'a> {
#[inline(never)]
fn show_usize(x: &usize, f: &mut Formatter<'_>) -> Result {
Display::fmt(x, f)
}
#[doc(hidden)]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
pub fn new<'b, T>(x: &'b T, f: fn(&T, &mut Formatter<'_>) -> Result) -> ArgumentV1<'b> {
unsafe { ArgumentV1 { formatter: mem::transmute(f), value: mem::transmute(x) } }
}
#[doc(hidden)]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
pub fn from_usize(x: &usize) -> ArgumentV1<'_> {
ArgumentV1::new(x, ArgumentV1::show_usize)
}
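    // Detects whether this argument was created by `from_usize` by comparing
    // formatter function pointers, so that width/precision counts referring
    // to an argument can read its value despite the erased type.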
fn as_usize(&self) -> Option<usize> {
if self.formatter as usize == ArgumentV1::show_usize as usize {
Some(unsafe { *(self.value as *const _ as *const usize) })
} else {
None
}
}
}
// flags available in the v1 format of format_args
#[derive(Copy, Clone)]
enum FlagV1 {
SignPlus,
SignMinus,
Alternate,
SignAwareZeroPad,
DebugLowerHex,
DebugUpperHex,
}
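// Each flag occupies one bit of `Formatter::flags`; the accessor methods
// below test them with expressions such as
// `self.flags & (1 << FlagV1::SignPlus as u32) != 0`.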
impl<'a> Arguments<'a> {
/// When using the format_args!() macro, this function is used to generate the
/// Arguments structure.
#[doc(hidden)]
#[inline]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
pub fn new_v1(pieces: &'a [&'a str], args: &'a [ArgumentV1<'a>]) -> Arguments<'a> {
Arguments { pieces, fmt: None, args }
}
/// This function is used to specify nonstandard formatting parameters.
/// The `pieces` array must be at least as long as `fmt` to construct
/// a valid Arguments structure. Also, any `Count` within `fmt` that is
/// `CountIsParam` or `CountIsNextParam` has to point to an argument
    /// created with `from_usize`. However, failing to do so doesn't cause
    /// unsafety; the invalid count will simply be ignored.
#[doc(hidden)]
#[inline]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
pub fn new_v1_formatted(
pieces: &'a [&'a str],
args: &'a [ArgumentV1<'a>],
fmt: &'a [rt::v1::Argument],
) -> Arguments<'a> {
Arguments { pieces, fmt: Some(fmt), args }
}
/// Estimates the length of the formatted text.
///
/// This is intended to be used for setting initial `String` capacity
/// when using `format!`. Note: this is neither the lower nor upper bound.
#[doc(hidden)]
#[inline]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
pub fn estimated_capacity(&self) -> usize {
let pieces_length: usize = self.pieces.iter().map(|x| x.len()).sum();
if self.args.is_empty() {
pieces_length
} else if self.pieces[0] == "" && pieces_length < 16 {
// If the format string starts with an argument,
// don't preallocate anything, unless length
// of pieces is significant.
0
} else {
// There are some arguments, so any additional push
// will reallocate the string. To avoid that,
// we're "pre-doubling" the capacity here.
pieces_length.checked_mul(2).unwrap_or(0)
}
}
}
/// This structure represents a safely precompiled version of a format string
/// and its arguments. This cannot be generated at runtime because it cannot
/// safely be done, so no constructors are given and the fields are private
/// to prevent modification.
///
/// The [`format_args!`] macro will safely create an instance of this structure.
/// The macro validates the format string at compile-time so usage of the
/// [`write`] and [`format`] functions can be safely performed.
///
/// You can use the `Arguments<'a>` that [`format_args!`] returns in `Debug`
/// and `Display` contexts as seen below. The example also shows that `Debug`
/// and `Display` format to the same thing: the interpolated format string
/// in `format_args!`.
///
/// ```rust
/// let debug = format!("{:?}", format_args!("{} foo {:?}", 1, 2));
/// let display = format!("{}", format_args!("{} foo {:?}", 1, 2));
/// assert_eq!("1 foo 2", display);
/// assert_eq!(display, debug);
/// ```
///
/// [`format_args!`]: ../../std/macro.format_args.html
/// [`format`]: ../../std/fmt/fn.format.html
/// [`write`]: ../../std/fmt/fn.write.html
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Copy, Clone)]
pub struct Arguments<'a> {
// Format string pieces to print.
pieces: &'a [&'a str],
// Placeholder specs, or `None` if all specs are default (as in "{}{}").
fmt: Option<&'a [rt::v1::Argument]>,
// Dynamic arguments for interpolation, to be interleaved with string
// pieces. (Every argument is preceded by a string piece.)
args: &'a [ArgumentV1<'a>],
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for Arguments<'_> {
fn fmt(&self, fmt: &mut Formatter<'_>) -> Result {
Display::fmt(self, fmt)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for Arguments<'_> {
fn fmt(&self, fmt: &mut Formatter<'_>) -> Result {
write(fmt.buf, *self)
}
}
/// `?` formatting.
///
/// `Debug` should format the output in a programmer-facing, debugging context.
///
/// Generally speaking, you should just `derive` a `Debug` implementation.
///
/// When used with the alternate format specifier `#?`, the output is pretty-printed.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// This trait can be used with `#[derive]` if all fields implement `Debug`. When
/// `derive`d for structs, it will use the name of the `struct`, then `{`, then a
/// comma-separated list of each field's name and `Debug` value, then `}`. For
/// `enum`s, it will use the name of the variant and, if applicable, `(`, then the
/// `Debug` values of the fields, then `)`.
///
/// # Examples
///
/// Deriving an implementation:
///
/// ```
/// #[derive(Debug)]
/// struct Point {
/// x: i32,
/// y: i32,
/// }
///
/// let origin = Point { x: 0, y: 0 };
///
/// println!("The origin is: {:?}", origin);
/// ```
///
/// Manually implementing:
///
/// ```
/// use std::fmt;
///
/// struct Point {
/// x: i32,
/// y: i32,
/// }
///
/// impl fmt::Debug for Point {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// write!(f, "Point {{ x: {}, y: {} }}", self.x, self.y)
/// }
/// }
///
/// let origin = Point { x: 0, y: 0 };
///
/// println!("The origin is: {:?}", origin);
/// ```
///
/// This outputs:
///
/// ```text
/// The origin is: Point { x: 0, y: 0 }
/// ```
///
/// There are a number of `debug_*` methods on [`Formatter`] to help you with manual
/// implementations, such as [`debug_struct`][debug_struct].
///
/// `Debug` implementations using either `derive` or the debug builder API
/// on [`Formatter`] support pretty-printing using the alternate flag: `{:#?}`.
///
/// [debug_struct]: ../../std/fmt/struct.Formatter.html#method.debug_struct
/// [`Formatter`]: ../../std/fmt/struct.Formatter.html
///
/// Pretty-printing with `#?`:
///
/// ```
/// #[derive(Debug)]
/// struct Point {
/// x: i32,
/// y: i32,
/// }
///
/// let origin = Point { x: 0, y: 0 };
///
/// println!("The origin is: {:#?}", origin);
/// ```
///
/// This outputs:
///
/// ```text
/// The origin is: Point {
/// x: 0,
/// y: 0
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
on(
crate_local,
label = "`{Self}` cannot be formatted using `{{:?}}`",
note = "add `#[derive(Debug)]` or manually implement `{Debug}`"
),
message = "`{Self}` doesn't implement `{Debug}`",
label = "`{Self}` cannot be formatted using `{{:?}}` because it doesn't implement `{Debug}`"
)]
#[doc(alias = "{:?}")]
#[rustc_diagnostic_item = "debug_trait"]
pub trait Debug {
/// Formats the value using the given formatter.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Position {
/// longitude: f32,
/// latitude: f32,
/// }
///
/// impl fmt::Debug for Position {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// write!(f, "({:?}, {:?})", self.longitude, self.latitude)
/// }
/// }
///
/// assert_eq!("(1.987, 2.983)".to_owned(),
/// format!("{:?}", Position { longitude: 1.987, latitude: 2.983, }));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
// Separate module to reexport the macro `Debug` from prelude without the trait `Debug`.
pub(crate) mod macros {
/// Derive macro generating an impl of the trait `Debug`.
#[rustc_builtin_macro]
#[stable(feature = "builtin_macro_prelude", since = "1.38.0")]
#[allow_internal_unstable(core_intrinsics)]
pub macro Debug($item:item) {
/* compiler built-in */
}
}
#[stable(feature = "builtin_macro_prelude", since = "1.38.0")]
#[doc(inline)]
pub use macros::Debug;
/// Format trait for an empty format, `{}`.
///
/// `Display` is similar to [`Debug`][debug], but `Display` is for user-facing
/// output, and so cannot be derived.
///
/// [debug]: trait.Debug.html
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Implementing `Display` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Point {
/// x: i32,
/// y: i32,
/// }
///
/// impl fmt::Display for Point {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// write!(f, "({}, {})", self.x, self.y)
/// }
/// }
///
/// let origin = Point { x: 0, y: 0 };
///
/// println!("The origin is: {}", origin);
/// ```
#[rustc_on_unimplemented(
on(
_Self = "std::path::Path",
label = "`{Self}` cannot be formatted with the default formatter; call `.display()` on it",
note = "call `.display()` or `.to_string_lossy()` to safely print paths, \
as they may contain non-Unicode data"
),
message = "`{Self}` doesn't implement `{Display}`",
label = "`{Self}` cannot be formatted with the default formatter",
note = "in format strings you may be able to use `{{:?}}` (or {{:#?}} for pretty-print) instead"
)]
#[doc(alias = "{}")]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Display {
/// Formats the value using the given formatter.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Position {
/// longitude: f32,
/// latitude: f32,
/// }
///
/// impl fmt::Display for Position {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// write!(f, "({}, {})", self.longitude, self.latitude)
/// }
/// }
///
/// assert_eq!("(1.987, 2.983)".to_owned(),
/// format!("{}", Position { longitude: 1.987, latitude: 2.983, }));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `o` formatting.
///
/// The `Octal` trait should format its output as a number in base-8.
///
/// For primitive signed integers (`i8` to `i128`, and `isize`),
/// negative values are formatted as the two’s complement representation.
///
/// The alternate flag, `#`, adds a `0o` in front of the output.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `i32`:
///
/// ```
/// let x = 42; // 42 is '52' in octal
///
/// assert_eq!(format!("{:o}", x), "52");
/// assert_eq!(format!("{:#o}", x), "0o52");
///
/// assert_eq!(format!("{:o}", -16), "37777777760");
/// ```
///
/// Implementing `Octal` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::Octal for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let val = self.0;
///
/// fmt::Octal::fmt(&val, f) // delegate to i32's implementation
/// }
/// }
///
/// let l = Length(9);
///
/// println!("l as octal is: {:o}", l);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Octal {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `b` formatting.
///
/// The `Binary` trait should format its output as a number in binary.
///
/// For primitive signed integers ([`i8`] to [`i128`], and [`isize`]),
/// negative values are formatted as the two’s complement representation.
///
/// The alternate flag, `#`, adds a `0b` in front of the output.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// # Examples
///
/// Basic usage with [`i32`]:
///
/// ```
/// let x = 42; // 42 is '101010' in binary
///
/// assert_eq!(format!("{:b}", x), "101010");
/// assert_eq!(format!("{:#b}", x), "0b101010");
///
/// assert_eq!(format!("{:b}", -16), "11111111111111111111111111110000");
/// ```
///
/// Implementing `Binary` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::Binary for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let val = self.0;
///
/// fmt::Binary::fmt(&val, f) // delegate to i32's implementation
/// }
/// }
///
/// let l = Length(107);
///
/// println!("l as binary is: {:b}", l);
/// ```
///
/// [module]: ../../std/fmt/index.html
/// [`i8`]: ../../std/primitive.i8.html
/// [`i128`]: ../../std/primitive.i128.html
/// [`isize`]: ../../std/primitive.isize.html
/// [`i32`]: ../../std/primitive.i32.html
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Binary {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `x` formatting.
///
/// The `LowerHex` trait should format its output as a number in hexadecimal, with `a` through `f`
/// in lower case.
///
/// For primitive signed integers (`i8` to `i128`, and `isize`),
/// negative values are formatted as the two’s complement representation.
///
/// The alternate flag, `#`, adds a `0x` in front of the output.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `i32`:
///
/// ```
/// let x = 42; // 42 is '2a' in hex
///
/// assert_eq!(format!("{:x}", x), "2a");
/// assert_eq!(format!("{:#x}", x), "0x2a");
///
/// assert_eq!(format!("{:x}", -16), "fffffff0");
/// ```
///
/// Implementing `LowerHex` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::LowerHex for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let val = self.0;
///
/// fmt::LowerHex::fmt(&val, f) // delegate to i32's implementation
/// }
/// }
///
/// let l = Length(9);
///
/// println!("l as hex is: {:x}", l);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait LowerHex {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `X` formatting.
///
/// The `UpperHex` trait should format its output as a number in hexadecimal, with `A` through `F`
/// in upper case.
///
/// For primitive signed integers (`i8` to `i128`, and `isize`),
/// negative values are formatted as the two’s complement representation.
///
/// The alternate flag, `#`, adds a `0x` in front of the output.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `i32`:
///
/// ```
/// let x = 42; // 42 is '2A' in hex
///
/// assert_eq!(format!("{:X}", x), "2A");
/// assert_eq!(format!("{:#X}", x), "0x2A");
///
/// assert_eq!(format!("{:X}", -16), "FFFFFFF0");
/// ```
///
/// Implementing `UpperHex` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::UpperHex for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let val = self.0;
///
/// fmt::UpperHex::fmt(&val, f) // delegate to i32's implementation
/// }
/// }
///
/// let l = Length(9);
///
/// println!("l as hex is: {:X}", l);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait UpperHex {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `p` formatting.
///
/// The `Pointer` trait should format its output as a memory location. This is commonly presented
/// as hexadecimal.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `&i32`:
///
/// ```
/// let x = &42;
///
/// let address = format!("{:p}", x); // this produces something like '0x7f06092ac6d0'
/// ```
///
/// Implementing `Pointer` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::Pointer for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// // use `as` to convert to a `*const T`, which implements Pointer, which we can use
///
/// let ptr = self as *const Self;
/// fmt::Pointer::fmt(&ptr, f)
/// }
/// }
///
/// let l = Length(42);
///
/// println!("l is in memory here: {:p}", l);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Pointer {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `e` formatting.
///
/// The `LowerExp` trait should format its output in scientific notation with a lower-case `e`.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `f64`:
///
/// ```
/// let x = 42.0; // 42.0 is '4.2e1' in scientific notation
///
/// assert_eq!(format!("{:e}", x), "4.2e1");
/// ```
///
/// Implementing `LowerExp` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::LowerExp for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let val = self.0;
/// write!(f, "{}e1", val / 10)
/// }
/// }
///
/// let l = Length(100);
///
/// println!("l in scientific notation is: {:e}", l);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait LowerExp {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `E` formatting.
///
/// The `UpperExp` trait should format its output in scientific notation with an upper-case `E`.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `f64`:
///
/// ```
/// let x = 42.0; // 42.0 is '4.2E1' in scientific notation
///
/// assert_eq!(format!("{:E}", x), "4.2E1");
/// ```
///
/// Implementing `UpperExp` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::UpperExp for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let val = self.0;
/// write!(f, "{}E1", val / 10)
/// }
/// }
///
/// let l = Length(100);
///
/// println!("l in scientific notation is: {:E}", l);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait UpperExp {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// The `write` function takes an output stream, and an `Arguments` struct
/// that can be precompiled with the `format_args!` macro.
///
/// The arguments will be formatted according to the specified format string
/// into the output stream provided.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use std::fmt;
///
/// let mut output = String::new();
/// fmt::write(&mut output, format_args!("Hello {}!", "world"))
/// .expect("Error occurred while trying to write in String");
/// assert_eq!(output, "Hello world!");
/// ```
///
/// Please note that using [`write!`] might be preferable. Example:
///
/// ```
/// use std::fmt::Write;
///
/// let mut output = String::new();
/// write!(&mut output, "Hello {}!", "world")
/// .expect("Error occurred while trying to write in String");
/// assert_eq!(output, "Hello world!");
/// ```
///
/// [`write!`]: ../../std/macro.write.html
#[stable(feature = "rust1", since = "1.0.0")]
pub fn write(output: &mut dyn Write, args: Arguments<'_>) -> Result {
let mut formatter = Formatter {
flags: 0,
width: None,
precision: None,
buf: output,
align: rt::v1::Alignment::Unknown,
fill: ' ',
args: args.args,
curarg: args.args.iter(),
};
let mut idx = 0;
match args.fmt {
None => {
// We can use default formatting parameters for all arguments.
for (arg, piece) in args.args.iter().zip(args.pieces.iter()) {
formatter.buf.write_str(*piece)?;
(arg.formatter)(arg.value, &mut formatter)?;
idx += 1;
}
}
Some(fmt) => {
// Every spec has a corresponding argument that is preceded by
// a string piece.
for (arg, piece) in fmt.iter().zip(args.pieces.iter()) {
formatter.buf.write_str(*piece)?;
formatter.run(arg)?;
idx += 1;
}
}
}
// There can be only one trailing string piece left.
if let Some(piece) = args.pieces.get(idx) {
formatter.buf.write_str(*piece)?;
}
Ok(())
}
/// Padding after the end of something. Returned by `Formatter::padding`.
#[must_use = "don't forget to write the post padding"]
struct PostPadding {
fill: char,
padding: usize,
}
impl PostPadding {
fn new(fill: char, padding: usize) -> PostPadding {
PostPadding { fill, padding }
}
/// Write this post padding.
fn write(self, buf: &mut dyn Write) -> Result {
for _ in 0..self.padding {
buf.write_char(self.fill)?;
}
Ok(())
}
}
impl<'a> Formatter<'a> {
fn wrap_buf<'b, 'c, F>(&'b mut self, wrap: F) -> Formatter<'c>
where
'b: 'c,
F: FnOnce(&'b mut (dyn Write + 'b)) -> &'c mut (dyn Write + 'c),
{
Formatter {
// We want to change this
buf: wrap(self.buf),
// And preserve these
flags: self.flags,
fill: self.fill,
align: self.align,
width: self.width,
precision: self.precision,
// These only exist in the struct for the `run` method,
// which won’t be used together with this method.
curarg: self.curarg.clone(),
args: self.args,
}
}
// First up is the collection of functions used to execute a format string
// at runtime. This consumes all of the compile-time statics generated by
// the format! syntax extension.
fn run(&mut self, arg: &rt::v1::Argument) -> Result {
// Fill in the format parameters into the formatter
self.fill = arg.format.fill;
self.align = arg.format.align;
self.flags = arg.format.flags;
self.width = self.getcount(&arg.format.width);
self.precision = self.getcount(&arg.format.precision);
// Extract the correct argument
let value = match arg.position {
rt::v1::Position::Next => *self.curarg.next().unwrap(),
rt::v1::Position::At(i) => self.args[i],
};
// Then actually do some printing
(value.formatter)(value.value, self)
}
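    // Resolves a width or precision count from the format spec: a literal
    // value, an implied (absent) one, or a `usize` argument looked up by
    // position or taken from the next argument in sequence.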
fn getcount(&mut self, cnt: &rt::v1::Count) -> Option<usize> {
match *cnt {
rt::v1::Count::Is(n) => Some(n),
rt::v1::Count::Implied => None,
rt::v1::Count::Param(i) => self.args[i].as_usize(),
rt::v1::Count::NextParam => self.curarg.next()?.as_usize(),
}
}
// Helper methods used for padding and processing formatting arguments that
// all formatting traits can use.
/// Performs the correct padding for an integer which has already been
/// emitted into a str. The str should *not* contain the sign for the
    /// integer; that will be added by this method.
///
/// # Arguments
///
/// * is_nonnegative - whether the original integer was either positive or zero.
/// * prefix - if the '#' character (Alternate) is provided, this
/// is the prefix to put in front of the number.
/// * buf - the byte array that the number has been formatted into
///
/// This function will correctly account for the flags provided as well as
/// the minimum width. It will not take precision into account.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
    /// struct Foo { nb: i32 }
///
/// impl Foo {
/// fn new(nb: i32) -> Foo {
/// Foo {
/// nb,
/// }
/// }
/// }
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// // We need to remove "-" from the number output.
/// let tmp = self.nb.abs().to_string();
///
    ///         formatter.pad_integral(self.nb >= 0, "Foo ", &tmp)
/// }
/// }
///
/// assert_eq!(&format!("{}", Foo::new(2)), "2");
/// assert_eq!(&format!("{}", Foo::new(-1)), "-1");
/// assert_eq!(&format!("{:#}", Foo::new(-1)), "-Foo 1");
/// assert_eq!(&format!("{:0>#8}", Foo::new(-1)), "00-Foo 1");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn pad_integral(&mut self, is_nonnegative: bool, prefix: &str, buf: &str) -> Result {
let mut width = buf.len();
let mut sign = None;
if !is_nonnegative {
sign = Some('-');
width += 1;
} else if self.sign_plus() {
sign = Some('+');
width += 1;
}
let prefix = if self.alternate() {
width += prefix.chars().count();
Some(prefix)
} else {
None
};
// Writes the sign if it exists, and then the prefix if it was requested
#[inline(never)]
fn write_prefix(f: &mut Formatter<'_>, sign: Option<char>, prefix: Option<&str>) -> Result {
if let Some(c) = sign {
f.buf.write_char(c)?;
}
if let Some(prefix) = prefix { f.buf.write_str(prefix) } else { Ok(()) }
}
// The `width` field is more of a `min-width` parameter at this point.
match self.width {
// If there's no minimum length requirements then we can just
// write the bytes.
None => {
write_prefix(self, sign, prefix)?;
self.buf.write_str(buf)
}
// Check if we're over the minimum width, if so then we can also
// just write the bytes.
Some(min) if width >= min => {
write_prefix(self, sign, prefix)?;
self.buf.write_str(buf)
}
// The sign and prefix goes before the padding if the fill character
// is zero
Some(min) if self.sign_aware_zero_pad() => {
self.fill = '0';
self.align = rt::v1::Alignment::Right;
write_prefix(self, sign, prefix)?;
let post_padding = self.padding(min - width, rt::v1::Alignment::Right)?;
self.buf.write_str(buf)?;
post_padding.write(self.buf)
}
// Otherwise, the sign and prefix goes after the padding
Some(min) => {
let post_padding = self.padding(min - width, rt::v1::Alignment::Right)?;
write_prefix(self, sign, prefix)?;
self.buf.write_str(buf)?;
post_padding.write(self.buf)
}
}
}
/// This function takes a string slice and emits it to the internal buffer
/// after applying the relevant formatting flags specified. The flags
/// recognized for generic strings are:
///
/// * width - the minimum width of what to emit
/// * fill/align - what to emit and where to emit it if the string
/// provided needs to be padded
/// * precision - the maximum length to emit, the string is truncated if it
/// is longer than this length
///
/// Notably this function ignores the `flag` parameters.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo;
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// formatter.pad("Foo")
/// }
/// }
///
/// assert_eq!(&format!("{:<4}", Foo), "Foo ");
/// assert_eq!(&format!("{:0>4}", Foo), "0Foo");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn pad(&mut self, s: &str) -> Result {
// Make sure there's a fast path up front
if self.width.is_none() && self.precision.is_none() {
return self.buf.write_str(s);
}
// The `precision` field can be interpreted as a `max-width` for the
// string being formatted.
let s = if let Some(max) = self.precision {
            // If our string is longer than the precision, then we must have
// truncation. However other flags like `fill`, `width` and `align`
// must act as always.
if let Some((i, _)) = s.char_indices().nth(max) {
// LLVM here can't prove that `..i` won't panic `&s[..i]`, but
// we know that it can't panic. Use `get` + `unwrap_or` to avoid
// `unsafe` and otherwise don't emit any panic-related code
// here.
s.get(..i).unwrap_or(&s)
} else {
&s
}
} else {
&s
};
// The `width` field is more of a `min-width` parameter at this point.
match self.width {
// If we're under the maximum length, and there's no minimum length
// requirements, then we can just emit the string
None => self.buf.write_str(s),
// If we're under the maximum width, check if we're over the minimum
// width, if so it's as easy as just emitting the string.
Some(width) if s.chars().count() >= width => self.buf.write_str(s),
// If we're under both the maximum and the minimum width, then fill
// up the minimum width with the specified string + some alignment.
Some(width) => {
let align = rt::v1::Alignment::Left;
let post_padding = self.padding(width - s.chars().count(), align)?;
self.buf.write_str(s)?;
post_padding.write(self.buf)
}
}
}
/// Write the pre-padding and return the unwritten post-padding. Callers are
/// responsible for ensuring post-padding is written after the thing that is
/// being padded.
fn padding(
&mut self,
padding: usize,
default: rt::v1::Alignment,
) -> result::Result<PostPadding, Error> {
let align = match self.align {
rt::v1::Alignment::Unknown => default,
_ => self.align,
};
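        // Split the padding between the two sides of the value; for centered
        // output the extra fill character (when `padding` is odd) goes on the
        // right-hand side.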
let (pre_pad, post_pad) = match align {
rt::v1::Alignment::Left => (0, padding),
rt::v1::Alignment::Right | rt::v1::Alignment::Unknown => (padding, 0),
rt::v1::Alignment::Center => (padding / 2, (padding + 1) / 2),
};
for _ in 0..pre_pad {
self.buf.write_char(self.fill)?;
}
Ok(PostPadding::new(self.fill, post_pad))
}
/// Takes the formatted parts and applies the padding.
/// Assumes that the caller already has rendered the parts with required precision,
/// so that `self.precision` can be ignored.
fn pad_formatted_parts(&mut self, formatted: &flt2dec::Formatted<'_>) -> Result {
if let Some(mut width) = self.width {
// for the sign-aware zero padding, we render the sign first and
// behave as if we had no sign from the beginning.
let mut formatted = formatted.clone();
let old_fill = self.fill;
let old_align = self.align;
let mut align = old_align;
if self.sign_aware_zero_pad() {
// a sign always goes first
let sign = unsafe { str::from_utf8_unchecked(formatted.sign) };
self.buf.write_str(sign)?;
// remove the sign from the formatted parts
formatted.sign = b"";
width = width.saturating_sub(sign.len());
align = rt::v1::Alignment::Right;
self.fill = '0';
self.align = rt::v1::Alignment::Right;
}
// remaining parts go through the ordinary padding process.
let len = formatted.len();
let ret = if width <= len {
// no padding
self.write_formatted_parts(&formatted)
} else {
let post_padding = self.padding(width - len, align)?;
self.write_formatted_parts(&formatted)?;
post_padding.write(self.buf)
};
self.fill = old_fill;
self.align = old_align;
ret
} else {
// this is the common case and we take a shortcut
self.write_formatted_parts(formatted)
}
}
fn write_formatted_parts(&mut self, formatted: &flt2dec::Formatted<'_>) -> Result {
fn write_bytes(buf: &mut dyn Write, s: &[u8]) -> Result {
buf.write_str(unsafe { str::from_utf8_unchecked(s) })
}
if !formatted.sign.is_empty() {
write_bytes(self.buf, formatted.sign)?;
}
for part in formatted.parts {
match *part {
flt2dec::Part::Zero(mut nzeroes) => {
const ZEROES: &str = // 64 zeroes
"0000000000000000000000000000000000000000000000000000000000000000";
while nzeroes > ZEROES.len() {
self.buf.write_str(ZEROES)?;
nzeroes -= ZEROES.len();
}
if nzeroes > 0 {
self.buf.write_str(&ZEROES[..nzeroes])?;
}
}
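                // A `Part::Num` holds a small integer; render its decimal
                // digits into a fixed stack buffer, filling it from the least
                // significant digit backwards.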
flt2dec::Part::Num(mut v) => {
let mut s = [0; 5];
let len = part.len();
for c in s[..len].iter_mut().rev() {
*c = b'0' + (v % 10) as u8;
v /= 10;
}
write_bytes(self.buf, &s[..len])?;
}
flt2dec::Part::Copy(buf) => {
write_bytes(self.buf, buf)?;
}
}
}
Ok(())
}
/// Writes some data to the underlying buffer contained within this
/// formatter.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo;
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// formatter.write_str("Foo")
/// // This is equivalent to:
/// // write!(formatter, "Foo")
/// }
/// }
///
/// assert_eq!(&format!("{}", Foo), "Foo");
/// assert_eq!(&format!("{:0>8}", Foo), "Foo");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn write_str(&mut self, data: &str) -> Result {
self.buf.write_str(data)
}
/// Writes some formatted information into this instance.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// formatter.write_fmt(format_args!("Foo {}", self.0))
/// }
/// }
///
/// assert_eq!(&format!("{}", Foo(-1)), "Foo -1");
/// assert_eq!(&format!("{:0>8}", Foo(2)), "Foo 2");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn write_fmt(&mut self, fmt: Arguments<'_>) -> Result {
write(self.buf, fmt)
}
/// Flags for formatting
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(
since = "1.24.0",
reason = "use the `sign_plus`, `sign_minus`, `alternate`, \
or `sign_aware_zero_pad` methods instead"
)]
pub fn flags(&self) -> u32 {
self.flags
}
/// Character used as 'fill' whenever there is alignment.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo;
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// let c = formatter.fill();
/// if let Some(width) = formatter.width() {
/// for _ in 0..width {
/// write!(formatter, "{}", c)?;
/// }
/// Ok(())
/// } else {
/// write!(formatter, "{}", c)
/// }
/// }
/// }
///
    /// // We set alignment to the right with ">".
/// assert_eq!(&format!("{:G>3}", Foo), "GGG");
/// assert_eq!(&format!("{:t>6}", Foo), "tttttt");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn fill(&self) -> char {
self.fill
}
/// Flag indicating what form of alignment was requested.
///
/// # Examples
///
/// ```
/// extern crate core;
///
/// use std::fmt::{self, Alignment};
///
/// struct Foo;
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// let s = if let Some(s) = formatter.align() {
/// match s {
/// Alignment::Left => "left",
/// Alignment::Right => "right",
/// Alignment::Center => "center",
/// }
/// } else {
/// "into the void"
/// };
/// write!(formatter, "{}", s)
/// }
/// }
///
/// assert_eq!(&format!("{:<}", Foo), "left");
/// assert_eq!(&format!("{:>}", Foo), "right");
/// assert_eq!(&format!("{:^}", Foo), "center");
/// assert_eq!(&format!("{}", Foo), "into the void");
/// ```
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
pub fn align(&self) -> Option<Alignment> {
match self.align {
rt::v1::Alignment::Left => Some(Alignment::Left),
rt::v1::Alignment::Right => Some(Alignment::Right),
rt::v1::Alignment::Center => Some(Alignment::Center),
rt::v1::Alignment::Unknown => None,
}
}
/// Optionally specified integer width that the output should be.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if let Some(width) = formatter.width() {
/// // If we received a width, we use it
/// write!(formatter, "{:width$}", &format!("Foo({})", self.0), width = width)
/// } else {
/// // Otherwise we do nothing special
/// write!(formatter, "Foo({})", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:10}", Foo(23)), "Foo(23) ");
/// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn width(&self) -> Option<usize> {
self.width
}
/// Optionally specified precision for numeric types.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(f32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if let Some(precision) = formatter.precision() {
/// // If we received a precision, we use it.
/// write!(formatter, "Foo({1:.*})", precision, self.0)
/// } else {
/// // Otherwise we default to 2.
/// write!(formatter, "Foo({:.2})", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:.4}", Foo(23.2)), "Foo(23.2000)");
/// assert_eq!(&format!("{}", Foo(23.2)), "Foo(23.20)");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn precision(&self) -> Option<usize> {
self.precision
}
/// Determines if the `+` flag was specified.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if formatter.sign_plus() {
/// write!(formatter,
/// "Foo({}{})",
/// if self.0 < 0 { '-' } else { '+' },
/// self.0)
/// } else {
/// write!(formatter, "Foo({})", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:+}", Foo(23)), "Foo(+23)");
/// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn sign_plus(&self) -> bool {
self.flags & (1 << FlagV1::SignPlus as u32) != 0
}
/// Determines if the `-` flag was specified.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if formatter.sign_minus() {
/// // You want a minus sign? Have one!
/// write!(formatter, "-Foo({})", self.0)
/// } else {
/// write!(formatter, "Foo({})", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:-}", Foo(23)), "-Foo(23)");
/// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn sign_minus(&self) -> bool {
self.flags & (1 << FlagV1::SignMinus as u32) != 0
}
/// Determines if the `#` flag was specified.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if formatter.alternate() {
/// write!(formatter, "Foo({})", self.0)
/// } else {
/// write!(formatter, "{}", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:#}", Foo(23)), "Foo(23)");
/// assert_eq!(&format!("{}", Foo(23)), "23");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn alternate(&self) -> bool {
self.flags & (1 << FlagV1::Alternate as u32) != 0
}
/// Determines if the `0` flag was specified.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// assert!(formatter.sign_aware_zero_pad());
/// assert_eq!(formatter.width(), Some(4));
/// // We ignore the formatter's options.
/// write!(formatter, "{}", self.0)
/// }
/// }
///
/// assert_eq!(&format!("{:04}", Foo(23)), "23");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn sign_aware_zero_pad(&self) -> bool {
self.flags & (1 << FlagV1::SignAwareZeroPad as u32) != 0
}
// FIXME: Decide what public API we want for these two flags.
// https://github.com/rust-lang/rust/issues/48584
fn debug_lower_hex(&self) -> bool {
self.flags & (1 << FlagV1::DebugLowerHex as u32) != 0
}
fn debug_upper_hex(&self) -> bool {
self.flags & (1 << FlagV1::DebugUpperHex as u32) != 0
}
/// Creates a [`DebugStruct`] builder designed to assist with creation of
/// [`fmt::Debug`] implementations for structs.
///
/// [`DebugStruct`]: ../../std/fmt/struct.DebugStruct.html
/// [`fmt::Debug`]: ../../std/fmt/trait.Debug.html
///
/// # Examples
///
/// ```rust
/// use std::fmt;
/// use std::net::Ipv4Addr;
///
/// struct Foo {
/// bar: i32,
/// baz: String,
/// addr: Ipv4Addr,
/// }
///
/// impl fmt::Debug for Foo {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_struct("Foo")
/// .field("bar", &self.bar)
/// .field("baz", &self.baz)
/// .field("addr", &format_args!("{}", self.addr))
/// .finish()
/// }
/// }
///
/// assert_eq!(
/// "Foo { bar: 10, baz: \"Hello World\", addr: 127.0.0.1 }",
/// format!("{:?}", Foo {
/// bar: 10,
/// baz: "Hello World".to_string(),
/// addr: Ipv4Addr::new(127, 0, 0, 1),
/// })
/// );
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_struct<'b>(&'b mut self, name: &str) -> DebugStruct<'b, 'a> {
builders::debug_struct_new(self, name)
}
/// Creates a `DebugTuple` builder designed to assist with creation of
/// `fmt::Debug` implementations for tuple structs.
///
/// # Examples
///
/// ```rust
/// use std::fmt;
/// use std::marker::PhantomData;
///
/// struct Foo<T>(i32, String, PhantomData<T>);
///
/// impl<T> fmt::Debug for Foo<T> {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_tuple("Foo")
/// .field(&self.0)
/// .field(&self.1)
/// .field(&format_args!("_"))
/// .finish()
/// }
/// }
///
/// assert_eq!(
/// "Foo(10, \"Hello\", _)",
/// format!("{:?}", Foo(10, "Hello".to_string(), PhantomData::<u8>))
/// );
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_tuple<'b>(&'b mut self, name: &str) -> DebugTuple<'b, 'a> {
builders::debug_tuple_new(self, name)
}
/// Creates a `DebugList` builder designed to assist with creation of
/// `fmt::Debug` implementations for list-like structures.
///
/// # Examples
///
/// ```rust
/// use std::fmt;
///
/// struct Foo(Vec<i32>);
///
/// impl fmt::Debug for Foo {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_list().entries(self.0.iter()).finish()
/// }
/// }
///
/// // prints "[10, 11]"
/// println!("{:?}", Foo(vec![10, 11]));
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_list<'b>(&'b mut self) -> DebugList<'b, 'a> {
builders::debug_list_new(self)
}
/// Creates a `DebugSet` builder designed to assist with creation of
/// `fmt::Debug` implementations for set-like structures.
///
/// # Examples
///
/// ```rust
/// use std::fmt;
///
/// struct Foo(Vec<i32>);
///
/// impl fmt::Debug for Foo {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_set().entries(self.0.iter()).finish()
/// }
/// }
///
/// // prints "{10, 11}"
/// println!("{:?}", Foo(vec![10, 11]));
/// ```
///
/// [`format_args!`]: ../../std/macro.format_args.html
///
/// In this more complex example, we use [`format_args!`] and `.debug_set()`
/// to build a list of match arms:
///
/// ```rust
/// use std::fmt;
///
/// struct Arm<'a, L: 'a, R: 'a>(&'a (L, R));
/// struct Table<'a, K: 'a, V: 'a>(&'a [(K, V)], V);
///
/// impl<'a, L, R> fmt::Debug for Arm<'a, L, R>
/// where
/// L: 'a + fmt::Debug, R: 'a + fmt::Debug
/// {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// L::fmt(&(self.0).0, fmt)?;
/// fmt.write_str(" => ")?;
/// R::fmt(&(self.0).1, fmt)
/// }
/// }
///
/// impl<'a, K, V> fmt::Debug for Table<'a, K, V>
/// where
/// K: 'a + fmt::Debug, V: 'a + fmt::Debug
/// {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_set()
/// .entries(self.0.iter().map(Arm))
/// .entry(&Arm(&(format_args!("_"), &self.1)))
/// .finish()
/// }
/// }
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_set<'b>(&'b mut self) -> DebugSet<'b, 'a> {
builders::debug_set_new(self)
}
/// Creates a `DebugMap` builder designed to assist with creation of
/// `fmt::Debug` implementations for map-like structures.
///
/// # Examples
///
/// ```rust
/// use std::fmt;
///
/// struct Foo(Vec<(String, i32)>);
///
/// impl fmt::Debug for Foo {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_map().entries(self.0.iter().map(|&(ref k, ref v)| (k, v))).finish()
/// }
/// }
///
/// // prints "{"A": 10, "B": 11}"
/// println!("{:?}", Foo(vec![("A".to_string(), 10), ("B".to_string(), 11)]));
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_map<'b>(&'b mut self) -> DebugMap<'b, 'a> {
builders::debug_map_new(self)
}
}
#[stable(since = "1.2.0", feature = "formatter_write")]
impl Write for Formatter<'_> {
fn write_str(&mut self, s: &str) -> Result {
self.buf.write_str(s)
}
fn write_char(&mut self, c: char) -> Result {
self.buf.write_char(c)
}
fn write_fmt(&mut self, args: Arguments<'_>) -> Result {
write(self.buf, args)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for Error {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Display::fmt("an error occurred when formatting an argument", f)
}
}
// Implementations of the core formatting traits
macro_rules! fmt_refs {
($($tr:ident),*) => {
$(
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + $tr> $tr for &T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result { $tr::fmt(&**self, f) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + $tr> $tr for &mut T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result { $tr::fmt(&**self, f) }
}
)*
}
}
fmt_refs! { Debug, Display, Octal, Binary, LowerHex, UpperHex, LowerExp, UpperExp }
#[unstable(feature = "never_type", issue = "35121")]
impl Debug for ! {
fn fmt(&self, _: &mut Formatter<'_>) -> Result {
*self
}
}
#[unstable(feature = "never_type", issue = "35121")]
impl Display for ! {
fn fmt(&self, _: &mut Formatter<'_>) -> Result {
*self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for bool {
#[inline]
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Display::fmt(self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for bool {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Display::fmt(if *self { "true" } else { "false" }, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for str {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.write_char('"')?;
let mut from = 0;
for (i, c) in self.char_indices() {
let esc = c.escape_debug();
// If char needs escaping, flush backlog so far and write, else skip
if esc.len() != 1 {
f.write_str(&self[from..i])?;
for c in esc {
f.write_char(c)?;
}
from = i + c.len_utf8();
}
}
f.write_str(&self[from..])?;
f.write_char('"')
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for str {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.pad(self)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for char {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.write_char('\'')?;
for c in self.escape_debug() {
f.write_char(c)?
}
f.write_char('\'')
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for char {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
if f.width.is_none() && f.precision.is_none() {
f.write_char(*self)
} else {
f.pad(self.encode_utf8(&mut [0; 4]))
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Pointer for *const T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
let old_width = f.width;
let old_flags = f.flags;
// The alternate flag is already treated by LowerHex as being special-
// it denotes whether to prefix with 0x. We use it to work out whether
// or not to zero extend, and then unconditionally set it to get the
// prefix.
if f.alternate() {
f.flags |= 1 << (FlagV1::SignAwareZeroPad as u32);
if f.width.is_none() {
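                // Default width: one hex digit per nibble of a pointer-sized
                // integer, plus two characters for the `0x` prefix.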
f.width = Some(((mem::size_of::<usize>() * 8) / 4) + 2);
}
}
f.flags |= 1 << (FlagV1::Alternate as u32);
let ret = LowerHex::fmt(&(*self as *const () as usize), f);
f.width = old_width;
f.flags = old_flags;
ret
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Pointer for *mut T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Pointer::fmt(&(*self as *const T), f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Pointer for &T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Pointer::fmt(&(*self as *const T), f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Pointer for &mut T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Pointer::fmt(&(&**self as *const T), f)
}
}
// Implementation of Display/Debug for various core types
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Debug for *const T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Pointer::fmt(self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Debug for *mut T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Pointer::fmt(self, f)
}
}
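// `tuple!` implements `Debug` for tuples of up to 12 elements: each expansion
// peels off the first type parameter via `peel!` and recurses, while
// `last_type!` singles out the final element so that it may be `?Sized`.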
macro_rules! peel {
($name:ident, $($other:ident,)*) => (tuple! { $($other,)* })
}
macro_rules! tuple {
() => ();
( $($name:ident,)+ ) => (
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($name:Debug),+> Debug for ($($name,)+) where last_type!($($name,)+): ?Sized {
#[allow(non_snake_case, unused_assignments)]
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
let mut builder = f.debug_tuple("");
let ($(ref $name,)+) = *self;
$(
builder.field(&$name);
)+
builder.finish()
}
}
peel! { $($name,)+ }
)
}
macro_rules! last_type {
($a:ident,) => { $a };
($a:ident, $($rest_a:ident,)+) => { last_type!($($rest_a,)+) };
}
tuple! { T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, }
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Debug> Debug for [T] {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.debug_list().entries(self.iter()).finish()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for () {
#[inline]
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.pad("()")
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Debug for PhantomData<T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.pad("PhantomData")
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Copy + Debug> Debug for Cell<T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.debug_struct("Cell").field("value", &self.get()).finish()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Debug> Debug for RefCell<T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
match self.try_borrow() {
Ok(borrow) => f.debug_struct("RefCell").field("value", &borrow).finish(),
Err(_) => {
// The RefCell is mutably borrowed so we can't look at its value
// here. Show a placeholder instead.
struct BorrowedPlaceholder;
impl Debug for BorrowedPlaceholder {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.write_str("<borrowed>")
}
}
f.debug_struct("RefCell").field("value", &BorrowedPlaceholder).finish()
}
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Debug> Debug for Ref<'_, T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Debug::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Debug> Debug for RefMut<'_, T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Debug::fmt(&*(self.deref()), f)
}
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: ?Sized + Debug> Debug for UnsafeCell<T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.pad("UnsafeCell")
}
}
// If you expected tests to be here, look instead at the ui/ifmt.rs test,
// it's a lot easier than creating all of the rt::Piece structures here.
| 31.017352 | 100 | 0.522833 |
71efb99e2afa39947014f0a0256abbfecd942aed
| 51 |
pub fn hello_world() {
std::unimplemented!()
}
| 12.75 | 25 | 0.627451 |
e56fc14867fc98d8e17b8980e2af0b8c35420108
| 11,594 |
use std::fmt;
use std::convert::From;
use yaml::*;
#[derive(Copy, Clone, Debug)]
pub enum EmitError {
FmtError(fmt::Error),
BadHashmapKey,
}
impl From<fmt::Error> for EmitError {
fn from(f: fmt::Error) -> Self {
EmitError::FmtError(f)
}
}
pub struct YamlEmitter<'a> {
writer: &'a mut fmt::Write,
best_indent: usize,
level: isize,
}
pub type EmitResult = Result<(), EmitError>;
// from serialize::json
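// For example, escaping the input `say "hi"` followed by a tab character writes
// `"say \"hi\"\t"` (including the surrounding double quotes) to the writer.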
fn escape_str(wr: &mut fmt::Write, v: &str) -> Result<(), fmt::Error> {
try!(wr.write_str("\""));
let mut start = 0;
for (i, byte) in v.bytes().enumerate() {
let escaped = match byte {
b'"' => "\\\"",
b'\\' => "\\\\",
b'\x00' => "\\u0000",
b'\x01' => "\\u0001",
b'\x02' => "\\u0002",
b'\x03' => "\\u0003",
b'\x04' => "\\u0004",
b'\x05' => "\\u0005",
b'\x06' => "\\u0006",
b'\x07' => "\\u0007",
b'\x08' => "\\b",
b'\t' => "\\t",
b'\n' => "\\n",
b'\x0b' => "\\u000b",
b'\x0c' => "\\f",
b'\r' => "\\r",
b'\x0e' => "\\u000e",
b'\x0f' => "\\u000f",
b'\x10' => "\\u0010",
b'\x11' => "\\u0011",
b'\x12' => "\\u0012",
b'\x13' => "\\u0013",
b'\x14' => "\\u0014",
b'\x15' => "\\u0015",
b'\x16' => "\\u0016",
b'\x17' => "\\u0017",
b'\x18' => "\\u0018",
b'\x19' => "\\u0019",
b'\x1a' => "\\u001a",
b'\x1b' => "\\u001b",
b'\x1c' => "\\u001c",
b'\x1d' => "\\u001d",
b'\x1e' => "\\u001e",
b'\x1f' => "\\u001f",
b'\x7f' => "\\u007f",
_ => { continue; }
};
if start < i {
try!(wr.write_str(&v[start..i]));
}
try!(wr.write_str(escaped));
start = i + 1;
}
if start != v.len() {
try!(wr.write_str(&v[start..]));
}
try!(wr.write_str("\""));
Ok(())
}
impl<'a> YamlEmitter<'a> {
pub fn new(writer: &'a mut fmt::Write) -> YamlEmitter {
YamlEmitter {
writer: writer,
best_indent: 2,
level: -1
}
}
pub fn dump(&mut self, doc: &Yaml) -> EmitResult {
// write DocumentStart
try!(write!(self.writer, "---\n"));
self.level = -1;
self.emit_node(doc)
}
fn write_indent(&mut self) -> EmitResult {
if self.level <= 0 { return Ok(()); }
for _ in 0..self.level {
for _ in 0..self.best_indent {
try!(write!(self.writer, " "));
}
}
Ok(())
}
fn emit_node_compact(&mut self, node: &Yaml) -> EmitResult {
match *node {
Yaml::Array(ref v) => {
try!(write!(self.writer, "["));
if self.level >= 0 {
try!(write!(self.writer, "+ "));
}
self.level += 1;
for (cnt, x) in v.iter().enumerate() {
try!(self.write_indent());
if cnt > 0 { try!(write!(self.writer, ", ")); }
try!(self.emit_node(x));
}
self.level -= 1;
try!(write!(self.writer, "]"));
Ok(())
},
Yaml::Hash(ref h) => {
try!(self.writer.write_str("{"));
self.level += 1;
for (cnt, (k, v)) in h.iter().enumerate() {
if cnt > 0 {
try!(write!(self.writer, ", "));
}
match *k {
// complex key is not supported
Yaml::Array(_) | Yaml::Hash(_) => {
return Err(EmitError::BadHashmapKey);
},
_ => { try!(self.emit_node(k)); }
}
try!(write!(self.writer, ": "));
try!(self.emit_node(v));
}
try!(self.writer.write_str("}"));
self.level -= 1;
Ok(())
},
_ => self.emit_node(node)
}
}
fn emit_node(&mut self, node: &Yaml) -> EmitResult {
match *node {
Yaml::Array(ref v) => {
if v.is_empty() {
try!(write!(self.writer, "[]"));
Ok(())
} else {
if self.level >= 0 {
try!(write!(self.writer, "\n"));
}
self.level += 1;
for (cnt, x) in v.iter().enumerate() {
if cnt > 0 {
try!(write!(self.writer, "\n"));
}
try!(self.write_indent());
try!(write!(self.writer, "- "));
try!(self.emit_node(x));
}
self.level -= 1;
Ok(())
}
},
Yaml::Hash(ref h) => {
if h.is_empty() {
try!(self.writer.write_str("{}"));
Ok(())
} else {
if self.level >= 0 {
try!(write!(self.writer, "\n"));
}
self.level += 1;
for (cnt, (k, v)) in h.iter().enumerate() {
if cnt > 0 {
try!(write!(self.writer, "\n"));
}
try!(self.write_indent());
match *k {
Yaml::Array(_) | Yaml::Hash(_) => {
try!(self.emit_node_compact(k));
//return Err(EmitError::BadHashmapKey);
},
_ => { try!(self.emit_node(k)); }
}
try!(write!(self.writer, ": "));
try!(self.emit_node(v));
}
self.level -= 1;
Ok(())
}
},
Yaml::String(ref v) => {
if need_quotes(v) {
try!(escape_str(self.writer, v));
}
else {
try!(write!(self.writer, "{}", v));
}
Ok(())
},
Yaml::Boolean(v) => {
if v {
try!(self.writer.write_str("true"));
} else {
try!(self.writer.write_str("false"));
}
Ok(())
},
Yaml::Integer(v) => {
try!(write!(self.writer, "{}", v));
Ok(())
},
Yaml::Real(ref v) => {
try!(write!(self.writer, "{}", v));
Ok(())
},
Yaml::Null | Yaml::BadValue => {
try!(write!(self.writer, "~"));
Ok(())
},
// XXX(chenyh) Alias
_ => { Ok(()) }
}
}
}
/// Check if the string requires quoting.
/// Strings containing any of the following characters must be quoted.
/// :, {, }, [, ], ,, &, *, #, ?, |, -, <, >, =, !, %, @, `
///
/// If the string contains any of the following control characters, it must be escaped with double quotes:
/// \0, \x01, \x02, \x03, \x04, \x05, \x06, \a, \b, \t, \n, \v, \f, \r, \x0e, \x0f, \x10, \x11, \x12, \x13, \x14, \x15, \x16, \x17, \x18, \x19, \x1a, \e, \x1c, \x1d, \x1e, \x1f, \N, \_, \L, \P
///
/// Finally, there are other cases when the strings must be quoted, no matter if you're using single or double quotes:
/// * When the string is true or false (otherwise, it would be treated as a boolean value);
/// * When the string is null or ~ (otherwise, it would be considered as a null value);
/// * When the string looks like a number, such as integers (e.g. 2, 14, etc.), floats (e.g. 2.6, 14.9) and exponential numbers (e.g. 12e7, etc.) (otherwise, it would be treated as a numeric value);
/// * When the string looks like a date (e.g. 2014-12-31) (otherwise it would be automatically converted into a Unix timestamp).
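///
/// A few illustrative cases under the rules above (plain text, not a doctest):
///
/// ```text
/// need_quotes("plain") == false
/// need_quotes("true") == true // would otherwise be read as a boolean
/// need_quotes("12e7") == true // would otherwise be read as a number
/// need_quotes(" padded") == true // leading space
/// need_quotes("a:b") == true // contains ':'
/// ```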
fn need_quotes(string: &str) -> bool {
fn need_quotes_spaces(string: &str) -> bool {
string.starts_with(' ')
|| string.ends_with(' ')
}
string == ""
|| need_quotes_spaces(string)
|| string.contains(|character: char| {
match character {
':' | '{' | '}' | '[' | ']' | ',' | '&' | '*' | '#' | '?' | '|' | '-' | '<' | '>' | '=' | '!' | '%' | '@' | '`' | '\\' | '\0' ... '\x06' | '\t' | '\n' | '\r' | '\x0e' ... '\x1a' | '\x1c' ... '\x1f' => true,
_ => false,
}
})
|| string == "true"
|| string == "false"
|| string == "null"
|| string == "~"
|| string.parse::<i64>().is_ok()
|| string.parse::<f64>().is_ok()
}
#[cfg(test)]
mod tests {
use super::*;
use yaml::*;
#[test]
fn test_emit_simple() {
let s = "
# comment
a0 bb: val
a1:
b1: 4
b2: d
a2: 4 # i'm comment
a3: [1, 2, 3]
a4:
- - a1
- a2
- 2
- []
- {}
a5: 'single_quoted'
a6: \"double_quoted\"
a7: 你好
'key 1': \"ddd\\\tbbb\"
";
let docs = YamlLoader::load_from_str(&s).unwrap();
let doc = &docs[0];
let mut writer = String::new();
{
let mut emitter = YamlEmitter::new(&mut writer);
emitter.dump(doc).unwrap();
}
        let docs_new = YamlLoader::load_from_str(&writer).unwrap();
let doc_new = &docs_new[0];
assert_eq!(doc, doc_new);
}
#[test]
fn test_emit_complex() {
let s = r#"
cataloge:
product: &coffee { name: Coffee, price: 2.5 , unit: 1l }
product: &cookies { name: Cookies!, price: 3.40 , unit: 400g}
products:
*coffee:
amount: 4
*cookies:
amount: 4
[1,2,3,4]:
array key
2.4:
real key
true:
bool key
{}:
empty hash key
"#;
let docs = YamlLoader::load_from_str(&s).unwrap();
let doc = &docs[0];
let mut writer = String::new();
{
let mut emitter = YamlEmitter::new(&mut writer);
emitter.dump(doc).unwrap();
}
        let docs_new = YamlLoader::load_from_str(&writer).unwrap();
let doc_new = &docs_new[0];
assert_eq!(doc, doc_new);
}
#[test]
fn test_emit_avoid_quotes() {
let s = r#"---
a7: 你好
boolean: "true"
boolean2: "false"
date: "2014-12-31"
empty_string: ""
empty_string1: " "
empty_string2: " a"
empty_string3: " a "
exp: "12e7"
field: ":"
field2: "{"
field3: "\\"
field4: "\n"
float: "2.6"
int: "4"
nullable: "null"
nullable2: "~"
products:
"*coffee":
amount: 4
"*cookies":
amount: 4
"2.4": real key
"[1,2,3,4]": array key
"true": bool key
"{}": empty hash key
x: test
y: string with spaces"#;
let docs = YamlLoader::load_from_str(&s).unwrap();
let doc = &docs[0];
let mut writer = String::new();
{
let mut emitter = YamlEmitter::new(&mut writer);
emitter.dump(doc).unwrap();
}
assert_eq!(s, writer);
}
}
| 29.351899 | 218 | 0.393479 |
3823fa8b16463189926076afc650d93a1e79b569
| 1,043 |
use crate::spec::{LinkerFlavor, Target, TargetResult, Endianness};
pub fn target() -> TargetResult {
let mut base = super::android_base::opts();
base.cpu = "x86-64".to_string();
// https://developer.android.com/ndk/guides/abis.html#86-64
base.features = "+mmx,+sse,+sse2,+sse3,+ssse3,+sse4.1,+sse4.2,+popcnt".to_string();
base.max_atomic_width = Some(64);
base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
base.stack_probes = true;
Ok(Target {
llvm_target: "x86_64-linux-android".to_string(),
target_endian: Endianness::Little,
target_pointer_width: 64,
target_c_int_width: "32".to_string(),
data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
.to_string(),
arch: "x86_64".to_string(),
target_os: "android".to_string(),
target_env: String::new(),
target_vendor: "unknown".to_string(),
linker_flavor: LinkerFlavor::Gcc,
options: base,
})
}
| 38.62963 | 93 | 0.634708 |
3380affccb7abf864ec17a52891cfcc3153b3c0f
| 2,320 |
use crate::prelude::*;
#[derive(Clone)]
pub struct ObserverComp<N, C, Item> {
next: N,
complete: C,
is_stopped: bool,
marker: TypeHint<*const Item>,
}
impl<N, C, Item> Observer for ObserverComp<N, C, Item>
where
C: FnMut(),
N: FnMut(Item),
{
type Item = Item;
type Err = ();
#[inline]
fn next(&mut self, value: Item) { (self.next)(value); }
#[inline]
fn error(&mut self, _err: ()) { self.is_stopped = true; }
fn complete(&mut self) {
(self.complete)();
self.is_stopped = true;
}
fn is_stopped(&self) -> bool { self.is_stopped }
}
pub trait SubscribeComplete<'a, N, C> {
/// A type implementing [`SubscriptionLike`]
type Unsub: SubscriptionLike;
/// Invokes an execution of an Observable and registers Observer handlers for
/// notifications it will emit.
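  ///
  /// Illustrative usage (a sketch; the observable constructor is assumed from elsewhere
  /// in the crate, not defined here):
  ///
  /// ```ignore
  /// observable::from_iter(0..3)
  ///   .subscribe_complete(|v| println!("next: {}", v), || println!("completed"));
  /// ```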
fn subscribe_complete(
self,
next: N,
complete: C,
) -> SubscriptionWrapper<Self::Unsub>;
}
impl<'a, S, N, C> SubscribeComplete<'a, N, C> for S
where
S: LocalObservable<'a, Err = ()>,
C: FnMut() + 'a,
N: FnMut(S::Item) + 'a,
S::Item: 'a,
{
type Unsub = S::Unsub;
fn subscribe_complete(
self,
next: N,
complete: C,
) -> SubscriptionWrapper<Self::Unsub>
where
Self: Sized,
S::Item: 'a,
{
let unsub = self.actual_subscribe(Subscriber::local(ObserverComp {
next,
complete,
is_stopped: false,
marker: TypeHint::new(),
}));
SubscriptionWrapper(unsub)
}
}
impl<'a, S, N, C> SubscribeComplete<'a, N, C> for Shared<S>
where
S: SharedObservable<Err = ()>,
C: FnMut() + Send + Sync + 'static,
N: FnMut(S::Item) + Send + Sync + 'static,
S::Item: 'static,
{
type Unsub = S::Unsub;
fn subscribe_complete(
self,
next: N,
complete: C,
) -> SubscriptionWrapper<Self::Unsub>
where
Self: Sized,
{
let unsub = self.0.actual_subscribe(Subscriber::shared(ObserverComp {
next,
complete,
is_stopped: false,
marker: TypeHint::new(),
}));
SubscriptionWrapper(unsub)
}
}
#[test]
fn raii() {
let mut times = 0;
{
let mut subject = Subject::new();
{
let _ = subject
.clone()
.subscribe_complete(|_| times += 1, || {})
.unsubscribe_when_dropped();
} // <-- guard is dropped here!
subject.next(());
}
assert_eq!(times, 0);
}
| 21.090909 | 79 | 0.596552 |
03b4e14e974933b2e7faa4dcbb2fcc1e4d0d05b1
| 173 |
//! Modules related to the handshake phase of the Network Block Device (NBD)
//! protocol.
mod connection;
pub(crate) use connection::RawConnection;
pub(crate) mod frame;
| 21.625 | 76 | 0.751445 |
698effd3c2c5545b4fff9b16de60d84d5d68b59c
| 4,398 |
use crate::prelude::*;
use nu_engine::evaluate_baseline_expr;
use nu_engine::WholeStreamCommand;
use nu_errors::ShellError;
use nu_protocol::{
hir::CapturedBlock,
hir::{ClassifiedCommand, SpannedExpression},
Signature, SyntaxShape, Value,
};
pub struct Command;
#[derive(Deserialize)]
pub struct Arguments {
block: CapturedBlock,
}
impl WholeStreamCommand for Command {
fn name(&self) -> &str {
"where"
}
fn signature(&self) -> Signature {
Signature::build("where").required(
"condition",
SyntaxShape::RowCondition,
"the condition that must match",
)
}
fn usage(&self) -> &str {
"Filter table to match the condition."
}
fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
where_command(args)
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "List all files in the current directory with sizes greater than 2kb",
example: "ls | where size > 2kb",
result: None,
},
Example {
description: "List only the files in the current directory",
example: "ls | where type == File",
result: None,
},
Example {
description: "List all files with names that contain \"Car\"",
example: "ls | where name =~ \"Car\"",
result: None,
},
Example {
description: "List all files that were modified in the last two weeks",
example: "ls | where modified <= 2wk",
result: None,
},
]
}
}
fn where_command(raw_args: CommandArgs) -> Result<OutputStream, ShellError> {
let context = Arc::new(EvaluationContext::from_args(&raw_args));
let tag = raw_args.call_info.name_tag.clone();
let (Arguments { block }, input) = raw_args.process()?;
let condition = {
if block.block.block.len() != 1 {
return Err(ShellError::labeled_error(
"Expected a condition",
"expected a condition",
tag,
));
}
match block.block.block[0].pipelines.get(0) {
Some(item) => match item.list.get(0) {
Some(ClassifiedCommand::Expr(expr)) => expr.clone(),
_ => {
return Err(ShellError::labeled_error(
"Expected a condition",
"expected a condition",
tag,
));
}
},
None => {
return Err(ShellError::labeled_error(
"Expected a condition",
"expected a condition",
tag,
));
}
}
};
Ok(WhereIterator {
condition,
context,
input,
block,
}
.to_output_stream())
}
#[cfg(test)]
mod tests {
use super::Command;
use super::ShellError;
#[test]
fn examples_work_as_expected() -> Result<(), ShellError> {
use crate::examples::test as test_examples;
test_examples(Command {})
}
}
struct WhereIterator {
condition: Box<SpannedExpression>,
context: Arc<EvaluationContext>,
input: InputStream,
block: CapturedBlock,
}
impl Iterator for WhereIterator {
type Item = Value;
fn next(&mut self) -> Option<Self::Item> {
while let Some(x) = self.input.next() {
self.context.scope.enter_scope();
self.context.scope.add_vars(&self.block.captured.entries);
self.context.scope.add_var("$it", x.clone());
//FIXME: should we use the scope that's brought in as well?
let condition = evaluate_baseline_expr(&self.condition, &self.context);
self.context.scope.exit_scope();
match condition {
Ok(condition) => match condition.as_bool() {
Ok(b) => {
if b {
return Some(x);
}
}
Err(e) => return Some(Value::error(e)),
},
Err(e) => return Some(Value::error(e)),
}
}
None
}
}
| 28.374194 | 99 | 0.506594 |
f9a9338834b108c50d1bd9e97d6a533a7f7905fc
| 164 |
use simple_func_timer::timer;
#[timer(unit = "as")]
fn sum(max: i32) -> i32 {
let mut sum = 0;
for i in 1..max {
sum += i;
}
return sum;
}
| 14.909091 | 29 | 0.506098 |
01f2f92891f0df9749b01fdc6e8a9ef6793f8226
| 6,766 |
use serde::Serialize;
use twilight_model::{
channel::message::{
sticker::{StickerFormatType, StickerType},
Sticker,
},
id::{
marker::{GuildMarker, StickerMarker, StickerPackMarker, UserMarker},
Id,
},
};
/// Representation of a cached [`Sticker`].
///
/// [`Sticker`]: twilight_model::channel::message::sticker::Sticker
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct CachedSticker {
/// Whether the sticker is available.
pub(crate) available: bool,
/// Description of the sticker.
pub(crate) description: String,
/// Format type.
pub(crate) format_type: StickerFormatType,
/// ID of the guild that owns the sticker.
pub(crate) guild_id: Option<Id<GuildMarker>>,
/// Unique ID of the sticker.
pub(crate) id: Id<StickerMarker>,
/// Kind of sticker.
pub(crate) kind: StickerType,
/// Name of the sticker.
pub(crate) name: String,
/// Unique ID of the pack the sticker is in.
pub(crate) pack_id: Option<Id<StickerPackMarker>>,
/// Sticker's sort order within a pack.
pub(crate) sort_value: Option<u64>,
/// CSV list of tags the sticker is assigned to, if any.
pub(crate) tags: String,
/// ID of the user that uploaded the sticker.
pub(crate) user_id: Option<Id<UserMarker>>,
}
impl CachedSticker {
/// Whether the sticker is available.
pub const fn available(&self) -> bool {
self.available
}
/// Description of the sticker.
pub fn description(&self) -> &str {
&self.description
}
/// Format type.
pub const fn format_type(&self) -> StickerFormatType {
self.format_type
}
/// ID of the guild that owns the sticker.
pub const fn guild_id(&self) -> Option<Id<GuildMarker>> {
self.guild_id
}
/// Unique ID of the sticker.
pub const fn id(&self) -> Id<StickerMarker> {
self.id
}
/// Kind of sticker.
pub const fn kind(&self) -> StickerType {
self.kind
}
/// Name of the sticker.
pub fn name(&self) -> &str {
&self.name
}
/// Unique ID of the pack the sticker is in.
pub const fn pack_id(&self) -> Option<Id<StickerPackMarker>> {
self.pack_id
}
/// Sticker's sort order within a pack.
pub const fn sort_value(&self) -> Option<u64> {
self.sort_value
}
/// CSV list of tags the sticker is assigned to, if any.
pub fn tags(&self) -> &str {
&self.tags
}
/// ID of the user that uploaded the sticker.
pub const fn user_id(&self) -> Option<Id<UserMarker>> {
self.user_id
}
/// Construct a cached sticker from its [`twilight_model`] form.
pub(crate) fn from_model(sticker: Sticker) -> Self {
let Sticker {
available,
description,
format_type,
guild_id,
id,
kind,
name,
pack_id,
sort_value,
tags,
user,
} = sticker;
Self {
available,
description: description.unwrap_or_default(),
format_type,
guild_id,
id,
kind,
name,
pack_id,
sort_value,
tags,
user_id: user.map(|user| user.id),
}
}
}
impl PartialEq<Sticker> for CachedSticker {
fn eq(&self, other: &Sticker) -> bool {
self.available == other.available
&& self.description.as_str() == other.description.as_ref().map_or("", String::as_str)
&& self.format_type == other.format_type
&& self.guild_id == other.guild_id
&& self.id == other.id
&& self.kind == other.kind
&& self.name == other.name
&& self.pack_id == other.pack_id
&& self.sort_value == other.sort_value
&& self.tags == other.tags
&& self.user_id == other.user.as_ref().map(|user| user.id)
}
}
#[cfg(test)]
mod tests {
use super::CachedSticker;
use serde::Serialize;
use static_assertions::{assert_fields, assert_impl_all};
use std::fmt::Debug;
use twilight_model::{
channel::message::{
sticker::{StickerFormatType, StickerType},
Sticker,
},
id::Id,
user::{PremiumType, User, UserFlags},
util::{image_hash::ImageHashParseError, ImageHash},
};
assert_fields!(
CachedSticker: available,
description,
format_type,
guild_id,
id,
kind,
name,
pack_id,
sort_value,
tags,
user_id
);
assert_impl_all!(
CachedSticker: Clone,
Debug,
Eq,
PartialEq,
PartialEq<Sticker>,
Send,
Serialize,
Sync
);
#[test]
fn eq_sticker() -> Result<(), ImageHashParseError> {
let avatar = ImageHash::parse(b"5bf451026c107906b4dccea015320222")?;
let sticker = Sticker {
available: true,
description: Some("sticker".into()),
format_type: StickerFormatType::Png,
guild_id: Some(Id::new(1)),
id: Id::new(2),
kind: StickerType::Guild,
name: "stick".into(),
pack_id: Some(Id::new(3)),
sort_value: Some(1),
tags: "foo,bar,baz".into(),
user: Some(User {
accent_color: None,
avatar: Some(avatar),
banner: None,
bot: false,
discriminator: 1,
email: Some("[email protected]".to_owned()),
flags: Some(UserFlags::PREMIUM_EARLY_SUPPORTER | UserFlags::VERIFIED_DEVELOPER),
id: Id::new(1),
locale: Some("en-us".to_owned()),
mfa_enabled: Some(true),
name: "test".to_owned(),
premium_type: Some(PremiumType::Nitro),
public_flags: Some(
UserFlags::PREMIUM_EARLY_SUPPORTER | UserFlags::VERIFIED_DEVELOPER,
),
system: Some(true),
verified: Some(true),
}),
};
let cached = CachedSticker {
available: true,
description: "sticker".into(),
format_type: StickerFormatType::Png,
guild_id: Some(Id::new(1)),
id: Id::new(2),
kind: StickerType::Guild,
name: "stick".into(),
pack_id: Some(Id::new(3)),
sort_value: Some(1),
tags: "foo,bar,baz".into(),
user_id: Some(Id::new(1)),
};
assert_eq!(cached, sticker);
Ok(())
}
}
| 28.074689 | 97 | 0.536802 |
566c6326c0ce1f37713175b6ef4e8b5d313cff4b
| 1,887 |
use amethyst::core::ecs::{Builder, Entity, World, WorldExt};
use amethyst::core::Transform;
use amethyst::renderer::Camera;
#[derive(Ord, PartialOrd, Eq, PartialEq, Copy, Clone)]
#[repr(u8)]
pub enum CameraHeight {
// Bottom
Terrain = 0,
Units,
Projectiles,
// Top
}
pub fn _some() {
// let vision_registry = &*world.read_resource::<VisionRegistry>();
// let map_registry = &*world.read_resource::<MapRegistry>();
// let vision_point: (usize, usize) = (5, 5);
//
// let visible = vision_registry.get_visible(vision_point).unwrap();
//
// let mut tints = world.write_component::<Tint>();
//
// visible
// .iter()
// .map(|(x, y)| {
// // println!("{:?} {:?}", x, y);
// map_registry.get_tile(*x, *y).unwrap().0.clone()
// })
// .for_each(|(entity)| {
// let tint = tints.get_mut(entity).unwrap();
// tint.0.blue = 1.0;
// tint.0.green = 1.0;
// });
//
// let mut vis_point = tints
// .get_mut(
// map_registry
// .get_tile(vision_point.0, vision_point.1)
// .unwrap()
// .0
// .clone(),
// )
// .unwrap()
// .0;
// vis_point.red = 0.0;
// vis_point.green = 0.0;
// vis_point.blue = 0.0;
}
pub fn initialize_camera(world: &mut World, dimensions: (f32, f32), offset: f32) -> Entity {
// Setup camera in a way that our screen covers whole arena and (0, 0) is in the bottom left.
let mut transform = Transform::default();
transform.set_translation_xyz(
dimensions.0 / 2.0 - offset,
dimensions.1 / 2.0 - offset,
100.0,
);
world
.create_entity()
.with(Camera::standard_2d(dimensions.0, dimensions.1))
.with(transform)
.build()
}
| 28.164179 | 97 | 0.523582 |
ed6d0af0bf02f22af495d11eef0064030ec9d5f5
| 1,714 |
use crate as sea_orm;
use crate::entity::prelude::*;
#[derive(Copy, Clone, Default, Debug, DeriveEntity)]
pub struct Entity;
impl EntityName for Entity {
fn table_name(&self) -> &str {
"cake_filling"
}
}
#[derive(Clone, Debug, PartialEq, DeriveModel, DeriveActiveModel)]
pub struct Model {
pub cake_id: i32,
pub filling_id: i32,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
pub enum Column {
CakeId,
FillingId,
}
#[derive(Copy, Clone, Debug, EnumIter, DerivePrimaryKey)]
pub enum PrimaryKey {
CakeId,
FillingId,
}
impl PrimaryKeyTrait for PrimaryKey {
type ValueType = (i32, i32);
fn auto_increment() -> bool {
false
}
}
#[derive(Copy, Clone, Debug, EnumIter)]
pub enum Relation {
Cake,
Filling,
}
impl ColumnTrait for Column {
type EntityName = Entity;
fn def(&self) -> ColumnDef {
match self {
Self::CakeId => ColumnType::Integer.def(),
Self::FillingId => ColumnType::Integer.def(),
}
}
}
impl RelationTrait for Relation {
fn def(&self) -> RelationDef {
match self {
Self::Cake => Entity::belongs_to(super::cake::Entity)
.from(Column::CakeId)
.to(super::cake::Column::Id)
.into(),
Self::Filling => Entity::belongs_to(super::filling::Entity)
.from(Column::FillingId)
.to(super::filling::Column::Id)
.into(),
}
}
}
impl Related<super::cake_filling_price::Entity> for Entity {
fn to() -> RelationDef {
super::cake_filling_price::Relation::CakeFilling.def().rev()
}
}
impl ActiveModelBehavior for ActiveModel {}
| 21.974359 | 71 | 0.597433 |
1c700fae7396134916b656df2f5292d4f1aa3ef8
| 1,383 |
// WARNING: This file was autogenerated by jni-bindgen. Any changes to this file may be lost!!!
#[cfg(any(feature = "all", feature = "java-nio-channels-WritePendingException"))]
__jni_bindgen! {
/// public class [WritePendingException](https://developer.android.com/reference/java/nio/channels/WritePendingException.html)
///
/// Required feature: java-nio-channels-WritePendingException
public class WritePendingException ("java/nio/channels/WritePendingException") extends crate::java::lang::IllegalStateException {
/// [WritePendingException](https://developer.android.com/reference/java/nio/channels/WritePendingException.html#WritePendingException())
pub fn new<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::java::nio::channels::WritePendingException>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "java/nio/channels/WritePendingException", java.flags == PUBLIC, .name == "<init>", .descriptor == "()V"
unsafe {
let __jni_args = [];
let (__jni_class, __jni_method) = __jni_env.require_class_method("java/nio/channels/WritePendingException\0", "<init>\0", "()V\0");
__jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr())
}
}
}
}
| 62.863636 | 238 | 0.689082 |
18b2f72c58c7179ec3f3875b71d7a1fdf18dfd53
| 1,438 |
use wasmtime::*;
#[test]
fn caches_across_engines() {
let mut c = Config::new();
c.cranelift_clear_cpu_flags();
let bytes = Module::new(&Engine::new(&c).unwrap(), "(module)")
.unwrap()
.serialize()
.unwrap();
let res = Module::deserialize(
&Engine::new(&Config::new().cranelift_clear_cpu_flags()).unwrap(),
&bytes,
);
assert!(res.is_ok());
// differ in shared cranelift flags
let res = Module::deserialize(
&Engine::new(
&Config::new()
.cranelift_clear_cpu_flags()
.cranelift_nan_canonicalization(true),
)
.unwrap(),
&bytes,
);
assert!(res.is_err());
// differ in cranelift settings
let res = Module::deserialize(
&Engine::new(
&Config::new()
.cranelift_clear_cpu_flags()
.cranelift_opt_level(OptLevel::None),
)
.unwrap(),
&bytes,
);
assert!(res.is_err());
// differ in cpu-specific flags
if cfg!(target_arch = "x86_64") {
let res = Module::deserialize(
&Engine::new(unsafe {
&Config::new()
.cranelift_clear_cpu_flags()
.cranelift_other_flag("has_sse3", "true")
.unwrap()
})
.unwrap(),
&bytes,
);
assert!(res.is_err());
}
}
| 24.793103 | 74 | 0.497218 |
38847585359ca0537376bb9649af538b2f204796
| 1,180 |
pub(crate) mod caching_session;
mod cluster;
pub(crate) mod connection;
mod connection_pool;
pub mod errors;
pub mod iterator;
pub mod load_balancing;
pub(crate) mod metrics;
mod node;
pub mod partitioner;
pub mod query_result;
pub mod retry_policy;
pub mod session;
pub mod session_builder;
pub mod speculative_execution;
pub mod topology;
#[cfg(test)]
mod authenticate_test;
#[cfg(test)]
pub(crate) mod session_test;
pub use cluster::ClusterData;
pub use node::Node;
/// All of the Authenticators supported by Scylla.
#[derive(Debug, PartialEq)]
pub enum Authenticator {
AllowAllAuthenticator,
PasswordAuthenticator,
CassandraPasswordAuthenticator,
CassandraAllowAllAuthenticator,
ScyllaTransitionalAuthenticator,
}
/// The wire protocol compression algorithm.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub enum Compression {
/// LZ4 compression algorithm.
Lz4,
/// Snappy compression algorithm.
Snappy,
}
impl ToString for Compression {
fn to_string(&self) -> String {
match self {
Compression::Lz4 => "lz4".to_owned(),
Compression::Snappy => "snappy".to_owned(),
}
}
}
| 22.264151 | 61 | 0.718644 |
50711fd981435d307509498bb1778055cf5e8e5e
| 1,553 |
// Copyright 2018 TAP, Inc. All Rights Reserved.
extern crate ws;
extern crate lazy_static;
use std::sync::Mutex;
use std::env;
lazy_static::lazy_static! {
static ref SENDERS: Mutex<Vec<ws::Sender>> = Mutex::new(Vec::new());
}
struct EchoHandler {
me: ws::Sender,
}
impl ws::Handler for EchoHandler {
fn on_message(&mut self, msg: ws::Message) -> ws::Result<()> {
for ref i in SENDERS.lock().unwrap().iter() {
if self.me.ne(i) {
i.send(msg.clone()).unwrap();
}
}
Ok(())
}
}
struct Server {}
impl ws::Factory for Server {
type Handler = EchoHandler;
fn connection_made(&mut self, sender: ws::Sender) -> EchoHandler {
println!("Connection : {}", sender.connection_id());
sender.send(format!("Connect:{}", sender.connection_id())).unwrap();
SENDERS.lock().unwrap().push(sender.clone());
EchoHandler{
me: sender,
}
}
fn connection_lost(&mut self, handler: EchoHandler) {
println!("Disconnect : {}", handler.me.connection_id());
SENDERS.lock().unwrap().retain(|ref sender| {
sender.send(format!("Disconnect:{}", handler.me.connection_id())).unwrap();
handler.me.ne(sender)
});
}
fn on_shutdown(&mut self) {
SENDERS.lock().unwrap().clear();
}
}
fn main() {
if let Some(arg1) = env::args().nth(1) {
println!("Open : {}", arg1);
ws::WebSocket::new(Server {}).unwrap().listen(arg1).unwrap();
}
else {
println!("Open : localhost:8989");
ws::WebSocket::new(Server {}).unwrap().listen("localhost:8989").unwrap();
}
}
| 22.185714 | 81 | 0.611719 |
01c700bbca1f00f02f4dc0dad422cc8e29c905d8
| 3,902 |
// Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::model::hooks::RuntimeInfo,
async_trait::async_trait,
fidl_fuchsia_diagnostics_types::{ComponentDiagnostics, Task as DiagnosticsTask, TaskUnknown},
fuchsia_zircon::{self as zx, AsHandleRef, Task},
fuchsia_zircon_sys as zx_sys,
futures::channel::oneshot,
};
/// Trait that all structs that behave as Tasks implement.
/// Used for simplifying testing.
#[async_trait]
pub trait RuntimeStatsSource {
/// The koid of the Cpu stats source.
fn koid(&self) -> Result<zx_sys::zx_koid_t, zx::Status>;
    /// A handle to the underlying task, used e.g. to wait for its termination.
fn handle_ref(&self) -> zx::HandleRef<'_>;
/// Provides the runtime info containing the stats.
async fn get_runtime_info(&self) -> Result<zx::TaskRuntimeInfo, zx::Status>;
}
/// Trait for the container returned by a `DiagnosticsReceiverProvider`.
/// Used for simplifying testing.
#[async_trait]
pub trait RuntimeStatsContainer<T: RuntimeStatsSource> {
/// The task running a component.
fn take_component_task(&mut self) -> Option<T>;
/// An optional parent task running multiple components including `component_task`.
fn take_parent_task(&mut self) -> Option<T>;
}
/// Trait for the providers of asynchronous receivers where the diagnostics data is sent.
/// Used for simplifying testing.
#[async_trait]
pub trait DiagnosticsReceiverProvider<T, S>
where
T: RuntimeStatsContainer<S>,
S: RuntimeStatsSource,
{
/// Fetches a oneshot receiver that will eventually resolve to the diagnostics of a component
/// if the runner provides them.
async fn get_receiver(&self) -> Option<oneshot::Receiver<T>>;
}
#[async_trait]
impl RuntimeStatsSource for DiagnosticsTask {
fn koid(&self) -> Result<zx_sys::zx_koid_t, zx::Status> {
let info = match &self {
DiagnosticsTask::Job(job) => job.basic_info(),
DiagnosticsTask::Process(process) => process.basic_info(),
DiagnosticsTask::Thread(thread) => thread.basic_info(),
TaskUnknown!() => {
unreachable!("only jobs, threads and processes are tasks");
}
}?;
Ok(info.koid.raw_koid())
}
fn handle_ref(&self) -> zx::HandleRef<'_> {
match &self {
DiagnosticsTask::Job(job) => job.as_handle_ref(),
DiagnosticsTask::Process(process) => process.as_handle_ref(),
DiagnosticsTask::Thread(thread) => thread.as_handle_ref(),
TaskUnknown!() => {
unreachable!("only jobs, threads and processes are tasks");
}
}
}
async fn get_runtime_info(&self) -> Result<zx::TaskRuntimeInfo, zx::Status> {
match &self {
DiagnosticsTask::Job(job) => job.get_runtime_info(),
DiagnosticsTask::Process(process) => process.get_runtime_info(),
DiagnosticsTask::Thread(thread) => thread.get_runtime_info(),
TaskUnknown!() => {
unreachable!("only jobs, threads and processes are tasks");
}
}
}
}
#[async_trait]
impl RuntimeStatsContainer<DiagnosticsTask> for ComponentDiagnostics {
fn take_component_task(&mut self) -> Option<DiagnosticsTask> {
self.tasks.as_mut().and_then(|tasks| tasks.component_task.take())
}
fn take_parent_task(&mut self) -> Option<DiagnosticsTask> {
self.tasks.as_mut().and_then(|tasks| tasks.parent_task.take())
}
}
#[async_trait]
impl DiagnosticsReceiverProvider<ComponentDiagnostics, DiagnosticsTask> for RuntimeInfo {
async fn get_receiver(&self) -> Option<oneshot::Receiver<ComponentDiagnostics>> {
let mut receiver_guard = self.diagnostics_receiver.lock().await;
receiver_guard.take()
}
}
| 37.519231 | 97 | 0.668888 |
03648c531707dd01b65d6bb66dbd384b90a00fe8
| 5,294 |
//! Asynchronous Cincinnati client.
// Cincinnati client.
mod client;
pub use client::Node;
#[cfg(test)]
mod mock_tests;
use crate::config::inputs;
use crate::identity::Identity;
use failure::{bail, Error, Fallible};
use futures::future;
use futures::prelude::*;
use prometheus::{IntCounter, IntGauge};
use serde::Serialize;
/// Metadata key for age index.
pub static AGE_INDEX_KEY: &str = "org.fedoraproject.coreos.releases.age_index";
/// Metadata key for payload scheme.
pub static SCHEME_KEY: &str = "org.fedoraproject.coreos.scheme";
/// Metadata value for "checksum" payload scheme.
pub static CHECKSUM_SCHEME: &str = "checksum";
lazy_static::lazy_static! {
static ref GRAPH_NODES: IntGauge = register_int_gauge!(opts!(
"zincati_cincinnati_graph_nodes_count",
"Number of nodes in Cincinnati update graph."
)).unwrap();
static ref UPDATE_CHECKS: IntCounter = register_int_counter!(opts!(
"zincati_cincinnati_update_checks_total",
"Total number of checks for updates to the upstream Cincinnati server."
)).unwrap();
static ref UPDATE_CHECKS_ERRORS: IntCounter = register_int_counter!(opts!(
"zincati_cincinnati_update_checks_errors_total",
"Total number of errors on checks for updates."
)).unwrap();
}
/// Cincinnati configuration.
#[derive(Debug, Serialize)]
pub struct Cincinnati {
/// Service base URL.
pub base_url: String,
}
impl Cincinnati {
/// Process Cincinnati configuration.
pub(crate) fn with_config(cfg: inputs::CincinnatiInput, id: &Identity) -> Fallible<Self> {
if cfg.base_url.is_empty() {
bail!("empty Cincinnati base URL");
}
// Substitute templated key with agent runtime values.
let base_url = if envsubst::is_templated(&cfg.base_url) {
let context = id.url_variables();
envsubst::validate_vars(&context)?;
envsubst::substitute(cfg.base_url, &context)?
} else {
cfg.base_url
};
let c = Self { base_url };
Ok(c)
}
/// Fetch next update-hint from Cincinnati.
pub(crate) fn fetch_update_hint(
&self,
id: &Identity,
can_check: bool,
) -> Box<Future<Item = Option<Node>, Error = ()>> {
if !can_check {
return Box::new(futures::future::ok(None));
}
let update = self
.next_update(id)
.inspect(|_| UPDATE_CHECKS.inc())
.map_err(|e| {
UPDATE_CHECKS_ERRORS.inc();
log::error!("failed to check for updates: {}", e)
});
Box::new(update)
}
/// Get the next update.
fn next_update(&self, id: &Identity) -> Box<Future<Item = Option<Node>, Error = Error>> {
let params = id.cincinnati_params();
let base_checksum = id.current_os.checksum.clone();
let client = client::ClientBuilder::new(self.base_url.to_string())
.query_params(Some(params))
.build();
let next = future::result(client)
.and_then(|c| c.fetch_graph())
.and_then(|graph| find_update(graph, base_checksum));
Box::new(next)
}
}
/// Walk the graph, looking for an update reachable from the given digest.
fn find_update(graph: client::Graph, digest: String) -> Fallible<Option<Node>> {
GRAPH_NODES.set(graph.nodes.len() as i64);
let cur_position = match graph
.nodes
.iter()
.position(|n| is_same_checksum(n, &digest))
{
Some(pos) => pos,
None => return Ok(None),
};
let targets: Vec<_> = graph
.edges
.iter()
.filter_map(|(src, dst)| {
if *src == cur_position as u64 {
Some(*dst as usize)
} else {
None
}
})
.collect();
let mut updates = Vec::with_capacity(targets.len());
for pos in targets {
match graph.nodes.get(pos) {
Some(n) => updates.push(n.clone()),
            None => bail!("target node '{}' not present in graph", pos),
};
}
match updates.len() {
0 => Ok(None),
// TODO(lucab): stable pick next update
_ => Ok(Some(updates.swap_remove(0))),
}
}
/// Check whether input node matches current checksum.
fn is_same_checksum(node: &Node, checksum: &str) -> bool {
let payload_is_checksum = node
.metadata
.get(SCHEME_KEY)
.map(|v| v == CHECKSUM_SCHEME)
.unwrap_or(false);
payload_is_checksum && node.payload == checksum
}
#[cfg(test)]
mod tests {
use super::*;
use std::collections::HashMap;
#[test]
fn source_node_comparison() {
let current = "current-sha";
let mut metadata = HashMap::new();
metadata.insert(SCHEME_KEY.to_string(), CHECKSUM_SCHEME.to_string());
let matching = Node {
version: "v0".to_string(),
payload: current.to_string(),
metadata,
};
assert!(is_same_checksum(&matching, current));
let mismatch = Node {
version: "v0".to_string(),
payload: "mismatch".to_string(),
metadata: HashMap::new(),
};
assert!(!is_same_checksum(&mismatch, current));
}
}
| 29.248619 | 94 | 0.593502 |
fb03f0a21cb99c2a5b64a59db0c63e75c95d92ce
| 2,514 |
use chrono::{DateTime, Utc};
use reqwest::Method;
use serde::{Deserialize, Serialize};
use crate::client::{Client, PagedResponse, PaginationOptions, Response};
use crate::errors::Result;
use crate::transactions::Money;
/// The Account resource represents all of a user's accounts, including bitcoin, bitcoin cash, litecoin and ethereum wallets, fiat currency accounts, and vaults. The account kind is carried in the `type` field; note that new types can be added over time, so make sure your implementation won't break when they appear.
/// A user can only have one primary account, and its type can only be `wallet`.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Account {
/// Resource ID
pub id: String,
/// constant `"account"`
pub resource: String,
pub resource_path: String,
/// User or system defined name
pub name: String,
/// Primary account
pub primary: bool,
/// Account’s type. Available values: wallet, fiat, vault
pub r#type: AccountType,
/// Account’s currency
pub currency: AccountCurrency,
/// Balance in BTC or ETH
pub balance: Money,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "lowercase")]
pub enum AccountType {
Wallet,
Fiat,
Vault,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct AccountCurrency {
pub code: String,
pub name: String,
pub color: String,
pub sort_index: i32,
pub exponent: i32,
pub r#type: String,
pub address_regex: String,
pub asset_id: String,
pub slug: String,
}
impl Client {
/// List accounts
///
    /// Lists current user's accounts to which the authentication method has access.
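    ///
    /// Illustrative call (a sketch, not a doctest; `client` and `pagination` are assumed
    /// to be constructed elsewhere):
    ///
    /// ```ignore
    /// let accounts = client.list_accounts(&pagination).await?;
    /// ```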
pub async fn list_accounts(
&self,
pagination: &PaginationOptions,
) -> Result<PagedResponse<Account>> {
self.send_request(
Method::GET,
&format!("accounts{}", pagination.get_query()),
None::<&()>,
)
.await
}
/// Show an account
///
/// Show current user’s account. To access the primary account for a given currency, a currency string (BTC or ETH) can be used instead of the account id in the URL.
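    ///
    /// Illustrative call (a sketch, not a doctest; `client` is assumed to be constructed
    /// elsewhere):
    ///
    /// ```ignore
    /// let btc_wallet = client.get_account("BTC").await?;
    /// ```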
pub async fn get_account(&self, account_id: &str) -> Result<Response<Account>> {
self.send_request(
Method::GET,
&format!("accounts/{}", account_id),
None::<&()>,
)
.await
}
}
| 31.037037 | 320 | 0.649562 |
0a1bc1a4cb93d99dc350d44b61f84870f403f171
| 165 |
pub struct AchievementCondition {
kind: &'static str,
value: u64,
}
pub struct Achievement {
name: String,
conditions: Vec<AchievementCondition>,
}
| 16.5 | 42 | 0.684848 |
7129d2d3d3a65697af7b07baf14ee9119c01926a
| 13,740 |
//! A library for parsing ACPI tables. This crate can be used by bootloaders and kernels for architectures that
//! support ACPI. This crate is not feature-complete, but can parse lots of the more common tables. Parsing the
//! ACPI tables is required for correctly setting up the APICs, HPET, and provides useful information about power
//! management and many other platform capabilities.
//!
//! This crate is designed to find and parse the static tables ACPI provides. It should be used in conjunction with
//! the `aml` crate, which is the (much less complete) AML parser used to parse the DSDT and SSDTs. These crates
//! are separate because some kernels may want to detect the static tables, but delay AML parsing to a later stage.
//!
//! This crate requires `alloc` to make heap allocations. If you are trying to find the RSDP in an environment that
//! does not have a heap (e.g. a bootloader), you can use the `rsdp` crate. The types from that crate are
//! compatible with `acpi`.
//!
//! ### Usage
//! To use the library, you will need to provide an implementation of the `AcpiHandler` trait, which allows the
//! library to make requests such as mapping a particular region of physical memory into the virtual address space.
//!
//! You then need to construct an instance of `AcpiTables`, which can be done in a few ways depending on how much
//! information you have:
//! * Use `AcpiTables::from_rsdp` if you have the physical address of the RSDP
//! * Use `AcpiTables::from_rsdt` if you have the physical address of the RSDT/XSDT
//! * Use `AcpiTables::search_for_rsdp_bios` if you don't have the address of either, but **you know you are
//! running on BIOS, not UEFI**
//! * Use `AcpiTables::from_tables_direct` if you are using the library in an unusual setting, such as in usermode,
//! and have a custom method to enumerate and access the tables.
//!
//! `AcpiTables` stores the addresses of all of the tables detected on a platform. The SDTs are parsed by this
//! library, or can be accessed directly with `get_sdt`, while the `DSDT` and any `SSDTs` should be parsed with
//! `aml`.
//!
//! To gather information out of the static tables, a few of the types you should take a look at are:
//! - [`PlatformInfo`](crate::platform::PlatformInfo) parses the FADT and MADT to create a nice view of the
//! processor topology and interrupt controllers on `x86_64`, and the interrupt controllers on other platforms.
//! `AcpiTables::platform_info` is a convenience method for constructing a `PlatformInfo`.
//! - [`HpetInfo`](crate::hpet::HpetInfo) parses the HPET table and tells you how to configure the High
//! Precision Event Timer.
//! - [`PciConfigRegions`](crate::mcfg::PciConfigRegions) parses the MCFG and tells you how PCIe configuration
//! space is mapped into physical memory.
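//!
//! As a rough sketch of that flow (the `KernelAcpiHandler` type and `rsdp_address` value
//! are placeholders supplied by the surrounding kernel or bootloader, not items from this
//! crate):
//!
//! ```ignore
//! let handler = KernelAcpiHandler::new();
//! let tables = unsafe { acpi::AcpiTables::from_rsdp(handler, rsdp_address) }?;
//! let platform_info = tables.platform_info()?;
//! ```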
/*
* Contributing notes (you may find these useful if you're new to contributing to the library):
* - Accessing packed fields without UB: Lots of the structures defined by ACPI are defined with `repr(packed)`
* to prevent padding being introduced, which would make the structure's layout incorrect. In Rust, this
* creates a problem as references to these fields could be unaligned, which is undefined behaviour. For the
* majority of these fields, this problem can be easily avoided by telling the compiler to make a copy of the
* field's contents: this is the perhaps unfamiliar pattern of e.g. `!{ entry.flags }.get_bit(0)` we use
* around the codebase.
*/
#![no_std]
#![deny(unsafe_op_in_unsafe_fn)]
extern crate alloc;
#[cfg_attr(test, macro_use)]
#[cfg(test)]
extern crate std;
pub mod bgrt;
pub mod fadt;
pub mod hpet;
pub mod madt;
pub mod mcfg;
pub mod platform;
pub mod sdt;
pub use crate::{
fadt::PowerProfile,
hpet::HpetInfo,
madt::MadtError,
mcfg::PciConfigRegions,
platform::{interrupt::InterruptModel, PlatformInfo},
};
pub use rsdp::{
handler::{AcpiHandler, PhysicalMapping},
RsdpError,
};
use crate::sdt::{SdtHeader, Signature};
use alloc::{collections::BTreeMap, vec::Vec};
use core::mem;
use log::trace;
use rsdp::Rsdp;
#[derive(Debug)]
pub enum AcpiError {
Rsdp(RsdpError),
SdtInvalidSignature(Signature),
SdtInvalidOemId(Signature),
SdtInvalidTableId(Signature),
SdtInvalidChecksum(Signature),
TableMissing(Signature),
InvalidFacsAddress,
InvalidDsdtAddress,
InvalidMadt(MadtError),
InvalidGenericAddress,
}
pub struct AcpiTables<H>
where
H: AcpiHandler,
{
/// The revision of ACPI that the system uses, as inferred from the revision of the RSDT/XSDT.
pub revision: u8,
pub sdts: BTreeMap<sdt::Signature, Sdt>,
pub dsdt: Option<AmlTable>,
pub ssdts: Vec<AmlTable>,
handler: H,
}
impl<H> AcpiTables<H>
where
H: AcpiHandler,
{
/// Create an `AcpiTables` if you have the physical address of the RSDP.
pub unsafe fn from_rsdp(handler: H, rsdp_address: usize) -> Result<AcpiTables<H>, AcpiError> {
let rsdp_mapping = unsafe { handler.map_physical_region::<Rsdp>(rsdp_address, mem::size_of::<Rsdp>()) };
rsdp_mapping.validate().map_err(AcpiError::Rsdp)?;
Self::from_validated_rsdp(handler, rsdp_mapping)
}
/// Search for the RSDP on a BIOS platform. This accesses BIOS-specific memory locations and will probably not
/// work on UEFI platforms. See [Rsdp::search_for_rsdp_bios](rsdp_search::Rsdp::search_for_rsdp_bios) for
/// details.
pub unsafe fn search_for_rsdp_bios(handler: H) -> Result<AcpiTables<H>, AcpiError> {
let rsdp_mapping = unsafe { Rsdp::search_for_on_bios(handler.clone()) }.map_err(AcpiError::Rsdp)?;
Self::from_validated_rsdp(handler, rsdp_mapping)
}
/// Create an `AcpiTables` if you have a `PhysicalMapping` of the RSDP that you know is correct. This is called
/// from `from_rsdp` after validation, but can also be used if you've searched for the RSDP manually on a BIOS
/// system.
pub fn from_validated_rsdp(
handler: H,
rsdp_mapping: PhysicalMapping<H, Rsdp>,
) -> Result<AcpiTables<H>, AcpiError> {
let revision = rsdp_mapping.revision();
if revision == 0 {
/*
* We're running on ACPI Version 1.0. We should use the 32-bit RSDT address.
*/
let rsdt_address = rsdp_mapping.rsdt_address();
unsafe { Self::from_rsdt(handler, revision, rsdt_address as usize) }
} else {
/*
* We're running on ACPI Version 2.0+. We should use the 64-bit XSDT address, truncated
* to 32 bits on x86.
*/
let xsdt_address = rsdp_mapping.xsdt_address();
unsafe { Self::from_rsdt(handler, revision, xsdt_address as usize) }
}
}
/// Create an `AcpiTables` if you have the physical address of the RSDT. This is useful, for example, if your chosen
/// bootloader reads the RSDP and passes you the address of the RSDT. You also need to supply the correct ACPI
    /// revision - if `0`, an RSDT is expected, while an `XSDT` is expected for greater revisions.
pub unsafe fn from_rsdt(handler: H, revision: u8, rsdt_address: usize) -> Result<AcpiTables<H>, AcpiError> {
let mut result = AcpiTables { revision, sdts: BTreeMap::new(), dsdt: None, ssdts: Vec::new(), handler };
let header = sdt::peek_at_sdt_header(&result.handler, rsdt_address);
let mapping =
unsafe { result.handler.map_physical_region::<SdtHeader>(rsdt_address, header.length as usize) };
if revision == 0 {
/*
* ACPI Version 1.0. It's a RSDT!
*/
mapping.validate(sdt::Signature::RSDT)?;
let num_tables = (mapping.length as usize - mem::size_of::<SdtHeader>()) / mem::size_of::<u32>();
let tables_base =
((mapping.virtual_start().as_ptr() as usize) + mem::size_of::<SdtHeader>()) as *const u32;
for i in 0..num_tables {
result.process_sdt(unsafe { *tables_base.add(i) as usize })?;
}
} else {
/*
* ACPI Version 2.0+. It's a XSDT!
*/
mapping.validate(sdt::Signature::XSDT)?;
let num_tables = (mapping.length as usize - mem::size_of::<SdtHeader>()) / mem::size_of::<u64>();
let tables_base =
((mapping.virtual_start().as_ptr() as usize) + mem::size_of::<SdtHeader>()) as *const u64;
for i in 0..num_tables {
result.process_sdt(unsafe { *tables_base.add(i) as usize })?;
}
}
Ok(result)
}
/// Construct an `AcpiTables` from a custom set of "discovered" tables. This is provided to allow the library
/// to be used from unconventional settings (e.g. in userspace), for example with a `AcpiHandler` that detects
/// accesses to specific physical addresses, and provides the correct data.
pub fn from_tables_direct(
handler: H,
revision: u8,
sdts: BTreeMap<sdt::Signature, Sdt>,
dsdt: Option<AmlTable>,
ssdts: Vec<AmlTable>,
) -> AcpiTables<H> {
AcpiTables { revision, sdts, dsdt, ssdts, handler }
}
fn process_sdt(&mut self, physical_address: usize) -> Result<(), AcpiError> {
let header = sdt::peek_at_sdt_header(&self.handler, physical_address);
trace!("Found ACPI table with signature {:?} and length {:?}", header.signature, { header.length });
match header.signature {
Signature::FADT => {
use crate::fadt::Fadt;
/*
* For whatever reason, they chose to put the DSDT inside the FADT, instead of just listing it
* as another SDT. We extract it here to provide a nicer public API.
*/
let fadt_mapping =
unsafe { self.handler.map_physical_region::<Fadt>(physical_address, mem::size_of::<Fadt>()) };
fadt_mapping.validate()?;
let dsdt_address = fadt_mapping.dsdt_address()?;
let dsdt_header = sdt::peek_at_sdt_header(&self.handler, dsdt_address);
self.dsdt = Some(AmlTable::new(dsdt_address, dsdt_header.length));
/*
* We've already validated the FADT to get the DSDT out, so it doesn't need to be done again.
*/
self.sdts
.insert(Signature::FADT, Sdt { physical_address, length: header.length, validated: true });
}
Signature::SSDT => {
self.ssdts.push(AmlTable::new(physical_address, header.length));
}
signature => {
self.sdts.insert(signature, Sdt { physical_address, length: header.length, validated: false });
}
}
Ok(())
}
    /// Create a mapping to an SDT, given its signature. This validates the SDT if it has not already been
/// validated.
///
/// ### Safety
/// The table's memory is naively interpreted as a `T`, and so you must be careful in providing a type that
/// correctly represents the table's structure. Regardless of the provided type's size, the region mapped will
/// be the size specified in the SDT's header. Providing a `T` that is larger than this, *may* lead to
    /// page-faults, aliasing references, or dereferencing uninitialized memory (the latter two of which are UB).
/// This isn't forbidden, however, because some tables rely on `T` being larger than a provided SDT in some
/// versions of ACPI (the [`ExtendedField`](crate::sdt::ExtendedField) type will be useful if you need to do
/// this. See our [`Fadt`](crate::fadt::Fadt) type for an example of this).
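    ///
    /// Illustrative call (a sketch, not a doctest; it assumes a `Madt` table type and a
    /// `Signature::MADT` variant from the `madt` and `sdt` modules):
    ///
    /// ```ignore
    /// let madt = unsafe { tables.get_sdt::<Madt>(Signature::MADT)? };
    /// ```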
pub unsafe fn get_sdt<T>(&self, signature: sdt::Signature) -> Result<Option<PhysicalMapping<H, T>>, AcpiError>
where
T: AcpiTable,
{
let sdt = match self.sdts.get(&signature) {
Some(sdt) => sdt,
None => return Ok(None),
};
let mapping = unsafe { self.handler.map_physical_region::<T>(sdt.physical_address, sdt.length as usize) };
if !sdt.validated {
mapping.header().validate(signature)?;
}
Ok(Some(mapping))
}
    /// Convenience method for constructing a [`PlatformInfo`](crate::platform::PlatformInfo). This is one of the
    /// first things you should usually do with an `AcpiTables`, and allows you to collect helpful information about
    /// the platform from the ACPI tables.
pub fn platform_info(&self) -> Result<PlatformInfo, AcpiError> {
PlatformInfo::new(self)
}
}
pub struct Sdt {
/// Physical address of the start of the SDT, including the header.
pub physical_address: usize,
/// Length of the table in bytes.
pub length: u32,
/// Whether this SDT has been validated. This is set to `true` the first time it is mapped and validated.
pub validated: bool,
}
/// All types representing ACPI tables should implement this trait.
pub trait AcpiTable {
fn header(&self) -> &sdt::SdtHeader;
}
#[derive(Debug)]
pub struct AmlTable {
/// Physical address of the start of the AML stream (excluding the table header).
pub address: usize,
/// Length (in bytes) of the AML stream.
pub length: u32,
}
impl AmlTable {
/// Create an `AmlTable` from the address and length of the table **including the SDT header**.
pub(crate) fn new(address: usize, length: u32) -> AmlTable {
AmlTable {
address: address + mem::size_of::<SdtHeader>(),
length: length - mem::size_of::<SdtHeader>() as u32,
}
}
}
| 43.619048 | 120 | 0.65262 |
abd78e881666615f96b6fb12e2cc8a9f02169f30
| 17,981 |
use crate::blockcfg::{
ApplyBlockLedger, Block, Fragment, FragmentId, Header, HeaderHash, LedgerParameters,
};
use crate::blockchain::{Checkpoints, LeadershipBlock, StorageError};
use crate::fragment::selection::FragmentSelectionAlgorithmParams;
use crate::network::p2p::{comm::PeerInfo, Address};
use crate::topology::{Gossips, NodeId, Peer, PeerInfo as TopologyPeerInfo, View};
use crate::utils::async_msg::{self, MessageBox, MessageQueue};
use chain_impl_mockchain::fragment::Contents as FragmentContents;
use chain_network::error as net_error;
use jormungandr_lib::interfaces::{FragmentLog, FragmentOrigin, FragmentStatus};
use poldercast::layer::Selection;
use futures::channel::{mpsc, oneshot};
use futures::prelude::*;
use futures::ready;
use std::{
collections::HashMap,
error,
fmt::{self, Debug, Display},
marker::PhantomData,
pin::Pin,
task::{Context, Poll},
};
/// The error values passed via intercom messages.
#[derive(Debug)]
pub struct Error {
code: net_error::Code,
cause: Box<dyn error::Error + Send + Sync>,
}
impl Error {
pub fn failed<T>(cause: T) -> Self
where
T: Into<Box<dyn error::Error + Send + Sync>>,
{
Error {
code: net_error::Code::Internal,
cause: cause.into(),
}
}
pub fn aborted<T>(cause: T) -> Self
where
T: Into<Box<dyn error::Error + Send + Sync>>,
{
Error {
code: net_error::Code::Aborted,
cause: cause.into(),
}
}
pub fn canceled<T>(cause: T) -> Self
where
T: Into<Box<dyn error::Error + Send + Sync>>,
{
Error {
code: net_error::Code::Canceled,
cause: cause.into(),
}
}
pub fn failed_precondition<T>(cause: T) -> Self
where
T: Into<Box<dyn error::Error + Send + Sync>>,
{
Error {
code: net_error::Code::FailedPrecondition,
cause: cause.into(),
}
}
pub fn invalid_argument<T>(cause: T) -> Self
where
T: Into<Box<dyn error::Error + Send + Sync>>,
{
Error {
code: net_error::Code::InvalidArgument,
cause: cause.into(),
}
}
pub fn not_found<T>(cause: T) -> Self
where
T: Into<Box<dyn error::Error + Send + Sync>>,
{
Error {
code: net_error::Code::NotFound,
cause: cause.into(),
}
}
pub fn unimplemented<S: Into<String>>(message: S) -> Self {
Error {
code: net_error::Code::Unimplemented,
cause: message.into().into(),
}
}
pub fn code(&self) -> net_error::Code {
self.code
}
}
impl From<oneshot::Canceled> for Error {
fn from(src: oneshot::Canceled) -> Self {
Error {
code: net_error::Code::Unavailable,
cause: src.into(),
}
}
}
impl From<StorageError> for Error {
fn from(err: StorageError) -> Self {
let code = match &err {
StorageError::BlockNotFound => net_error::Code::NotFound,
StorageError::CannotIterate => net_error::Code::Internal,
StorageError::BackendError(_) => net_error::Code::Internal,
StorageError::BlockAlreadyPresent => net_error::Code::Internal,
StorageError::MissingParent => net_error::Code::InvalidArgument,
StorageError::Deserialize(_) => net_error::Code::Internal,
StorageError::Serialize(_) => net_error::Code::Internal,
};
Error {
code,
cause: err.into(),
}
}
}
impl From<Error> for net_error::Error {
fn from(err: Error) -> Self {
net_error::Error::new(err.code(), err.cause)
}
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(&self.cause, f)
}
}
impl error::Error for Error {
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
Some(&*self.cause)
}
}
type ReplySender<T> = oneshot::Sender<Result<T, Error>>;
#[derive(Debug)]
pub struct ReplyHandle<T> {
sender: ReplySender<T>,
}
impl<T> ReplyHandle<T> {
pub fn reply(self, result: Result<T, Error>) {
// Ignoring a send error: it means the result is no longer needed
let _ = self.sender.send(result);
}
pub fn reply_ok(self, response: T) {
self.reply(Ok(response))
}
pub fn reply_error(self, error: Error) {
self.reply(Err(error))
}
}
pub struct ReplyFuture<T> {
receiver: oneshot::Receiver<Result<T, Error>>,
}
impl<T> Unpin for ReplyFuture<T> {}
impl<T> Future for ReplyFuture<T> {
type Output = Result<T, Error>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<T, Error>> {
Pin::new(&mut self.receiver).poll(cx).map(|res| match res {
Ok(Ok(item)) => {
tracing::debug!("request processed");
Ok(item)
}
Ok(Err(e)) => {
tracing::info!(reason = %e, "error processing request");
Err(e)
}
Err(oneshot::Canceled) => {
tracing::warn!("response canceled by the processing task");
Err(Error::from(oneshot::Canceled))
}
})
}
}
pub fn unary_reply<T>() -> (ReplyHandle<T>, ReplyFuture<T>) {
let (sender, receiver) = oneshot::channel();
let future = ReplyFuture { receiver };
(ReplyHandle { sender }, future)
}
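// A minimal usage sketch of the unary request/reply pair: the serving task
// consumes the `ReplyHandle`, the requester awaits the `ReplyFuture`. The
// module name is illustrative, and `futures::executor::block_on` (available
// with the default `futures` features) is used only to keep it self-contained.
#[cfg(test)]
mod unary_reply_example {
    use super::*;
    use futures::executor::block_on;

    #[test]
    fn reply_is_delivered_to_the_future() {
        let (handle, future) = unary_reply::<u32>();
        handle.reply_ok(7);
        assert_eq!(block_on(future).unwrap(), 7);
    }
}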
#[derive(Debug)]
pub struct ReplySendError;
impl fmt::Display for ReplySendError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "failed to send reply")
}
}
impl error::Error for ReplySendError {}
pub struct ReplyTrySendError<T>(mpsc::TrySendError<Result<T, Error>>);
impl<T> ReplyTrySendError<T> {
pub fn is_full(&self) -> bool {
self.0.is_full()
}
pub fn into_inner(self) -> Result<T, Error> {
self.0.into_inner()
}
pub fn into_send_error(self) -> ReplySendError {
ReplySendError
}
}
impl<T> fmt::Debug for ReplyTrySendError<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("ReplyTrySendError").field(&self.0).finish()
}
}
impl<T> fmt::Display for ReplyTrySendError<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "failed to send reply")
}
}
impl<T: 'static> error::Error for ReplyTrySendError<T> {
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
Some(&self.0)
}
}
#[derive(Debug)]
pub struct ReplyStreamHandle<T> {
lead_sender: oneshot::Sender<Result<mpsc::Receiver<Result<T, Error>>, Error>>,
buffer_size: usize,
}
impl<T> ReplyStreamHandle<T> {
fn reply(self, result: Result<mpsc::Receiver<Result<T, Error>>, Error>) {
// Ignoring a send error: it means the result is no longer needed
let _ = self.lead_sender.send(result);
}
pub fn start_sending(self) -> ReplyStreamSink<T> {
let (sender, receiver) = mpsc::channel(self.buffer_size);
self.reply(Ok(receiver));
ReplyStreamSink { sender }
}
pub fn reply_error(self, error: Error) {
self.reply(Err(error))
}
}
#[derive(Debug)]
pub struct ReplyStreamSink<T> {
sender: mpsc::Sender<Result<T, Error>>,
}
impl<T> Unpin for ReplyStreamSink<T> {}
impl<T> Clone for ReplyStreamSink<T> {
fn clone(&self) -> Self {
ReplyStreamSink {
sender: self.sender.clone(),
}
}
}
impl<T> ReplyStreamSink<T> {
pub fn try_send_item(&mut self, item: Result<T, Error>) -> Result<(), ReplyTrySendError<T>> {
self.sender.try_send(item).map_err(ReplyTrySendError)
}
pub fn poll_ready(&mut self, cx: &mut Context) -> Poll<Result<(), ReplySendError>> {
self.sender.poll_ready(cx).map_err(|_| ReplySendError)
}
}
impl<T> Sink<Result<T, Error>> for ReplyStreamSink<T> {
type Error = ReplySendError;
fn poll_ready(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
Pin::new(&mut self.sender)
.poll_ready(cx)
.map_err(|_| ReplySendError)
}
fn start_send(mut self: Pin<&mut Self>, item: Result<T, Error>) -> Result<(), Self::Error> {
Pin::new(&mut self.sender)
.start_send(item)
.map_err(|_| ReplySendError)
}
fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
Pin::new(&mut self.sender)
.poll_flush(cx)
.map_err(|_| ReplySendError)
}
fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
Pin::new(&mut self.sender)
.poll_close(cx)
.map_err(|_| ReplySendError)
}
}
pub struct ReplyStreamFuture<T, E> {
lead_receiver: oneshot::Receiver<Result<mpsc::Receiver<Result<T, Error>>, Error>>,
_phantom_error: PhantomData<E>,
}
impl<T, E> Unpin for ReplyStreamFuture<T, E> {}
impl<T, E> Future for ReplyStreamFuture<T, E>
where
E: From<Error>,
{
type Output = Result<ReplyStream<T, E>, E>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
let receiver = ready!(Pin::new(&mut self.lead_receiver).poll(cx)).map_err(
|e: oneshot::Canceled| {
tracing::warn!("response canceled by the processing task");
Error::from(e)
},
)??;
let stream = ReplyStream {
receiver,
_phantom_error: PhantomData,
};
Poll::Ready(Ok(stream))
}
}
pub struct ReplyStream<T, E> {
receiver: mpsc::Receiver<Result<T, Error>>,
_phantom_error: PhantomData<E>,
}
impl<T, E> Unpin for ReplyStream<T, E> {}
impl<T> ReplyStream<T, Error> {
/// Converts this stream into an infallible stream for uploading
pub fn upload(self) -> UploadStream<T> {
UploadStream { inner: self }
}
}
impl<T, E> Stream for ReplyStream<T, E>
where
E: From<Error>,
{
type Item = Result<T, E>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
Pin::new(&mut self.receiver)
.poll_next(cx)
.map(|maybe_res| match maybe_res {
Some(Ok(item)) => Some(Ok(item)),
None => None,
Some(Err(e)) => {
tracing::info!(
error = ?e,
"error while streaming response"
);
Some(Err(e.into()))
}
})
}
}
/// An adapter for outbound client streaming requests
pub struct UploadStream<T> {
inner: ReplyStream<T, Error>,
}
impl<T> Stream for UploadStream<T> {
type Item = T;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<T>> {
Pin::new(&mut self.inner)
.poll_next(cx)
.map(|maybe_res| match maybe_res {
Some(Ok(item)) => Some(item),
None => None,
Some(Err(_)) => None,
})
}
}
pub fn stream_reply<T, E>(buffer_size: usize) -> (ReplyStreamHandle<T>, ReplyStreamFuture<T, E>) {
let (lead_sender, lead_receiver) = oneshot::channel();
let handle = ReplyStreamHandle {
lead_sender,
buffer_size,
};
let future = ReplyStreamFuture {
lead_receiver,
_phantom_error: PhantomData,
};
(handle, future)
}
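// A minimal sketch of the streaming reply pair: the serving task turns its
// handle into a sink and pushes items, the requester awaits the stream. The
// module name is illustrative; the blocking executor keeps it self-contained.
#[cfg(test)]
mod stream_reply_example {
    use super::*;
    use futures::executor::block_on;
    use futures::stream::StreamExt;

    #[test]
    fn items_flow_from_sink_to_stream() {
        let (handle, future) = stream_reply::<u32, Error>(8);
        let mut sink = handle.start_sending();
        sink.try_send_item(Ok(1)).unwrap();
        sink.try_send_item(Ok(2)).unwrap();
        // Dropping the sink closes the channel, which terminates the stream.
        drop(sink);
        let stream = block_on(future).unwrap();
        let items: Vec<u32> = block_on(stream.map(|item| item.unwrap()).collect());
        assert_eq!(items, vec![1, 2]);
    }
}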
#[derive(Debug)]
pub struct RequestStreamHandle<T, R> {
receiver: MessageQueue<T>,
reply: ReplyHandle<R>,
}
pub struct RequestSink<T> {
sender: MessageBox<T>,
}
impl<T, R> RequestStreamHandle<T, R> {
pub fn into_stream_and_reply(self) -> (MessageQueue<T>, ReplyHandle<R>) {
(self.receiver, self.reply)
}
}
impl<T> RequestSink<T> {
fn map_send_error(&self, _e: mpsc::SendError, msg: &'static str) -> Error {
tracing::debug!("{}", msg);
Error::aborted("request stream processing ended before all items were sent")
}
}
impl<T> Sink<T> for RequestSink<T> {
type Error = Error;
fn poll_ready(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Error>> {
self.sender.poll_ready(cx).map_err(|e| {
self.map_send_error(
e,
"request stream processing ended before receiving some items",
)
})
}
fn start_send(mut self: Pin<&mut Self>, item: T) -> Result<(), Error> {
self.sender.start_send(item).map_err(|e| {
self.map_send_error(
e,
"request stream processing ended before receiving some items",
)
})
}
fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Error>> {
Pin::new(&mut self.sender).poll_flush(cx).map_err(|e| {
self.map_send_error(
e,
"request stream processing ended before receiving some items",
)
})
}
fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Error>> {
Pin::new(&mut self.sender).poll_close(cx).map_err(|e| {
self.map_send_error(
e,
"request stream processing channel did not close gracefully, \
the task possibly failed to receive some items",
)
})
}
}
pub fn stream_request<T, R>(
buffer: usize,
) -> (RequestStreamHandle<T, R>, RequestSink<T>, ReplyFuture<R>) {
let (sender, receiver) = async_msg::channel(buffer);
let (reply, reply_future) = unary_reply();
let handle = RequestStreamHandle { receiver, reply };
let sink = RequestSink { sender };
(handle, sink, reply_future)
}
/// ...
#[allow(clippy::large_enum_variant)]
#[derive(Debug)]
pub enum TransactionMsg {
SendTransaction(FragmentOrigin, Vec<Fragment>),
RemoveTransactions(Vec<FragmentId>, FragmentStatus),
GetLogs(ReplyHandle<Vec<FragmentLog>>),
GetStatuses(
Vec<FragmentId>,
ReplyHandle<HashMap<FragmentId, FragmentStatus>>,
),
SelectTransactions {
pool_idx: usize,
ledger: ApplyBlockLedger,
ledger_params: LedgerParameters,
selection_alg: FragmentSelectionAlgorithmParams,
reply_handle: ReplyHandle<(FragmentContents, ApplyBlockLedger)>,
soft_deadline_future: futures::channel::oneshot::Receiver<()>,
hard_deadline_future: futures::channel::oneshot::Receiver<()>,
},
}
/// Client messages, mainly requests from connected peers to our node,
/// used for fetching block headers, blocks, and the chain tip.
pub enum ClientMsg {
GetBlockTip(ReplyHandle<Header>),
GetHeaders(Vec<HeaderHash>, ReplyStreamHandle<Header>),
PullHeaders(Vec<HeaderHash>, HeaderHash, ReplyStreamHandle<Header>),
GetBlocks(Vec<HeaderHash>, ReplyStreamHandle<Block>),
PullBlocks(Vec<HeaderHash>, HeaderHash, ReplyStreamHandle<Block>),
PullBlocksToTip(Vec<HeaderHash>, ReplyStreamHandle<Block>),
}
impl Debug for ClientMsg {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ClientMsg::GetBlockTip(_) => f
.debug_tuple("GetBlockTip")
.field(&format_args!("_"))
.finish(),
ClientMsg::GetHeaders(ids, _) => f
.debug_tuple("GetHeaders")
.field(ids)
.field(&format_args!("_"))
.finish(),
ClientMsg::PullHeaders(from, to, _) => f
.debug_tuple("PullHeaders")
.field(from)
.field(to)
.field(&format_args!("_"))
.finish(),
ClientMsg::GetBlocks(ids, _) => f
.debug_tuple("GetBlocks")
.field(ids)
.field(&format_args!("_"))
.finish(),
ClientMsg::PullBlocks(from, to, _) => f
.debug_tuple("PullBlocks")
.field(from)
.field(to)
.field(&format_args!("_"))
.finish(),
ClientMsg::PullBlocksToTip(from, _) => f
.debug_tuple("PullBlocksToTip")
.field(from)
.field(&format_args!("_"))
.finish(),
}
}
}
/// General Block Message for the block task
pub enum BlockMsg {
/// A trusted Block has been received from the leadership task
LeadershipBlock(LeadershipBlock),
    /// An untrusted block Header has been received from the network task
AnnouncedBlock(Header, Address),
/// A stream of untrusted blocks has been received from the network task.
NetworkBlocks(RequestStreamHandle<Block, ()>),
/// The stream of headers for missing chain blocks has been received
/// from the network in response to a PullHeaders request or a Missing
/// solicitation event.
ChainHeaders(RequestStreamHandle<Header, ()>),
}
/// Propagation requests for the network task.
#[derive(Debug)]
pub enum PropagateMsg {
Block(Header),
Fragment(Fragment),
Gossip(Peer, Gossips),
}
/// Messages to the network task.
#[derive(Debug)]
pub enum NetworkMsg {
Propagate(PropagateMsg),
GetBlocks(Vec<HeaderHash>),
GetNextBlock(Address, HeaderHash),
PullHeaders {
node_address: Address,
from: Checkpoints,
to: HeaderHash,
},
PeerInfo(ReplyHandle<Vec<PeerInfo>>),
}
/// Messages to the topology task
pub enum TopologyMsg {
AcceptGossip(Gossips),
DemotePeer(NodeId),
PromotePeer(NodeId),
View(Selection, ReplyHandle<View>),
ListAvailable(ReplyHandle<Vec<TopologyPeerInfo>>),
ListNonPublic(ReplyHandle<Vec<TopologyPeerInfo>>),
ListQuarantined(ReplyHandle<Vec<TopologyPeerInfo>>),
}
/// Messages to the explorer task
pub enum ExplorerMsg {
NewBlock(Block),
NewTip(HeaderHash),
}
#[cfg(test)]
mod tests {}
| 28.7696 | 98 | 0.586286 |
710d395664d1866322ce03d53dfd2f510e6d394f
| 176 |
mod bystate;
mod digital_cypher_vol_3;
mod easy_diagonal;
mod float_point_approx;
mod position_average;
mod rainfall;
mod reducing_by_steps;
mod steps_in_primes;
fn main() {}
| 16 | 25 | 0.818182 |
b9809b0f08d76230e1bca1ff63bd011fcc6a06a1
| 5,800 |
use wasmparser::*;
#[test]
fn name_annotations() -> anyhow::Result<()> {
assert_module_name("foo", r#"(module $foo)"#)?;
assert_module_name("foo", r#"(module (@name "foo"))"#)?;
assert_module_name("foo", r#"(module $bar (@name "foo"))"#)?;
assert_module_name("foo bar", r#"(module $bar (@name "foo bar"))"#)?;
Ok(())
}
fn assert_module_name(name: &str, wat: &str) -> anyhow::Result<()> {
let wasm = wat::parse_str(wat)?;
let mut found = false;
for s in get_name_section(&wasm)? {
match s? {
Name::Module(n) => {
assert_eq!(n.get_name()?, name);
found = true;
}
_ => {}
}
}
assert!(found);
Ok(())
}
#[test]
fn func_annotations() -> anyhow::Result<()> {
assert_func_name("foo", r#"(module (func $foo))"#)?;
assert_func_name("foo", r#"(module (func (@name "foo")))"#)?;
assert_func_name("foo", r#"(module (func $bar (@name "foo")))"#)?;
assert_func_name("foo bar", r#"(module (func $bar (@name "foo bar")))"#)?;
Ok(())
}
fn assert_func_name(name: &str, wat: &str) -> anyhow::Result<()> {
let wasm = wat::parse_str(wat)?;
let mut found = false;
for s in get_name_section(&wasm)? {
match s? {
Name::Function(n) => {
let mut map = n.get_map()?;
let naming = map.read()?;
assert_eq!(naming.index, 0);
assert_eq!(naming.name, name);
found = true;
}
_ => {}
}
}
assert!(found);
Ok(())
}
#[test]
fn local_annotations() -> anyhow::Result<()> {
assert_local_name("foo", r#"(module (func (param $foo i32)))"#)?;
assert_local_name("foo", r#"(module (func (local $foo i32)))"#)?;
assert_local_name("foo", r#"(module (func (param (@name "foo") i32)))"#)?;
assert_local_name("foo", r#"(module (func (local (@name "foo") i32)))"#)?;
assert_local_name("foo", r#"(module (func (param $bar (@name "foo") i32)))"#)?;
assert_local_name("foo", r#"(module (func (local $bar (@name "foo") i32)))"#)?;
assert_local_name(
"foo bar",
r#"(module (func (param $bar (@name "foo bar") i32)))"#,
)?;
assert_local_name(
"foo bar",
r#"(module (func (local $bar (@name "foo bar") i32)))"#,
)?;
Ok(())
}
fn assert_local_name(name: &str, wat: &str) -> anyhow::Result<()> {
let wasm = wat::parse_str(wat)?;
let mut found = false;
for s in get_name_section(&wasm)? {
match s? {
Name::Local(n) => {
let mut reader = n.get_indirect_map()?;
let section = reader.read()?;
let mut map = section.get_map()?;
let naming = map.read()?;
assert_eq!(naming.index, 0);
assert_eq!(naming.name, name);
found = true;
}
_ => {}
}
}
assert!(found);
Ok(())
}
fn get_name_section(wasm: &[u8]) -> anyhow::Result<NameSectionReader<'_>> {
for payload in Parser::new(0).parse_all(&wasm) {
if let Payload::CustomSection {
name: "name",
data,
data_offset,
range: _,
} = payload?
{
return Ok(NameSectionReader::new(data, data_offset)?);
}
}
panic!("no name section found");
}
#[test]
fn custom_section_order() -> anyhow::Result<()> {
let bytes = wat::parse_str(
r#"
(module
(@custom "A" "aaa")
(type (func))
(@custom "B" (after func) "bbb")
(@custom "C" (before func) "ccc")
(@custom "D" (after last) "ddd")
(table 10 funcref)
(func (type 0))
(@custom "E" (after import) "eee")
(@custom "F" (before type) "fff")
(@custom "G" (after data) "ggg")
(@custom "H" (after code) "hhh")
(@custom "I" (after func) "iii")
(@custom "J" (before func) "jjj")
(@custom "K" (before first) "kkk")
)
"#,
)?;
macro_rules! assert_matches {
($a:expr, $b:pat $(,)?) => {
match &$a {
$b => {}
a => panic!("`{:?}` doesn't match `{}`", a, stringify!($b)),
}
};
}
let wasm = Parser::new(0)
.parse_all(&bytes)
.collect::<Result<Vec<_>>>()?;
assert_matches!(wasm[0], Payload::Version { .. });
assert_matches!(wasm[1], Payload::CustomSection { name: "K", .. });
assert_matches!(wasm[2], Payload::CustomSection { name: "F", .. });
assert_matches!(wasm[3], Payload::TypeSection(_));
assert_matches!(wasm[4], Payload::CustomSection { name: "E", .. });
assert_matches!(wasm[5], Payload::CustomSection { name: "C", .. });
assert_matches!(wasm[6], Payload::CustomSection { name: "J", .. });
assert_matches!(wasm[7], Payload::FunctionSection(_));
assert_matches!(wasm[8], Payload::CustomSection { name: "B", .. });
assert_matches!(wasm[9], Payload::CustomSection { name: "I", .. });
assert_matches!(wasm[10], Payload::TableSection(_));
assert_matches!(wasm[11], Payload::CodeSectionStart { .. });
assert_matches!(wasm[12], Payload::CodeSectionEntry { .. });
assert_matches!(wasm[13], Payload::CustomSection { name: "H", .. });
assert_matches!(wasm[14], Payload::CustomSection { name: "G", .. });
assert_matches!(wasm[15], Payload::CustomSection { name: "A", .. });
assert_matches!(wasm[16], Payload::CustomSection { name: "D", .. });
match &wasm[17] {
Payload::End(x) if *x == bytes.len() => {}
p => panic!("`{:?}` doesn't match expected length of {}", p, bytes.len()),
}
Ok(())
}
| 34.319527 | 83 | 0.504483 |
e9e6ea1d5caeaf861a78c9aca588dd1d2e6e1a33
| 259 |
pub use self::colorrgba::{Channel, ColorRGBA};
pub use self::surface::Surface;
pub use self::surfacefactory::SurfaceFactory;
pub use self::surfaceiterator::SurfaceIterator;
pub mod colorrgba;
pub mod surface;
pub mod surfacefactory;
pub mod surfaceiterator;
| 25.9 | 47 | 0.795367 |
d9f8fee15b1a73813d887f8f6daec1baf550513a
| 11,318 |
//! Helper to crudely rewrite types from the spec's format to our own.
//!
//! This reads a file containing table and record descriptions (from the spec)
//! and converts them to the form that we use, writing the result to stdout.
//!
//! Input should be in the format:
//!
//!
//! ```
//! { // braces can be used to group items into multiple macro invocations
//! /// an optional comment for each top-level item
//! @table Gpos1_0
//! uint16 majorVersion Major version of the GPOS table, = 1
//! uint16 minorVersion Minor version of the GPOS table, = 0
//! Offset16 scriptListOffset Offset to ScriptList table, from beginning of GPOS table
//! Offset16 featureListOffset Offset to FeatureList table, from beginning of GPOS table
//! Offset16 lookupListOffset Offset to LookupList table, from beginning of GPOS table
//!
//! @table Gpos1_1
//! uint16 majorVersion Major version of the GPOS table, = 1
//! uint16 minorVersion Minor version of the GPOS table, = 1
//! Offset16 scriptListOffset Offset to ScriptList table, from beginning of GPOS table
//! Offset16 featureListOffset Offset to FeatureList table, from beginning of GPOS table
//! Offset16 lookupListOffset Offset to LookupList table, from beginning of GPOS table
//! Offset32 featureVariationsOffset Offset to FeatureVariations table, from beginning of GPOS table (may be NULL)
//! }
//! ```
//!
//! - different records/tables are separated by newlines.
//! - the first line of each item is a declaration such as `@table Name`,
//!   `@record Name`, `@enum(u16) Name`, or `@flags(u16) Name`
//! - other lines are copy-pasted from the spec (type, name, and description
//!   separated by tabs)
//!
//! *limitations:* this doesn't handle lifetimes, and doesn't generate annotations.
//! You will need to clean up the output.
use std::{fmt::Write, ops::Deref};
macro_rules! exit_with_msg {
($disp:expr, $line:expr) => {{
eprintln!("ERROR: {}", $disp);
eprintln!("Line {}: '{}'", 1 + $line.number, $line.text);
std::process::exit(1);
}};
}
static MACRO_CALL: &str = "font_types::tables!";
/// a wrapper around a line, so we can report errors with line numbers
struct Line<'a> {
text: &'a str,
number: usize,
}
impl Deref for Line<'_> {
type Target = str;
fn deref(&self) -> &<Self as Deref>::Target {
self.text
}
}
fn main() {
let in_path = std::env::args().nth(1).expect("expected path argument");
let input = std::fs::read_to_string(in_path).expect("failed to read path");
let mut lines = input
.lines()
.enumerate()
.map(|(number, text)| Line {
text: text.trim(),
number,
})
.filter(|l| !l.starts_with('#'));
while let Some(group) = generate_group(&mut lines) {
println!("\n{} {{", MACRO_CALL);
println!("{}", group);
println!("}}");
}
}
/// Generate a group of items. This is multiple items within a pair of brackets,
/// which will share a single macro invocation.
fn generate_group<'a>(lines: impl Iterator<Item = Line<'a>>) -> Option<String> {
let mut lines = lines.skip_while(|s| s.is_empty()).peekable();
// if this is an explicit group, discard the brace line
if lines.peek()?.starts_with('{') {
let _ = lines.next();
}
let mut result = String::new();
// stop at closing brace if it exists; if it doesn't we just consume all items
let mut lines = lines.take_while(|line| !line.starts_with(&['{', '}']));
while let Some(item) = generate_one_item(&mut lines) {
if !result.is_empty() {
result.push_str("\n\n");
}
result.push_str(&item);
}
Some(result)
}
/// parse a single enum, table or record.
///
/// Returns `Some` on success, `None` if there are no more items, and terminates
/// if something goes wrong.
fn generate_one_item<'a>(lines: impl Iterator<Item = Line<'a>>) -> Option<String> {
let mut lines = lines.skip_while(|line| line.is_empty());
let mut comments = Vec::new();
let decl = loop {
match lines.next() {
Some(line) if line.starts_with("///") => comments.push(line.text),
Some(line) if line.starts_with('@') => break Decl::parse(line).unwrap(),
Some(line) => exit_with_msg!("expected table or record name", line),
None => return None,
}
};
let item = match decl.kind {
DeclKind::RawEnum => generate_one_enum(decl, lines),
DeclKind::Table | DeclKind::Record => generate_one_table(decl, lines),
DeclKind::Flags => generate_one_flags(decl, lines),
}?;
let mut comments = comments.join("\n");
if comments.is_empty() {
Some(item)
} else {
comments.push('\n');
comments.push_str(&item);
Some(comments)
}
}
/// Generate a single table or record (they're currently the same)
fn generate_one_table<'a>(decl: Decl, lines: impl Iterator<Item = Line<'a>>) -> Option<String> {
let fields = lines.map_while(parse_field).collect::<Vec<_>>();
let lifetime_str = if fields.iter().any(|x| x.maybe_count.is_some()) {
"<'a>"
} else {
""
};
let mut result = String::new();
writeln!(&mut result, "{}{} {{", decl.name, lifetime_str).unwrap();
for line in &fields {
writeln!(&mut result, "{}", line).unwrap();
}
result.push('}');
Some(result)
}
fn generate_one_enum<'a>(decl: Decl, lines: impl Iterator<Item = Line<'a>>) -> Option<String> {
let fields = lines.map_while(parse_field).collect::<Vec<_>>();
let mut result = String::new();
writeln!(
&mut result,
"#[repr({})]\nenum {} {{",
decl.annotation, decl.name
)
.unwrap();
for line in &fields {
writeln!(&mut result, " {} = {},", line.name, line.typ).unwrap();
}
result.push('}');
Some(result)
}
fn generate_one_flags<'a>(decl: Decl, lines: impl Iterator<Item = Line<'a>>) -> Option<String> {
let fields = lines.map_while(parse_field).collect::<Vec<_>>();
let mut result = String::new();
writeln!(
&mut result,
"#[flags({})]\n{} {{",
decl.annotation, decl.name
)
.unwrap();
for line in &fields {
format_comment(&mut result, " ", line.comment).unwrap();
writeln!(&mut result, " {} = {},", line.name, line.typ).unwrap();
}
result.push('}');
Some(result)
}
enum DeclKind {
Table,
Record,
RawEnum,
Flags,
}
struct Decl<'a> {
kind: DeclKind,
annotation: &'a str,
name: &'a str,
}
impl<'a> Decl<'a> {
fn parse(line: Line<'a>) -> Option<Self> {
let mut decl = line.text.split_whitespace();
let mut annotation = "";
let kind = match decl.next()? {
"@table" => DeclKind::Table,
"@record" => DeclKind::Record,
x if x.starts_with("@enum(") || x.starts_with("@flags(") => {
let repr = x.split_once('(').unwrap().1.trim_end_matches(')');
//let repr = x.trim_start_matches("@enum(").trim_end_matches(')');
if !["u8", "u16"].contains(&repr) {
exit_with_msg!(format!("unexpected enum/flag repr '{}'", repr), line);
}
annotation = repr;
if x.starts_with("@enum") {
DeclKind::RawEnum
} else {
DeclKind::Flags
}
}
"@enum" | "@flags" => exit_with_msg!(
"@enum/@flags requires explicit repr like: '@flags(u16)'",
line
),
other => exit_with_msg!(format!("unknown item kind '{}'", other), line),
};
let name = decl
.next()
.unwrap_or_else(|| exit_with_msg!("missing name", line));
Some(Decl {
kind,
annotation,
name,
})
}
}
struct Field<'a> {
name: &'a str,
maybe_count: Option<String>,
typ: &'a str,
comment: &'a str,
}
impl<'a> std::fmt::Display for Field<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
format_comment(f, " ", self.comment)?;
if self.name.contains("reserved") {
writeln!(f, " #[hidden]")?;
}
if let Some(count) = &self.maybe_count {
writeln!(f, " #[count({})]", count)?;
write!(f, " {}: [{}],", decamalize(self.name), self.typ)?;
} else {
write!(f, " {}: {},", decamalize(self.name), self.typ)?;
}
Ok(())
}
}
fn parse_field(line: Line) -> Option<Field> {
if line.is_empty() {
return None;
}
let mut iter = line.text.splitn(3, '\t');
let (typ, ident, comment) = match (iter.next(), iter.next(), iter.next()) {
(Some(a), Some(b), Some(c)) => (a, b, c),
(Some(a), Some(b), None) => (a, b, ""),
_ => exit_with_msg!("line could not be parsed as type/name/comment", line),
};
let typ = normalize_type(typ);
let (name, maybe_count) = split_ident(ident);
//let name = decamalize(name);
let maybe_count = maybe_count.map(decamalize);
Some(Field {
name,
maybe_count,
typ,
comment,
})
}
/// takes an ident and splits it into the name and an optional count (if the item
/// is an array)
fn split_ident(input: &str) -> (&str, Option<&str>) {
match input.split_once('[') {
Some((front, back)) => (front, Some(back.trim_end_matches(']'))),
None => (input, None),
}
}
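// A small illustrative test of the splitting rule described above; the spec
// idents used here are hypothetical examples, not taken from a real table.
#[cfg(test)]
mod split_ident_example {
    use super::split_ident;

    #[test]
    fn splits_array_idents_into_name_and_count() {
        assert_eq!(
            split_ident("scriptRecords[scriptCount]"),
            ("scriptRecords", Some("scriptCount"))
        );
        assert_eq!(split_ident("majorVersion"), ("majorVersion", None));
    }
}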
fn normalize_type(input: &str) -> &str {
match input {
"uint8" => "BigEndian<u8>",
"uint16" => "BigEndian<u16>",
"uint24" => "BigEndian<Uint24>",
"uint32" => "BigEndian<u32>",
"int8" => "BigEndian<i8>",
"int16" => "BigEndian<i16>",
"int32" => "BigEndian<i32>",
"FWORD" => "BigEndian<FWord>",
"UFWORD" => "BigEndian<UfWord>",
"F2DOT14" => "BigEndian<F2Dot14>",
"LONGDATETIME" => "BigEndian<LongDateTime>",
"Version16Dot16" => "BigEndian<Version16Dot16>",
"Fixed" => "BigEndian<Fixed>",
"Tag" => "BigEndian<Tag>",
"Offset16" => "BigEndian<Offset16>",
"Offset24" => "BigEndian<Offset24>",
"Offset32" => "BigEndian<Offset32>",
other => other,
}
}
fn decamalize(input: &str) -> String {
//taken from serde: https://github.com/serde-rs/serde/blob/7e19ae8c9486a3bbbe51f1befb05edee94c454f9/serde_derive/src/internals/case.rs#L69-L76
let mut snake = String::new();
for (i, ch) in input.char_indices() {
if i > 0 && ch.is_uppercase() {
snake.push('_');
}
snake.push(ch.to_ascii_lowercase());
}
snake
}
fn format_comment(f: &mut dyn std::fmt::Write, whitespace: &str, input: &str) -> std::fmt::Result {
const LINE_LEN: usize = 72;
let mut cur_len = 0;
for token in input.split_inclusive(' ') {
if cur_len == 0 || cur_len + token.len() > LINE_LEN {
if cur_len > 0 {
writeln!(f)?;
}
write!(f, "{}/// ", whitespace)?;
cur_len = whitespace.len() + 4;
}
write!(f, "{}", token)?;
cur_len += token.len();
}
if cur_len > 0 {
writeln!(f)?;
}
Ok(())
}
| 32.710983 | 146 | 0.559463 |
214b1a93ff1c28b5b7c9982ae5fa969b5daed9a4
| 788 |
use language::operations::{make_param_doc, Operation, ParamInfo};
pub struct AgentSetGroupOp;
const DOC: &str =
    "Puts the bot agent under the command of the specified player. Only works in multiplayer.";
pub const OP_CODE: u32 = 1766;
pub const IDENT: &str = "agent_set_group";
impl Operation for AgentSetGroupOp {
fn op_code(&self) -> u32 {
OP_CODE
}
fn documentation(&self) -> &'static str {
DOC
}
fn identifier(&self) -> &'static str {
IDENT
}
fn param_info(&self) -> ParamInfo {
ParamInfo {
num_required: 2,
num_optional: 0,
param_docs: vec![
make_param_doc("<agent_id>", ""),
make_param_doc("<player_leader_id>", ""),
],
}
}
}
| 21.888889 | 87 | 0.567259 |
14286bb207424e21f9b8ae7663bd34b1e7a6049e
| 545 |
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use generic_array::{typenum::U32, GenericArray};
use generic_bytes_derive::SizedBytes;
#[derive(SizedBytes)]
struct Foo(GenericArray<u8, U32>, GenericArray<u8, U32>);
#[test]
fn test_foo() {
let d1 = [1u8; 32];
let d2 = [2u8; 32];
let g1 = GenericArray::from_slice(&d1);
let g2 = GenericArray::from_slice(&d2);
let _foo = Foo(*g1, *g2);
}
| 24.772727 | 66 | 0.678899 |
acb23d3482e7300f10e517ff9633fc9034123643
| 2,060 |
use ceres::lexer::Lexer;
use ceres::parser::Parser;
use ceres::token::Token;
// AST
use ceres::ast::IdentNode;
use ceres::ast::value::{ValueNode, ValueType};
#[cfg(test)]
mod parser_tests {
use super::*;
fn scan_and_return(input: &str) -> Vec<Token> {
let lexer = Lexer::new(input);
lexer.scan()
}
#[test]
fn parser_parse_number() {
let tokens = scan_and_return("69");
let mut p = Parser::new(tokens);
let r = p.parse_number();
assert_eq!(r, ValueNode::new(ValueType::Integer, 69));
}
#[test]
fn parser_parse_string() {
let tokens = scan_and_return(r#""Hello world""#);
let mut p = Parser::new(tokens);
let r = p.parse_string();
assert_eq!(r, ValueNode::new(ValueType::StringLit, "Hello world".into()));
}
#[test]
fn parser_parse_valid_ident() {
let tokens = scan_and_return(r#"validIdent"#);
let mut p = Parser::new(tokens);
let r = p.parse_ident();
assert_eq!(r, IdentNode::new("validIdent".into()));
}
#[test]
/// Try to parse an identifier, followed by a string literal
fn parser_parse_ident_then_string() {
let tokens = scan_and_return(r#"name "charlotte""#);
let mut p = Parser::new(tokens);
let ident = p.parse_ident();
let string = p.parse_string();
// Expect an identifier, followed by a string literal
assert_eq!(ident, IdentNode::new("name".into()));
assert_eq!(string, ValueNode::new(ValueType::StringLit, "charlotte".into()));
}
#[test]
/// Try to parse an identifier, followed by an integer
fn parser_parse_ident_then_number() {
let tokens = scan_and_return(r#"number 123"#);
let mut p = Parser::new(tokens);
let ident = p.parse_ident();
let num = p.parse_number();
// Expect an identifier, followed by an integer
assert_eq!(ident, IdentNode::new("number".into()));
assert_eq!(num, ValueNode::new(ValueType::Integer, 123));
}
}
| 28.611111 | 85 | 0.599029 |
ac6e60ade1919b3643b73466f315e1b27a7b6209
| 5,996 |
use std::convert::TryFrom;
use chrono::Datelike;
use crate::{
array::*,
datatypes::DataType,
error::{ArrowError, Result},
temporal_conversions::{
utf8_to_naive_timestamp_ns as utf8_to_naive_timestamp_ns_,
utf8_to_timestamp_ns as utf8_to_timestamp_ns_, EPOCH_DAYS_FROM_CE,
},
types::NativeType,
};
use super::CastOptions;
const RFC3339: &str = "%Y-%m-%dT%H:%M:%S%.f%:z";
/// Casts a [`Utf8Array`] to a [`PrimitiveArray`], making any uncastable value a Null.
pub fn utf8_to_primitive<O: Offset, T>(from: &Utf8Array<O>, to: &DataType) -> PrimitiveArray<T>
where
T: NativeType + lexical_core::FromLexical,
{
let iter = from
.iter()
.map(|x| x.and_then::<T, _>(|x| lexical_core::parse(x.as_bytes()).ok()));
PrimitiveArray::<T>::from_trusted_len_iter(iter).to(to.clone())
}
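// A minimal sketch of the cast above, assuming the `Utf8Array::from`
// constructor for `&[Option<&str>]` used elsewhere in this crate.
#[cfg(test)]
mod utf8_to_primitive_example {
    use super::*;

    #[test]
    fn unparsable_values_become_null() {
        let array = Utf8Array::<i32>::from(&[Some("5"), Some("not a number"), None]);
        let casted = utf8_to_primitive::<i32, i32>(&array, &DataType::Int32);
        assert_eq!(casted.iter().collect::<Vec<_>>(), vec![Some(&5), None, None]);
    }
}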
/// Casts a [`Utf8Array`] to a [`PrimitiveArray`] on a best-effort basis using `lexical_core::parse_partial`, making any uncastable value zero.
pub fn partial_utf8_to_primitive<O: Offset, T>(
from: &Utf8Array<O>,
to: &DataType,
) -> PrimitiveArray<T>
where
T: NativeType + lexical_core::FromLexical,
{
let iter = from.iter().map(|x| {
x.and_then::<T, _>(|x| lexical_core::parse_partial(x.as_bytes()).ok().map(|x| x.0))
});
PrimitiveArray::<T>::from_trusted_len_iter(iter).to(to.clone())
}
pub(super) fn utf8_to_primitive_dyn<O: Offset, T>(
from: &dyn Array,
to: &DataType,
options: CastOptions,
) -> Result<Box<dyn Array>>
where
T: NativeType + lexical_core::FromLexical,
{
let from = from.as_any().downcast_ref().unwrap();
if options.partial {
Ok(Box::new(partial_utf8_to_primitive::<O, T>(from, to)))
} else {
Ok(Box::new(utf8_to_primitive::<O, T>(from, to)))
}
}
/// Casts a [`Utf8Array`] to a Date32 primitive, making any uncastable value a Null.
pub fn utf8_to_date32<O: Offset>(from: &Utf8Array<O>) -> PrimitiveArray<i32> {
let iter = from.iter().map(|x| {
x.and_then(|x| {
x.parse::<chrono::NaiveDate>()
.ok()
.map(|x| x.num_days_from_ce() - EPOCH_DAYS_FROM_CE)
})
});
PrimitiveArray::<i32>::from_trusted_len_iter(iter).to(DataType::Date32)
}
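// A minimal sketch of the Date32 cast, under the same constructor assumption
// as above: parsable dates become days since the Unix epoch, the rest null.
#[cfg(test)]
mod utf8_to_date32_example {
    use super::*;

    #[test]
    fn dates_become_days_since_epoch() {
        let array = Utf8Array::<i32>::from(&[Some("1970-01-05"), Some("not a date"), None]);
        let casted = utf8_to_date32(&array);
        // 1970-01-05 is four days after 1970-01-01.
        assert_eq!(casted.iter().collect::<Vec<_>>(), vec![Some(&4), None, None]);
    }
}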
pub(super) fn utf8_to_date32_dyn<O: Offset>(from: &dyn Array) -> Result<Box<dyn Array>> {
let from = from.as_any().downcast_ref().unwrap();
Ok(Box::new(utf8_to_date32::<O>(from)))
}
/// Casts a [`Utf8Array`] to a Date64 primitive, making any uncastable value a Null.
pub fn utf8_to_date64<O: Offset>(from: &Utf8Array<O>) -> PrimitiveArray<i64> {
let iter = from.iter().map(|x| {
x.and_then(|x| {
x.parse::<chrono::NaiveDateTime>()
.ok()
.map(|x| x.timestamp_millis())
})
});
PrimitiveArray::<i64>::from_trusted_len_iter(iter).to(DataType::Date64)
}
pub(super) fn utf8_to_date64_dyn<O: Offset>(from: &dyn Array) -> Result<Box<dyn Array>> {
let from = from.as_any().downcast_ref().unwrap();
Ok(Box::new(utf8_to_date64::<O>(from)))
}
pub(super) fn utf8_to_dictionary_dyn<O: Offset, K: DictionaryKey>(
from: &dyn Array,
) -> Result<Box<dyn Array>> {
let values = from.as_any().downcast_ref().unwrap();
utf8_to_dictionary::<O, K>(values).map(|x| Box::new(x) as Box<dyn Array>)
}
/// Cast [`Utf8Array`] to [`DictionaryArray`], also known as packing.
/// # Errors
/// This function errors if the maximum key is smaller than the number of distinct elements
/// in the array.
pub fn utf8_to_dictionary<O: Offset, K: DictionaryKey>(
from: &Utf8Array<O>,
) -> Result<DictionaryArray<K>> {
let mut array = MutableDictionaryArray::<K, MutableUtf8Array<O>>::new();
array.try_extend(from.iter())?;
Ok(array.into())
}
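// A minimal sketch of dictionary packing, assuming the `DictionaryArray`
// accessors (`keys`, `values`) and `Utf8Array::from_slice` behave as in the
// rest of this crate: repeated strings are stored once in the values array.
#[cfg(test)]
mod utf8_to_dictionary_example {
    use super::*;

    #[test]
    fn repeated_values_are_stored_once() {
        let array = Utf8Array::<i32>::from_slice(&["a", "b", "a"]);
        let dict = utf8_to_dictionary::<i32, i32>(&array).unwrap();
        assert_eq!(dict.len(), 3); // one key per input value
        assert_eq!(dict.values().len(), 2); // only "a" and "b" are stored
    }
}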
pub(super) fn utf8_to_naive_timestamp_ns_dyn<O: Offset>(
from: &dyn Array,
) -> Result<Box<dyn Array>> {
let from = from.as_any().downcast_ref().unwrap();
Ok(Box::new(utf8_to_naive_timestamp_ns::<O>(from)))
}
/// [`crate::temporal_conversions::utf8_to_timestamp_ns`] applied for RFC3339 formatting
pub fn utf8_to_naive_timestamp_ns<O: Offset>(from: &Utf8Array<O>) -> PrimitiveArray<i64> {
utf8_to_naive_timestamp_ns_(from, RFC3339)
}
pub(super) fn utf8_to_timestamp_ns_dyn<O: Offset>(
from: &dyn Array,
timezone: String,
) -> Result<Box<dyn Array>> {
let from = from.as_any().downcast_ref().unwrap();
utf8_to_timestamp_ns::<O>(from, timezone)
.map(Box::new)
.map(|x| x as Box<dyn Array>)
}
/// [`crate::temporal_conversions::utf8_to_timestamp_ns`] applied for RFC3339 formatting
pub fn utf8_to_timestamp_ns<O: Offset>(
from: &Utf8Array<O>,
timezone: String,
) -> Result<PrimitiveArray<i64>> {
utf8_to_timestamp_ns_(from, RFC3339, timezone)
}
/// Converts a [`Utf8Array`] with `i32` offsets into one with `i64` offsets (large utf8).
pub fn utf8_to_large_utf8(from: &Utf8Array<i32>) -> Utf8Array<i64> {
let data_type = Utf8Array::<i64>::default_data_type();
let validity = from.validity().cloned();
let values = from.values().clone();
let offsets = from
.offsets()
.iter()
.map(|x| *x as i64)
.collect::<Vec<_>>()
.into();
// Safety: sound because `offsets` fulfills the same invariants as `from.offsets()`
unsafe { Utf8Array::<i64>::from_data_unchecked(data_type, offsets, values, validity) }
}
/// Converts a [`Utf8Array`] with `i64` offsets into one with `i32` offsets, erroring if the offsets overflow `i32`.
pub fn utf8_large_to_utf8(from: &Utf8Array<i64>) -> Result<Utf8Array<i32>> {
let data_type = Utf8Array::<i32>::default_data_type();
let validity = from.validity().cloned();
let values = from.values().clone();
let _ =
i32::try_from(*from.offsets().last().unwrap()).map_err(ArrowError::from_external_error)?;
let offsets = from
.offsets()
.iter()
.map(|x| *x as i32)
.collect::<Vec<_>>()
.into();
// Safety: sound because `offsets` fulfills the same invariants as `from.offsets()`
Ok(unsafe { Utf8Array::<i32>::from_data_unchecked(data_type, offsets, values, validity) })
}
| 33.497207 | 138 | 0.646765 |