hexsha (string, 40 chars) | size (int64, 4–1.05M) | content (string, 4–1.05M chars) | avg_line_length (float64, 1.33–100) | max_line_length (int64, 1–1k) | alphanum_fraction (float64, 0.25–1)
---|---|---|---|---|---
23351f22a5df55daa99a6e22501dd9b174cf1ded | 13,697 |
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files.git)
// DO NOT EDIT
use crate::Accessible;
use crate::AccessibleRole;
use crate::Align;
use crate::Buildable;
use crate::ColorChooser;
use crate::ConstraintTarget;
use crate::LayoutManager;
use crate::Overflow;
use crate::Widget;
use glib::object::Cast;
use glib::object::IsA;
use glib::object::ObjectType as ObjectType_;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use glib::StaticType;
use glib::ToValue;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib::wrapper! {
#[doc(alias = "GtkColorChooserWidget")]
pub struct ColorChooserWidget(Object<ffi::GtkColorChooserWidget>) @extends Widget, @implements Accessible, Buildable, ConstraintTarget, ColorChooser;
match fn {
type_ => || ffi::gtk_color_chooser_widget_get_type(),
}
}
impl ColorChooserWidget {
#[doc(alias = "gtk_color_chooser_widget_new")]
pub fn new() -> ColorChooserWidget {
assert_initialized_main_thread!();
unsafe { Widget::from_glib_none(ffi::gtk_color_chooser_widget_new()).unsafe_cast() }
}
// rustdoc-stripper-ignore-next
/// Creates a new builder-pattern struct instance to construct [`ColorChooserWidget`] objects.
///
/// This method returns an instance of [`ColorChooserWidgetBuilder`] which can be used to create [`ColorChooserWidget`] objects.
pub fn builder() -> ColorChooserWidgetBuilder {
ColorChooserWidgetBuilder::default()
}
#[doc(alias = "show-editor")]
pub fn shows_editor(&self) -> bool {
unsafe {
let mut value = glib::Value::from_type(<bool as StaticType>::static_type());
glib::gobject_ffi::g_object_get_property(
self.as_ptr() as *mut glib::gobject_ffi::GObject,
b"show-editor\0".as_ptr() as *const _,
value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `show-editor` getter")
}
}
#[doc(alias = "show-editor")]
pub fn set_show_editor(&self, show_editor: bool) {
unsafe {
glib::gobject_ffi::g_object_set_property(
self.as_ptr() as *mut glib::gobject_ffi::GObject,
b"show-editor\0".as_ptr() as *const _,
show_editor.to_value().to_glib_none().0,
);
}
}
#[doc(alias = "show-editor")]
pub fn connect_show_editor_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_show_editor_trampoline<F: Fn(&ColorChooserWidget) + 'static>(
this: *mut ffi::GtkColorChooserWidget,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this))
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::show-editor\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_show_editor_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl Default for ColorChooserWidget {
fn default() -> Self {
Self::new()
}
}
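// Sketch (illustrative): constructing a `ColorChooserWidget` with the builder
// defined below and reacting to changes of its `show-editor` property. This
// assumes GTK has been initialized on the main thread (e.g. inside an
// application's `activate` handler); the property values are arbitrary.
#[cfg(test)]
#[allow(dead_code)]
fn color_chooser_widget_sketch() {
    let chooser = ColorChooserWidget::builder()
        .show_editor(false)
        .margin_top(12)
        .build();
    chooser.connect_show_editor_notify(|w| {
        println!("show-editor is now {}", w.shows_editor());
    });
    chooser.set_show_editor(true);
}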
#[derive(Clone, Default)]
// rustdoc-stripper-ignore-next
/// A [builder-pattern] type to construct [`ColorChooserWidget`] objects.
///
/// [builder-pattern]: https://doc.rust-lang.org/1.0.0/style/ownership/builders.html
pub struct ColorChooserWidgetBuilder {
show_editor: Option<bool>,
can_focus: Option<bool>,
can_target: Option<bool>,
css_classes: Option<Vec<String>>,
css_name: Option<String>,
cursor: Option<gdk::Cursor>,
focus_on_click: Option<bool>,
focusable: Option<bool>,
halign: Option<Align>,
has_tooltip: Option<bool>,
height_request: Option<i32>,
hexpand: Option<bool>,
hexpand_set: Option<bool>,
layout_manager: Option<LayoutManager>,
margin_bottom: Option<i32>,
margin_end: Option<i32>,
margin_start: Option<i32>,
margin_top: Option<i32>,
name: Option<String>,
opacity: Option<f64>,
overflow: Option<Overflow>,
receives_default: Option<bool>,
sensitive: Option<bool>,
tooltip_markup: Option<String>,
tooltip_text: Option<String>,
valign: Option<Align>,
vexpand: Option<bool>,
vexpand_set: Option<bool>,
visible: Option<bool>,
width_request: Option<i32>,
accessible_role: Option<AccessibleRole>,
rgba: Option<gdk::RGBA>,
use_alpha: Option<bool>,
}
impl ColorChooserWidgetBuilder {
// rustdoc-stripper-ignore-next
/// Create a new [`ColorChooserWidgetBuilder`].
pub fn new() -> Self {
Self::default()
}
// rustdoc-stripper-ignore-next
/// Build the [`ColorChooserWidget`].
pub fn build(self) -> ColorChooserWidget {
let mut properties: Vec<(&str, &dyn ToValue)> = vec![];
if let Some(ref show_editor) = self.show_editor {
properties.push(("show-editor", show_editor));
}
if let Some(ref can_focus) = self.can_focus {
properties.push(("can-focus", can_focus));
}
if let Some(ref can_target) = self.can_target {
properties.push(("can-target", can_target));
}
if let Some(ref css_classes) = self.css_classes {
properties.push(("css-classes", css_classes));
}
if let Some(ref css_name) = self.css_name {
properties.push(("css-name", css_name));
}
if let Some(ref cursor) = self.cursor {
properties.push(("cursor", cursor));
}
if let Some(ref focus_on_click) = self.focus_on_click {
properties.push(("focus-on-click", focus_on_click));
}
if let Some(ref focusable) = self.focusable {
properties.push(("focusable", focusable));
}
if let Some(ref halign) = self.halign {
properties.push(("halign", halign));
}
if let Some(ref has_tooltip) = self.has_tooltip {
properties.push(("has-tooltip", has_tooltip));
}
if let Some(ref height_request) = self.height_request {
properties.push(("height-request", height_request));
}
if let Some(ref hexpand) = self.hexpand {
properties.push(("hexpand", hexpand));
}
if let Some(ref hexpand_set) = self.hexpand_set {
properties.push(("hexpand-set", hexpand_set));
}
if let Some(ref layout_manager) = self.layout_manager {
properties.push(("layout-manager", layout_manager));
}
if let Some(ref margin_bottom) = self.margin_bottom {
properties.push(("margin-bottom", margin_bottom));
}
if let Some(ref margin_end) = self.margin_end {
properties.push(("margin-end", margin_end));
}
if let Some(ref margin_start) = self.margin_start {
properties.push(("margin-start", margin_start));
}
if let Some(ref margin_top) = self.margin_top {
properties.push(("margin-top", margin_top));
}
if let Some(ref name) = self.name {
properties.push(("name", name));
}
if let Some(ref opacity) = self.opacity {
properties.push(("opacity", opacity));
}
if let Some(ref overflow) = self.overflow {
properties.push(("overflow", overflow));
}
if let Some(ref receives_default) = self.receives_default {
properties.push(("receives-default", receives_default));
}
if let Some(ref sensitive) = self.sensitive {
properties.push(("sensitive", sensitive));
}
if let Some(ref tooltip_markup) = self.tooltip_markup {
properties.push(("tooltip-markup", tooltip_markup));
}
if let Some(ref tooltip_text) = self.tooltip_text {
properties.push(("tooltip-text", tooltip_text));
}
if let Some(ref valign) = self.valign {
properties.push(("valign", valign));
}
if let Some(ref vexpand) = self.vexpand {
properties.push(("vexpand", vexpand));
}
if let Some(ref vexpand_set) = self.vexpand_set {
properties.push(("vexpand-set", vexpand_set));
}
if let Some(ref visible) = self.visible {
properties.push(("visible", visible));
}
if let Some(ref width_request) = self.width_request {
properties.push(("width-request", width_request));
}
if let Some(ref accessible_role) = self.accessible_role {
properties.push(("accessible-role", accessible_role));
}
if let Some(ref rgba) = self.rgba {
properties.push(("rgba", rgba));
}
if let Some(ref use_alpha) = self.use_alpha {
properties.push(("use-alpha", use_alpha));
}
glib::Object::new::<ColorChooserWidget>(&properties)
.expect("Failed to create an instance of ColorChooserWidget")
}
pub fn show_editor(mut self, show_editor: bool) -> Self {
self.show_editor = Some(show_editor);
self
}
pub fn can_focus(mut self, can_focus: bool) -> Self {
self.can_focus = Some(can_focus);
self
}
pub fn can_target(mut self, can_target: bool) -> Self {
self.can_target = Some(can_target);
self
}
pub fn css_classes(mut self, css_classes: Vec<String>) -> Self {
self.css_classes = Some(css_classes);
self
}
pub fn css_name(mut self, css_name: &str) -> Self {
self.css_name = Some(css_name.to_string());
self
}
pub fn cursor(mut self, cursor: &gdk::Cursor) -> Self {
self.cursor = Some(cursor.clone());
self
}
pub fn focus_on_click(mut self, focus_on_click: bool) -> Self {
self.focus_on_click = Some(focus_on_click);
self
}
pub fn focusable(mut self, focusable: bool) -> Self {
self.focusable = Some(focusable);
self
}
pub fn halign(mut self, halign: Align) -> Self {
self.halign = Some(halign);
self
}
pub fn has_tooltip(mut self, has_tooltip: bool) -> Self {
self.has_tooltip = Some(has_tooltip);
self
}
pub fn height_request(mut self, height_request: i32) -> Self {
self.height_request = Some(height_request);
self
}
pub fn hexpand(mut self, hexpand: bool) -> Self {
self.hexpand = Some(hexpand);
self
}
pub fn hexpand_set(mut self, hexpand_set: bool) -> Self {
self.hexpand_set = Some(hexpand_set);
self
}
pub fn layout_manager<P: IsA<LayoutManager>>(mut self, layout_manager: &P) -> Self {
self.layout_manager = Some(layout_manager.clone().upcast());
self
}
pub fn margin_bottom(mut self, margin_bottom: i32) -> Self {
self.margin_bottom = Some(margin_bottom);
self
}
pub fn margin_end(mut self, margin_end: i32) -> Self {
self.margin_end = Some(margin_end);
self
}
pub fn margin_start(mut self, margin_start: i32) -> Self {
self.margin_start = Some(margin_start);
self
}
pub fn margin_top(mut self, margin_top: i32) -> Self {
self.margin_top = Some(margin_top);
self
}
pub fn name(mut self, name: &str) -> Self {
self.name = Some(name.to_string());
self
}
pub fn opacity(mut self, opacity: f64) -> Self {
self.opacity = Some(opacity);
self
}
pub fn overflow(mut self, overflow: Overflow) -> Self {
self.overflow = Some(overflow);
self
}
pub fn receives_default(mut self, receives_default: bool) -> Self {
self.receives_default = Some(receives_default);
self
}
pub fn sensitive(mut self, sensitive: bool) -> Self {
self.sensitive = Some(sensitive);
self
}
pub fn tooltip_markup(mut self, tooltip_markup: &str) -> Self {
self.tooltip_markup = Some(tooltip_markup.to_string());
self
}
pub fn tooltip_text(mut self, tooltip_text: &str) -> Self {
self.tooltip_text = Some(tooltip_text.to_string());
self
}
pub fn valign(mut self, valign: Align) -> Self {
self.valign = Some(valign);
self
}
pub fn vexpand(mut self, vexpand: bool) -> Self {
self.vexpand = Some(vexpand);
self
}
pub fn vexpand_set(mut self, vexpand_set: bool) -> Self {
self.vexpand_set = Some(vexpand_set);
self
}
pub fn visible(mut self, visible: bool) -> Self {
self.visible = Some(visible);
self
}
pub fn width_request(mut self, width_request: i32) -> Self {
self.width_request = Some(width_request);
self
}
pub fn accessible_role(mut self, accessible_role: AccessibleRole) -> Self {
self.accessible_role = Some(accessible_role);
self
}
pub fn rgba(mut self, rgba: &gdk::RGBA) -> Self {
self.rgba = Some(rgba.clone());
self
}
pub fn use_alpha(mut self, use_alpha: bool) -> Self {
self.use_alpha = Some(use_alpha);
self
}
}
impl fmt::Display for ColorChooserWidget {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("ColorChooserWidget")
}
}
| 31.706019 | 153 | 0.594145 |
562f313e70839af6883071529ad9914ea8e46fe9 | 2,229 |
use crate::{
container::ReferenceSequenceId,
num::{Itf8, Ltf8},
};
use super::Header;
#[derive(Debug, Default)]
pub struct Builder {
length: i32,
reference_sequence_id: ReferenceSequenceId,
start_position: Itf8,
alignment_span: Itf8,
record_count: Itf8,
record_counter: Ltf8,
base_count: Ltf8,
block_count: Itf8,
landmarks: Vec<Itf8>,
crc32: u32,
}
impl Builder {
pub fn set_length(mut self, length: i32) -> Self {
self.length = length;
self
}
pub fn set_reference_sequence_id(mut self, reference_sequence_id: ReferenceSequenceId) -> Self {
self.reference_sequence_id = reference_sequence_id;
self
}
pub fn set_start_position(mut self, start_position: Itf8) -> Self {
self.start_position = start_position;
self
}
pub fn set_alignment_span(mut self, alignment_span: Itf8) -> Self {
self.alignment_span = alignment_span;
self
}
pub fn set_record_count(mut self, record_count: Itf8) -> Self {
self.record_count = record_count;
self
}
pub fn set_record_counter(mut self, record_counter: Ltf8) -> Self {
self.record_counter = record_counter;
self
}
pub fn set_base_count(mut self, base_count: Ltf8) -> Self {
self.base_count = base_count;
self
}
pub fn set_block_count(mut self, block_count: Itf8) -> Self {
self.block_count = block_count;
self
}
pub fn set_landmarks(mut self, landmarks: Vec<Itf8>) -> Self {
self.landmarks = landmarks;
self
}
pub fn set_crc32(mut self, crc32: u32) -> Self {
self.crc32 = crc32;
self
}
pub fn build(self) -> Header {
Header {
length: self.length,
reference_sequence_id: self.reference_sequence_id,
start_position: self.start_position,
alignment_span: self.alignment_span,
record_count: self.record_count,
record_counter: self.record_counter,
base_count: self.base_count,
block_count: self.block_count,
landmarks: self.landmarks,
crc32: self.crc32,
}
}
}
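// Sketch (illustrative): assembling a container `Header` with the builder
// above. The values are arbitrary, and only fields with concrete integer
// types are set so the example does not depend on the `Itf8`/`Ltf8` aliases.
#[cfg(test)]
#[allow(dead_code)]
fn builder_sketch() -> Header {
    Builder::default()
        .set_length(144)
        .set_crc32(0xDEAD_BEEF)
        .build()
}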
| 25.329545 | 100 | 0.617317 |
ab02559175302f8994581e3d8967ababbba24b48 | 52,287 |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! This module provides an `Expr` enum for representing expressions
//! such as `col = 5` or `SUM(col)`. See examples on the [`Expr`] struct.
pub use super::Operator;
use std::fmt;
use std::sync::Arc;
use aggregates::{AccumulatorFunctionImplementation, StateTypeFunction};
use arrow::{compute::can_cast_types, datatypes::DataType};
use crate::error::{DataFusionError, Result};
use crate::logical_plan::{DFField, DFSchema};
use crate::physical_plan::{
aggregates, expressions::binary_operator_data_type, functions, udf::ScalarUDF,
window_functions,
};
use crate::{physical_plan::udaf::AggregateUDF, scalar::ScalarValue};
use functions::{ReturnTypeFunction, ScalarFunctionImplementation, Signature};
use std::collections::HashSet;
/// `Expr` is a central struct of DataFusion's query API, and
/// represent logical expressions such as `A + 1`, or `CAST(c1 AS
/// int)`.
///
/// An `Expr` can compute its [DataType](arrow::datatypes::DataType)
/// and nullability, and has functions for building up complex
/// expressions.
///
/// # Examples
///
/// ## Create an expression `c1` referring to column named "c1"
/// ```
/// # use datafusion::logical_plan::*;
/// let expr = col("c1");
/// assert_eq!(expr, Expr::Column("c1".to_string()));
/// ```
///
/// ## Create the expression `c1 + c2` to add columns "c1" and "c2" together
/// ```
/// # use datafusion::logical_plan::*;
/// let expr = col("c1") + col("c2");
///
/// assert!(matches!(expr, Expr::BinaryExpr { ..} ));
/// if let Expr::BinaryExpr { left, right, op } = expr {
/// assert_eq!(*left, col("c1"));
/// assert_eq!(*right, col("c2"));
/// assert_eq!(op, Operator::Plus);
/// }
/// ```
///
/// ## Create expression `c1 = 42` to compare the value in column "c1" to the literal value `42`
/// ```
/// # use datafusion::logical_plan::*;
/// # use datafusion::scalar::*;
/// let expr = col("c1").eq(lit(42));
///
/// assert!(matches!(expr, Expr::BinaryExpr { ..} ));
/// if let Expr::BinaryExpr { left, right, op } = expr {
/// assert_eq!(*left, col("c1"));
/// let scalar = ScalarValue::Int32(Some(42));
/// assert_eq!(*right, Expr::Literal(scalar));
/// assert_eq!(op, Operator::Eq);
/// }
/// ```
#[derive(Clone, PartialEq)]
pub enum Expr {
/// An expression with a specific name.
Alias(Box<Expr>, String),
/// A named reference to a field in a schema.
Column(String),
/// A named reference to a variable in a registry.
ScalarVariable(Vec<String>),
/// A constant value.
Literal(ScalarValue),
/// A binary expression such as "age > 21"
BinaryExpr {
/// Left-hand side of the expression
left: Box<Expr>,
/// The comparison operator
op: Operator,
/// Right-hand side of the expression
right: Box<Expr>,
},
/// Negation of an expression. The expression's type must be a boolean to make sense.
Not(Box<Expr>),
/// Whether an expression is not Null. This expression is never null.
IsNotNull(Box<Expr>),
/// Whether an expression is Null. This expression is never null.
IsNull(Box<Expr>),
/// arithmetic negation of an expression, the operand must be of a signed numeric data type
Negative(Box<Expr>),
/// Whether an expression is between a given range.
Between {
/// The value to compare
expr: Box<Expr>,
/// Whether the expression is negated
negated: bool,
/// The low end of the range
low: Box<Expr>,
/// The high end of the range
high: Box<Expr>,
},
/// The CASE expression is similar to a series of nested if/else and there are two forms that
/// can be used. The first form consists of a series of boolean "when" expressions with
/// corresponding "then" expressions, and an optional "else" expression.
///
/// CASE WHEN condition THEN result
/// [WHEN ...]
/// [ELSE result]
/// END
///
/// The second form uses a base expression and then a series of "when" clauses that match on a
/// literal value.
///
/// CASE expression
/// WHEN value THEN result
/// [WHEN ...]
/// [ELSE result]
/// END
Case {
/// Optional base expression that can be compared to literal values in the "when" expressions
expr: Option<Box<Expr>>,
/// One or more when/then expressions
when_then_expr: Vec<(Box<Expr>, Box<Expr>)>,
/// Optional "else" expression
else_expr: Option<Box<Expr>>,
},
/// Casts the expression to a given type and will return a runtime error if the expression cannot be cast.
/// This expression is guaranteed to have a fixed type.
Cast {
/// The expression being cast
expr: Box<Expr>,
/// The `DataType` the expression will yield
data_type: DataType,
},
/// Casts the expression to a given type and will return a null value if the expression cannot be cast.
/// This expression is guaranteed to have a fixed type.
TryCast {
/// The expression being cast
expr: Box<Expr>,
/// The `DataType` the expression will yield
data_type: DataType,
},
/// A sort expression, that can be used to sort values.
Sort {
/// The expression to sort on
expr: Box<Expr>,
/// The direction of the sort
asc: bool,
/// Whether to put Nulls before all other data values
nulls_first: bool,
},
/// Represents the call of a built-in scalar function with a set of arguments.
ScalarFunction {
/// The function
fun: functions::BuiltinScalarFunction,
/// List of expressions to feed to the functions as arguments
args: Vec<Expr>,
},
/// Represents the call of a user-defined scalar function with arguments.
ScalarUDF {
/// The function
fun: Arc<ScalarUDF>,
/// List of expressions to feed to the functions as arguments
args: Vec<Expr>,
},
/// Represents the call of an aggregate built-in function with arguments.
AggregateFunction {
/// Name of the function
fun: aggregates::AggregateFunction,
/// List of expressions to feed to the functions as arguments
args: Vec<Expr>,
/// Whether this is a DISTINCT aggregation or not
distinct: bool,
},
/// Represents the call of a window function with arguments.
WindowFunction {
/// Name of the function
fun: window_functions::WindowFunction,
/// List of expressions to feed to the functions as arguments
args: Vec<Expr>,
},
/// aggregate function
AggregateUDF {
/// The function
fun: Arc<AggregateUDF>,
/// List of expressions to feed to the functions as arguments
args: Vec<Expr>,
},
/// Returns whether the list contains the expr value.
InList {
/// The expression to compare
expr: Box<Expr>,
/// A list of values to compare against
list: Vec<Expr>,
/// Whether the expression is negated
negated: bool,
},
/// Represents a reference to all fields in a schema.
Wildcard,
}
impl Expr {
/// Returns the [arrow::datatypes::DataType] of the expression based on [arrow::datatypes::Schema].
///
/// # Errors
///
/// This function errors when it is not possible to compute its [arrow::datatypes::DataType].
/// This happens when e.g. the expression refers to a column that does not exist in the schema, or when
/// the expression is incorrectly typed (e.g. `[utf8] + [bool]`).
pub fn get_type(&self, schema: &DFSchema) -> Result<DataType> {
match self {
Expr::Alias(expr, _) => expr.get_type(schema),
Expr::Column(name) => Ok(schema
.field_with_unqualified_name(name)?
.data_type()
.clone()),
Expr::ScalarVariable(_) => Ok(DataType::Utf8),
Expr::Literal(l) => Ok(l.get_datatype()),
Expr::Case { when_then_expr, .. } => when_then_expr[0].1.get_type(schema),
Expr::Cast { data_type, .. } => Ok(data_type.clone()),
Expr::TryCast { data_type, .. } => Ok(data_type.clone()),
Expr::ScalarUDF { fun, args } => {
let data_types = args
.iter()
.map(|e| e.get_type(schema))
.collect::<Result<Vec<_>>>()?;
Ok((fun.return_type)(&data_types)?.as_ref().clone())
}
Expr::ScalarFunction { fun, args } => {
let data_types = args
.iter()
.map(|e| e.get_type(schema))
.collect::<Result<Vec<_>>>()?;
functions::return_type(fun, &data_types)
}
Expr::WindowFunction { fun, args, .. } => {
let data_types = args
.iter()
.map(|e| e.get_type(schema))
.collect::<Result<Vec<_>>>()?;
window_functions::return_type(fun, &data_types)
}
Expr::AggregateFunction { fun, args, .. } => {
let data_types = args
.iter()
.map(|e| e.get_type(schema))
.collect::<Result<Vec<_>>>()?;
aggregates::return_type(fun, &data_types)
}
Expr::AggregateUDF { fun, args, .. } => {
let data_types = args
.iter()
.map(|e| e.get_type(schema))
.collect::<Result<Vec<_>>>()?;
Ok((fun.return_type)(&data_types)?.as_ref().clone())
}
Expr::Not(_) => Ok(DataType::Boolean),
Expr::Negative(expr) => expr.get_type(schema),
Expr::IsNull(_) => Ok(DataType::Boolean),
Expr::IsNotNull(_) => Ok(DataType::Boolean),
Expr::BinaryExpr {
ref left,
ref right,
ref op,
} => binary_operator_data_type(
&left.get_type(schema)?,
op,
&right.get_type(schema)?,
),
Expr::Sort { ref expr, .. } => expr.get_type(schema),
Expr::Between { .. } => Ok(DataType::Boolean),
Expr::InList { .. } => Ok(DataType::Boolean),
Expr::Wildcard => Err(DataFusionError::Internal(
"Wildcard expressions are not valid in a logical query plan".to_owned(),
)),
}
}
/// Returns the nullability of the expression based on [arrow::datatypes::Schema].
///
/// # Errors
///
/// This function errors when it is not possible to compute its nullability.
/// This happens when the expression refers to a column that does not exist in the schema.
pub fn nullable(&self, input_schema: &DFSchema) -> Result<bool> {
match self {
Expr::Alias(expr, _) => expr.nullable(input_schema),
Expr::Column(name) => Ok(input_schema
.field_with_unqualified_name(name)?
.is_nullable()),
Expr::Literal(value) => Ok(value.is_null()),
Expr::ScalarVariable(_) => Ok(true),
Expr::Case {
when_then_expr,
else_expr,
..
} => {
// this expression is nullable if any of the input expressions are nullable
let then_nullable = when_then_expr
.iter()
.map(|(_, t)| t.nullable(input_schema))
.collect::<Result<Vec<_>>>()?;
if then_nullable.contains(&true) {
Ok(true)
} else if let Some(e) = else_expr {
e.nullable(input_schema)
} else {
Ok(false)
}
}
Expr::Cast { expr, .. } => expr.nullable(input_schema),
Expr::TryCast { .. } => Ok(true),
Expr::ScalarFunction { .. } => Ok(true),
Expr::ScalarUDF { .. } => Ok(true),
Expr::WindowFunction { .. } => Ok(true),
Expr::AggregateFunction { .. } => Ok(true),
Expr::AggregateUDF { .. } => Ok(true),
Expr::Not(expr) => expr.nullable(input_schema),
Expr::Negative(expr) => expr.nullable(input_schema),
Expr::IsNull(_) => Ok(false),
Expr::IsNotNull(_) => Ok(false),
Expr::BinaryExpr {
ref left,
ref right,
..
} => Ok(left.nullable(input_schema)? || right.nullable(input_schema)?),
Expr::Sort { ref expr, .. } => expr.nullable(input_schema),
Expr::Between { ref expr, .. } => expr.nullable(input_schema),
Expr::InList { ref expr, .. } => expr.nullable(input_schema),
Expr::Wildcard => Err(DataFusionError::Internal(
"Wildcard expressions are not valid in a logical query plan".to_owned(),
)),
}
}
/// Returns the name of this expression based on [arrow::datatypes::Schema].
///
/// This represents how a column with this expression is named when no alias is chosen
pub fn name(&self, input_schema: &DFSchema) -> Result<String> {
create_name(self, input_schema)
}
/// Returns a [arrow::datatypes::Field] compatible with this expression.
pub fn to_field(&self, input_schema: &DFSchema) -> Result<DFField> {
Ok(DFField::new(
None, //TODO qualifier
&self.name(input_schema)?,
self.get_type(input_schema)?,
self.nullable(input_schema)?,
))
}
/// Wraps this expression in a cast to a target [arrow::datatypes::DataType].
///
/// # Errors
///
/// This function errors when it is impossible to cast the
/// expression to the target [arrow::datatypes::DataType].
pub fn cast_to(self, cast_to_type: &DataType, schema: &DFSchema) -> Result<Expr> {
let this_type = self.get_type(schema)?;
if this_type == *cast_to_type {
Ok(self)
} else if can_cast_types(&this_type, cast_to_type) {
Ok(Expr::Cast {
expr: Box::new(self),
data_type: cast_to_type.clone(),
})
} else {
Err(DataFusionError::Plan(format!(
"Cannot automatically convert {:?} to {:?}",
this_type, cast_to_type
)))
}
}
/// Return `self == other`
pub fn eq(self, other: Expr) -> Expr {
binary_expr(self, Operator::Eq, other)
}
/// Return `self != other`
pub fn not_eq(self, other: Expr) -> Expr {
binary_expr(self, Operator::NotEq, other)
}
/// Return `self > other`
pub fn gt(self, other: Expr) -> Expr {
binary_expr(self, Operator::Gt, other)
}
/// Return `self >= other`
pub fn gt_eq(self, other: Expr) -> Expr {
binary_expr(self, Operator::GtEq, other)
}
/// Return `self < other`
pub fn lt(self, other: Expr) -> Expr {
binary_expr(self, Operator::Lt, other)
}
/// Return `self <= other`
pub fn lt_eq(self, other: Expr) -> Expr {
binary_expr(self, Operator::LtEq, other)
}
/// Return `self && other`
pub fn and(self, other: Expr) -> Expr {
binary_expr(self, Operator::And, other)
}
/// Return `self || other`
pub fn or(self, other: Expr) -> Expr {
binary_expr(self, Operator::Or, other)
}
/// Return `!self`
#[allow(clippy::should_implement_trait)]
pub fn not(self) -> Expr {
Expr::Not(Box::new(self))
}
/// Calculate the modulus of two expressions.
/// Return `self % other`
pub fn modulus(self, other: Expr) -> Expr {
binary_expr(self, Operator::Modulus, other)
}
/// Return `self LIKE other`
pub fn like(self, other: Expr) -> Expr {
binary_expr(self, Operator::Like, other)
}
/// Return `self NOT LIKE other`
pub fn not_like(self, other: Expr) -> Expr {
binary_expr(self, Operator::NotLike, other)
}
/// Return `self AS name` alias expression
pub fn alias(self, name: &str) -> Expr {
Expr::Alias(Box::new(self), name.to_owned())
}
/// Return `self IN <list>` if `negated` is false, otherwise
    /// return `self NOT IN <list>`.
pub fn in_list(self, list: Vec<Expr>, negated: bool) -> Expr {
Expr::InList {
expr: Box::new(self),
list,
negated,
}
}
    /// Return `IsNull(Box(self))`
#[allow(clippy::wrong_self_convention)]
pub fn is_null(self) -> Expr {
Expr::IsNull(Box::new(self))
}
    /// Return `IsNotNull(Box(self))`
#[allow(clippy::wrong_self_convention)]
pub fn is_not_null(self) -> Expr {
Expr::IsNotNull(Box::new(self))
}
/// Create a sort expression from an existing expression.
///
/// ```
/// # use datafusion::logical_plan::col;
/// let sort_expr = col("foo").sort(true, true); // SORT ASC NULLS_FIRST
/// ```
pub fn sort(self, asc: bool, nulls_first: bool) -> Expr {
Expr::Sort {
expr: Box::new(self),
asc,
nulls_first,
}
}
/// Performs a depth first walk of an expression and
/// its children, calling [`ExpressionVisitor::pre_visit`] and
/// `visitor.post_visit`.
///
/// Implements the [visitor pattern](https://en.wikipedia.org/wiki/Visitor_pattern) to
/// separate expression algorithms from the structure of the
/// `Expr` tree and make it easier to add new types of expressions
/// and algorithms that walk the tree.
///
/// For an expression tree such as
/// ```text
/// BinaryExpr (GT)
/// left: Column("foo")
/// right: Column("bar")
/// ```
///
/// The nodes are visited using the following order
/// ```text
/// pre_visit(BinaryExpr(GT))
/// pre_visit(Column("foo"))
    /// post_visit(Column("foo"))
    /// pre_visit(Column("bar"))
    /// post_visit(Column("bar"))
/// post_visit(BinaryExpr(GT))
/// ```
///
/// If an Err result is returned, recursion is stopped immediately
///
/// If `Recursion::Stop` is returned on a call to pre_visit, no
/// children of that expression are visited, nor is post_visit
/// called on that expression
///
pub fn accept<V: ExpressionVisitor>(&self, visitor: V) -> Result<V> {
let visitor = match visitor.pre_visit(self)? {
Recursion::Continue(visitor) => visitor,
// If the recursion should stop, do not visit children
Recursion::Stop(visitor) => return Ok(visitor),
};
// recurse (and cover all expression types)
let visitor = match self {
Expr::Alias(expr, _) => expr.accept(visitor),
Expr::Column(..) => Ok(visitor),
Expr::ScalarVariable(..) => Ok(visitor),
Expr::Literal(..) => Ok(visitor),
Expr::BinaryExpr { left, right, .. } => {
let visitor = left.accept(visitor)?;
right.accept(visitor)
}
Expr::Not(expr) => expr.accept(visitor),
Expr::IsNotNull(expr) => expr.accept(visitor),
Expr::IsNull(expr) => expr.accept(visitor),
Expr::Negative(expr) => expr.accept(visitor),
Expr::Between {
expr, low, high, ..
} => {
let visitor = expr.accept(visitor)?;
let visitor = low.accept(visitor)?;
high.accept(visitor)
}
Expr::Case {
expr,
when_then_expr,
else_expr,
} => {
let visitor = if let Some(expr) = expr.as_ref() {
expr.accept(visitor)
} else {
Ok(visitor)
}?;
let visitor = when_then_expr.iter().try_fold(
visitor,
|visitor, (when, then)| {
let visitor = when.accept(visitor)?;
then.accept(visitor)
},
)?;
if let Some(else_expr) = else_expr.as_ref() {
else_expr.accept(visitor)
} else {
Ok(visitor)
}
}
Expr::Cast { expr, .. } => expr.accept(visitor),
Expr::TryCast { expr, .. } => expr.accept(visitor),
Expr::Sort { expr, .. } => expr.accept(visitor),
Expr::ScalarFunction { args, .. } => args
.iter()
.try_fold(visitor, |visitor, arg| arg.accept(visitor)),
Expr::ScalarUDF { args, .. } => args
.iter()
.try_fold(visitor, |visitor, arg| arg.accept(visitor)),
Expr::WindowFunction { args, .. } => args
.iter()
.try_fold(visitor, |visitor, arg| arg.accept(visitor)),
Expr::AggregateFunction { args, .. } => args
.iter()
.try_fold(visitor, |visitor, arg| arg.accept(visitor)),
Expr::AggregateUDF { args, .. } => args
.iter()
.try_fold(visitor, |visitor, arg| arg.accept(visitor)),
Expr::InList { expr, list, .. } => {
let visitor = expr.accept(visitor)?;
list.iter()
.try_fold(visitor, |visitor, arg| arg.accept(visitor))
}
Expr::Wildcard => Ok(visitor),
}?;
visitor.post_visit(self)
}
/// Performs a depth first walk of an expression and its children
/// to rewrite an expression, consuming `self` producing a new
/// [`Expr`].
///
/// Implements a modified version of the [visitor
/// pattern](https://en.wikipedia.org/wiki/Visitor_pattern) to
/// separate algorithms from the structure of the `Expr` tree and
/// make it easier to write new, efficient expression
/// transformation algorithms.
///
/// For an expression tree such as
/// ```text
/// BinaryExpr (GT)
/// left: Column("foo")
/// right: Column("bar")
/// ```
///
/// The nodes are visited using the following order
/// ```text
/// pre_visit(BinaryExpr(GT))
/// pre_visit(Column("foo"))
    /// mutate(Column("foo"))
/// pre_visit(Column("bar"))
/// mutate(Column("bar"))
/// mutate(BinaryExpr(GT))
/// ```
///
/// If an Err result is returned, recursion is stopped immediately
///
/// If [`false`] is returned on a call to pre_visit, no
/// children of that expression are visited, nor is mutate
/// called on that expression
///
pub fn rewrite<R>(self, rewriter: &mut R) -> Result<Self>
where
R: ExprRewriter,
{
if !rewriter.pre_visit(&self)? {
return Ok(self);
};
// recurse into all sub expressions(and cover all expression types)
let expr = match self {
Expr::Alias(expr, name) => Expr::Alias(rewrite_boxed(expr, rewriter)?, name),
Expr::Column(name) => Expr::Column(name),
Expr::ScalarVariable(names) => Expr::ScalarVariable(names),
Expr::Literal(value) => Expr::Literal(value),
Expr::BinaryExpr { left, op, right } => Expr::BinaryExpr {
left: rewrite_boxed(left, rewriter)?,
op,
right: rewrite_boxed(right, rewriter)?,
},
Expr::Not(expr) => Expr::Not(rewrite_boxed(expr, rewriter)?),
Expr::IsNotNull(expr) => Expr::IsNotNull(rewrite_boxed(expr, rewriter)?),
Expr::IsNull(expr) => Expr::IsNull(rewrite_boxed(expr, rewriter)?),
Expr::Negative(expr) => Expr::Negative(rewrite_boxed(expr, rewriter)?),
Expr::Between {
expr,
low,
high,
negated,
} => Expr::Between {
expr: rewrite_boxed(expr, rewriter)?,
low: rewrite_boxed(low, rewriter)?,
high: rewrite_boxed(high, rewriter)?,
negated,
},
Expr::Case {
expr,
when_then_expr,
else_expr,
} => {
let expr = rewrite_option_box(expr, rewriter)?;
let when_then_expr = when_then_expr
.into_iter()
.map(|(when, then)| {
Ok((
rewrite_boxed(when, rewriter)?,
rewrite_boxed(then, rewriter)?,
))
})
.collect::<Result<Vec<_>>>()?;
let else_expr = rewrite_option_box(else_expr, rewriter)?;
Expr::Case {
expr,
when_then_expr,
else_expr,
}
}
Expr::Cast { expr, data_type } => Expr::Cast {
expr: rewrite_boxed(expr, rewriter)?,
data_type,
},
Expr::TryCast { expr, data_type } => Expr::TryCast {
expr: rewrite_boxed(expr, rewriter)?,
data_type,
},
Expr::Sort {
expr,
asc,
nulls_first,
} => Expr::Sort {
expr: rewrite_boxed(expr, rewriter)?,
asc,
nulls_first,
},
Expr::ScalarFunction { args, fun } => Expr::ScalarFunction {
args: rewrite_vec(args, rewriter)?,
fun,
},
Expr::ScalarUDF { args, fun } => Expr::ScalarUDF {
args: rewrite_vec(args, rewriter)?,
fun,
},
Expr::WindowFunction { args, fun } => Expr::WindowFunction {
args: rewrite_vec(args, rewriter)?,
fun,
},
Expr::AggregateFunction {
args,
fun,
distinct,
} => Expr::AggregateFunction {
args: rewrite_vec(args, rewriter)?,
fun,
distinct,
},
Expr::AggregateUDF { args, fun } => Expr::AggregateUDF {
args: rewrite_vec(args, rewriter)?,
fun,
},
Expr::InList {
expr,
list,
negated,
} => Expr::InList {
expr: rewrite_boxed(expr, rewriter)?,
list,
negated,
},
Expr::Wildcard => Expr::Wildcard,
};
// now rewrite this expression itself
rewriter.mutate(expr)
}
}
#[allow(clippy::boxed_local)]
fn rewrite_boxed<R>(boxed_expr: Box<Expr>, rewriter: &mut R) -> Result<Box<Expr>>
where
R: ExprRewriter,
{
// TODO: It might be possible to avoid an allocation (the
// Box::new) below by reusing the box.
let expr: Expr = *boxed_expr;
let rewritten_expr = expr.rewrite(rewriter)?;
Ok(Box::new(rewritten_expr))
}
fn rewrite_option_box<R>(
option_box: Option<Box<Expr>>,
rewriter: &mut R,
) -> Result<Option<Box<Expr>>>
where
R: ExprRewriter,
{
option_box
.map(|expr| rewrite_boxed(expr, rewriter))
.transpose()
}
/// rewrite a `Vec` of `Expr`s with the rewriter
fn rewrite_vec<R>(v: Vec<Expr>, rewriter: &mut R) -> Result<Vec<Expr>>
where
R: ExprRewriter,
{
v.into_iter().map(|expr| expr.rewrite(rewriter)).collect()
}
/// Controls how the visitor recursion should proceed.
pub enum Recursion<V: ExpressionVisitor> {
/// Attempt to visit all the children, recursively, of this expression.
Continue(V),
/// Do not visit the children of this expression, though the walk
/// of parents of this expression will not be affected
Stop(V),
}
/// Encode the traversal of an expression tree. When passed to
/// `Expr::accept`, `ExpressionVisitor::pre_visit` and `ExpressionVisitor::post_visit`
/// are invoked recursively on all nodes of an expression tree. See the comments
/// on `Expr::accept` for details on its use
pub trait ExpressionVisitor: Sized {
    /// Invoked before any children of `expr` are visited.
fn pre_visit(self, expr: &Expr) -> Result<Recursion<Self>>;
/// Invoked after all children of `expr` are visited. Default
/// implementation does nothing.
fn post_visit(self, _expr: &Expr) -> Result<Self> {
Ok(self)
}
}
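// Sketch (illustrative): an `ExpressionVisitor` that collects every column
// name referenced in an expression tree, driven by `Expr::accept`. Returning
// `Recursion::Stop` from `pre_visit` instead would skip the children of that
// node. Gated behind `cfg(test)` because it exists purely as an example.
#[cfg(test)]
#[allow(dead_code)]
struct ColumnCollector {
    columns: Vec<String>,
}

#[cfg(test)]
impl ExpressionVisitor for ColumnCollector {
    fn pre_visit(mut self, expr: &Expr) -> Result<Recursion<Self>> {
        if let Expr::Column(name) = expr {
            self.columns.push(name.clone());
        }
        Ok(Recursion::Continue(self))
    }
}

// Usage (illustrative):
// let visitor = col("a").eq(col("b")).accept(ColumnCollector { columns: vec![] })?;
// assert_eq!(visitor.columns, vec!["a", "b"]);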
/// Trait for potentially recursively rewriting an [`Expr`] expression
/// tree. When passed to `Expr::rewrite`, `ExprRewriter::mutate` is
/// invoked recursively on all nodes of an expression tree. See the
/// comments on `Expr::rewrite` for details on its use
pub trait ExprRewriter: Sized {
/// Invoked before any children of `expr` are rewritten /
/// visited. Default implementation returns `Ok(true)`
fn pre_visit(&mut self, _expr: &Expr) -> Result<bool> {
Ok(true)
}
/// Invoked after all children of `expr` have been mutated and
/// returns a potentially modified expr.
fn mutate(&mut self, expr: Expr) -> Result<Expr>;
}
/// Helper struct for building [`Expr::Case`] expressions; see [`case`] and [`when`].
pub struct CaseBuilder {
expr: Option<Box<Expr>>,
when_expr: Vec<Expr>,
then_expr: Vec<Expr>,
else_expr: Option<Box<Expr>>,
}
impl CaseBuilder {
pub fn when(&mut self, when: Expr, then: Expr) -> CaseBuilder {
self.when_expr.push(when);
self.then_expr.push(then);
CaseBuilder {
expr: self.expr.clone(),
when_expr: self.when_expr.clone(),
then_expr: self.then_expr.clone(),
else_expr: self.else_expr.clone(),
}
}
pub fn otherwise(&mut self, else_expr: Expr) -> Result<Expr> {
self.else_expr = Some(Box::new(else_expr));
self.build()
}
pub fn end(&self) -> Result<Expr> {
self.build()
}
}
impl CaseBuilder {
fn build(&self) -> Result<Expr> {
// collect all "then" expressions
let mut then_expr = self.then_expr.clone();
if let Some(e) = &self.else_expr {
then_expr.push(e.as_ref().to_owned());
}
let then_types: Vec<DataType> = then_expr
.iter()
.map(|e| match e {
Expr::Literal(_) => e.get_type(&DFSchema::empty()),
_ => Ok(DataType::Null),
})
.collect::<Result<Vec<_>>>()?;
if then_types.contains(&DataType::Null) {
            // cannot verify types until execution time
} else {
let unique_types: HashSet<&DataType> = then_types.iter().collect();
if unique_types.len() != 1 {
return Err(DataFusionError::Plan(format!(
"CASE expression 'then' values had multiple data types: {:?}",
unique_types
)));
}
}
Ok(Expr::Case {
expr: self.expr.clone(),
when_then_expr: self
.when_expr
.iter()
.zip(self.then_expr.iter())
.map(|(w, t)| (Box::new(w.clone()), Box::new(t.clone())))
.collect(),
else_expr: self.else_expr.clone(),
})
}
}
/// Create a CASE WHEN statement with literal WHEN expressions for comparison to the base expression.
pub fn case(expr: Expr) -> CaseBuilder {
CaseBuilder {
expr: Some(Box::new(expr)),
when_expr: vec![],
then_expr: vec![],
else_expr: None,
}
}
/// Create a CASE WHEN statement with boolean WHEN expressions and no base expression.
pub fn when(when: Expr, then: Expr) -> CaseBuilder {
CaseBuilder {
expr: None,
when_expr: vec![when],
then_expr: vec![then],
else_expr: None,
}
}
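// Sketch (illustrative): building `CASE WHEN state = 'CO' THEN 303 ELSE 0 END`
// with the helpers above; the column and literal values are arbitrary.
#[cfg(test)]
#[allow(dead_code)]
fn case_when_sketch() -> Result<Expr> {
    when(col("state").eq(lit("CO")), lit(303)).otherwise(lit(0))
}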
/// return a new expression l <op> r
pub fn binary_expr(l: Expr, op: Operator, r: Expr) -> Expr {
Expr::BinaryExpr {
left: Box::new(l),
op,
right: Box::new(r),
}
}
/// return a new expression with a logical AND
pub fn and(left: Expr, right: Expr) -> Expr {
Expr::BinaryExpr {
left: Box::new(left),
op: Operator::And,
right: Box::new(right),
}
}
/// Combines an array of filter expressions into a single filter expression
/// consisting of the input filter expressions joined with logical AND.
/// Returns None if the filters array is empty.
pub fn combine_filters(filters: &[Expr]) -> Option<Expr> {
if filters.is_empty() {
return None;
}
let combined_filter = filters
.iter()
.skip(1)
.fold(filters[0].clone(), |acc, filter| and(acc, filter.clone()));
Some(combined_filter)
}
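// Sketch (illustrative): combining two filters into a single `a > 5 AND b < 10`
// predicate; `combine_filters(&[])` would yield `None`.
#[cfg(test)]
#[allow(dead_code)]
fn combine_filters_sketch() -> Option<Expr> {
    combine_filters(&[col("a").gt(lit(5)), col("b").lt(lit(10))])
}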
/// return a new expression with a logical OR
pub fn or(left: Expr, right: Expr) -> Expr {
Expr::BinaryExpr {
left: Box::new(left),
op: Operator::Or,
right: Box::new(right),
}
}
/// Create a column expression based on a column name
pub fn col(name: &str) -> Expr {
Expr::Column(name.to_owned())
}
/// Create an expression to represent the min() aggregate function
pub fn min(expr: Expr) -> Expr {
Expr::AggregateFunction {
fun: aggregates::AggregateFunction::Min,
distinct: false,
args: vec![expr],
}
}
/// Create an expression to represent the max() aggregate function
pub fn max(expr: Expr) -> Expr {
Expr::AggregateFunction {
fun: aggregates::AggregateFunction::Max,
distinct: false,
args: vec![expr],
}
}
/// Create an expression to represent the sum() aggregate function
pub fn sum(expr: Expr) -> Expr {
Expr::AggregateFunction {
fun: aggregates::AggregateFunction::Sum,
distinct: false,
args: vec![expr],
}
}
/// Create an expression to represent the avg() aggregate function
pub fn avg(expr: Expr) -> Expr {
Expr::AggregateFunction {
fun: aggregates::AggregateFunction::Avg,
distinct: false,
args: vec![expr],
}
}
/// Create an expression to represent the count() aggregate function
pub fn count(expr: Expr) -> Expr {
Expr::AggregateFunction {
fun: aggregates::AggregateFunction::Count,
distinct: false,
args: vec![expr],
}
}
/// Create an expression to represent the count(distinct) aggregate function
pub fn count_distinct(expr: Expr) -> Expr {
Expr::AggregateFunction {
fun: aggregates::AggregateFunction::Count,
distinct: true,
args: vec![expr],
}
}
/// Create an in_list expression
pub fn in_list(expr: Expr, list: Vec<Expr>, negated: bool) -> Expr {
Expr::InList {
expr: Box::new(expr),
list,
negated,
}
}
/// Trait for converting a Rust value into an [`Expr::Literal`] expression.
pub trait Literal {
/// convert the value to a Literal expression
fn lit(&self) -> Expr;
}
impl Literal for &str {
fn lit(&self) -> Expr {
Expr::Literal(ScalarValue::Utf8(Some((*self).to_owned())))
}
}
impl Literal for String {
fn lit(&self) -> Expr {
Expr::Literal(ScalarValue::Utf8(Some((*self).to_owned())))
}
}
impl Literal for ScalarValue {
fn lit(&self) -> Expr {
Expr::Literal(self.clone())
}
}
macro_rules! make_literal {
($TYPE:ty, $SCALAR:ident) => {
#[allow(missing_docs)]
impl Literal for $TYPE {
fn lit(&self) -> Expr {
Expr::Literal(ScalarValue::$SCALAR(Some(self.clone())))
}
}
};
}
make_literal!(bool, Boolean);
make_literal!(f32, Float32);
make_literal!(f64, Float64);
make_literal!(i8, Int8);
make_literal!(i16, Int16);
make_literal!(i32, Int32);
make_literal!(i64, Int64);
make_literal!(u8, UInt8);
make_literal!(u16, UInt16);
make_literal!(u32, UInt32);
make_literal!(u64, UInt64);
/// Create a literal expression
pub fn lit<T: Literal>(n: T) -> Expr {
n.lit()
}
/// Create a convenience function representing a unary scalar function
macro_rules! unary_scalar_expr {
($ENUM:ident, $FUNC:ident) => {
#[allow(missing_docs)]
pub fn $FUNC(e: Expr) -> Expr {
Expr::ScalarFunction {
fun: functions::BuiltinScalarFunction::$ENUM,
args: vec![e],
}
}
};
}
// generate methods for creating the supported unary expressions
// math functions
unary_scalar_expr!(Sqrt, sqrt);
unary_scalar_expr!(Sin, sin);
unary_scalar_expr!(Cos, cos);
unary_scalar_expr!(Tan, tan);
unary_scalar_expr!(Asin, asin);
unary_scalar_expr!(Acos, acos);
unary_scalar_expr!(Atan, atan);
unary_scalar_expr!(Floor, floor);
unary_scalar_expr!(Ceil, ceil);
unary_scalar_expr!(Round, round);
unary_scalar_expr!(Trunc, trunc);
unary_scalar_expr!(Abs, abs);
unary_scalar_expr!(Signum, signum);
unary_scalar_expr!(Exp, exp);
unary_scalar_expr!(Log2, log2);
unary_scalar_expr!(Log10, log10);
unary_scalar_expr!(Ln, ln);
// string functions
unary_scalar_expr!(Ascii, ascii);
unary_scalar_expr!(BitLength, bit_length);
unary_scalar_expr!(Btrim, btrim);
unary_scalar_expr!(CharacterLength, character_length);
unary_scalar_expr!(CharacterLength, length);
unary_scalar_expr!(Chr, chr);
unary_scalar_expr!(Concat, concat);
unary_scalar_expr!(ConcatWithSeparator, concat_ws);
unary_scalar_expr!(InitCap, initcap);
unary_scalar_expr!(Left, left);
unary_scalar_expr!(Lower, lower);
unary_scalar_expr!(Lpad, lpad);
unary_scalar_expr!(Ltrim, ltrim);
unary_scalar_expr!(MD5, md5);
unary_scalar_expr!(OctetLength, octet_length);
unary_scalar_expr!(RegexpMatch, regexp_match);
unary_scalar_expr!(RegexpReplace, regexp_replace);
unary_scalar_expr!(Replace, replace);
unary_scalar_expr!(Repeat, repeat);
unary_scalar_expr!(Reverse, reverse);
unary_scalar_expr!(Right, right);
unary_scalar_expr!(Rpad, rpad);
unary_scalar_expr!(Rtrim, rtrim);
unary_scalar_expr!(SHA224, sha224);
unary_scalar_expr!(SHA256, sha256);
unary_scalar_expr!(SHA384, sha384);
unary_scalar_expr!(SHA512, sha512);
unary_scalar_expr!(SplitPart, split_part);
unary_scalar_expr!(StartsWith, starts_with);
unary_scalar_expr!(Strpos, strpos);
unary_scalar_expr!(Substr, substr);
unary_scalar_expr!(ToHex, to_hex);
unary_scalar_expr!(Translate, translate);
unary_scalar_expr!(Trim, trim);
unary_scalar_expr!(Upper, upper);
/// Returns a fixed-size array expression containing each of the given arguments.
pub fn array(args: Vec<Expr>) -> Expr {
Expr::ScalarFunction {
fun: functions::BuiltinScalarFunction::Array,
args,
}
}
/// Creates a new UDF with a specific signature and specific return type.
/// This is a helper function to create a new UDF.
/// `create_udf` covers a subset of all possible `ScalarFunction`s:
/// * the UDF has a fixed return type
/// * the UDF has a fixed signature (e.g. [f64, f64])
pub fn create_udf(
name: &str,
input_types: Vec<DataType>,
return_type: Arc<DataType>,
fun: ScalarFunctionImplementation,
) -> ScalarUDF {
let return_type: ReturnTypeFunction = Arc::new(move |_| Ok(return_type.clone()));
ScalarUDF::new(name, &Signature::Exact(input_types), &return_type, &fun)
}
/// Creates a new UDAF with a specific signature, state type and return type.
/// The signature and state type must match the `Accumulator`'s implementation.
#[allow(clippy::rc_buffer)]
pub fn create_udaf(
name: &str,
input_type: DataType,
return_type: Arc<DataType>,
accumulator: AccumulatorFunctionImplementation,
state_type: Arc<Vec<DataType>>,
) -> AggregateUDF {
let return_type: ReturnTypeFunction = Arc::new(move |_| Ok(return_type.clone()));
let state_type: StateTypeFunction = Arc::new(move |_| Ok(state_type.clone()));
AggregateUDF::new(
name,
&Signature::Exact(vec![input_type]),
&return_type,
&accumulator,
&state_type,
)
}
fn fmt_function(
f: &mut fmt::Formatter,
fun: &str,
distinct: bool,
args: &[Expr],
) -> fmt::Result {
let args: Vec<String> = args.iter().map(|arg| format!("{:?}", arg)).collect();
let distinct_str = match distinct {
true => "DISTINCT ",
false => "",
};
write!(f, "{}({}{})", fun, distinct_str, args.join(", "))
}
impl fmt::Debug for Expr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Expr::Alias(expr, alias) => write!(f, "{:?} AS {}", expr, alias),
Expr::Column(name) => write!(f, "#{}", name),
Expr::ScalarVariable(var_names) => write!(f, "{}", var_names.join(".")),
Expr::Literal(v) => write!(f, "{:?}", v),
Expr::Case {
expr,
when_then_expr,
else_expr,
..
} => {
write!(f, "CASE ")?;
if let Some(e) = expr {
write!(f, "{:?} ", e)?;
}
for (w, t) in when_then_expr {
write!(f, "WHEN {:?} THEN {:?} ", w, t)?;
}
if let Some(e) = else_expr {
write!(f, "ELSE {:?} ", e)?;
}
write!(f, "END")
}
Expr::Cast { expr, data_type } => {
write!(f, "CAST({:?} AS {:?})", expr, data_type)
}
Expr::TryCast { expr, data_type } => {
write!(f, "TRY_CAST({:?} AS {:?})", expr, data_type)
}
Expr::Not(expr) => write!(f, "NOT {:?}", expr),
Expr::Negative(expr) => write!(f, "(- {:?})", expr),
Expr::IsNull(expr) => write!(f, "{:?} IS NULL", expr),
Expr::IsNotNull(expr) => write!(f, "{:?} IS NOT NULL", expr),
Expr::BinaryExpr { left, op, right } => {
write!(f, "{:?} {:?} {:?}", left, op, right)
}
Expr::Sort {
expr,
asc,
nulls_first,
} => {
if *asc {
write!(f, "{:?} ASC", expr)?;
} else {
write!(f, "{:?} DESC", expr)?;
}
if *nulls_first {
write!(f, " NULLS FIRST")
} else {
write!(f, " NULLS LAST")
}
}
Expr::ScalarFunction { fun, args, .. } => {
fmt_function(f, &fun.to_string(), false, args)
}
Expr::ScalarUDF { fun, ref args, .. } => {
fmt_function(f, &fun.name, false, args)
}
Expr::WindowFunction { fun, ref args, .. } => {
fmt_function(f, &fun.to_string(), false, args)
}
Expr::AggregateFunction {
fun,
distinct,
ref args,
..
} => fmt_function(f, &fun.to_string(), *distinct, args),
Expr::AggregateUDF { fun, ref args, .. } => {
fmt_function(f, &fun.name, false, args)
}
Expr::Between {
expr,
negated,
low,
high,
} => {
if *negated {
write!(f, "{:?} NOT BETWEEN {:?} AND {:?}", expr, low, high)
} else {
write!(f, "{:?} BETWEEN {:?} AND {:?}", expr, low, high)
}
}
Expr::InList {
expr,
list,
negated,
} => {
if *negated {
write!(f, "{:?} NOT IN ({:?})", expr, list)
} else {
write!(f, "{:?} IN ({:?})", expr, list)
}
}
Expr::Wildcard => write!(f, "*"),
}
}
}
fn create_function_name(
fun: &str,
distinct: bool,
args: &[Expr],
input_schema: &DFSchema,
) -> Result<String> {
let names: Vec<String> = args
.iter()
.map(|e| create_name(e, input_schema))
.collect::<Result<_>>()?;
let distinct_str = match distinct {
true => "DISTINCT ",
false => "",
};
Ok(format!("{}({}{})", fun, distinct_str, names.join(",")))
}
/// Returns a readable name of an expression based on the input schema.
/// This function recursively traverses the expression for names such as "CAST(a > 2)".
fn create_name(e: &Expr, input_schema: &DFSchema) -> Result<String> {
match e {
Expr::Alias(_, name) => Ok(name.clone()),
Expr::Column(name) => Ok(name.clone()),
Expr::ScalarVariable(variable_names) => Ok(variable_names.join(".")),
Expr::Literal(value) => Ok(format!("{:?}", value)),
Expr::BinaryExpr { left, op, right } => {
let left = create_name(left, input_schema)?;
let right = create_name(right, input_schema)?;
Ok(format!("{} {:?} {}", left, op, right))
}
Expr::Case {
expr,
when_then_expr,
else_expr,
} => {
let mut name = "CASE ".to_string();
if let Some(e) = expr {
name += &format!("{:?} ", e);
}
for (w, t) in when_then_expr {
name += &format!("WHEN {:?} THEN {:?} ", w, t);
}
if let Some(e) = else_expr {
name += &format!("ELSE {:?} ", e);
}
name += "END";
Ok(name)
}
Expr::Cast { expr, data_type } => {
let expr = create_name(expr, input_schema)?;
Ok(format!("CAST({} AS {:?})", expr, data_type))
}
Expr::TryCast { expr, data_type } => {
let expr = create_name(expr, input_schema)?;
Ok(format!("TRY_CAST({} AS {:?})", expr, data_type))
}
Expr::Not(expr) => {
let expr = create_name(expr, input_schema)?;
Ok(format!("NOT {}", expr))
}
Expr::Negative(expr) => {
let expr = create_name(expr, input_schema)?;
Ok(format!("(- {})", expr))
}
Expr::IsNull(expr) => {
let expr = create_name(expr, input_schema)?;
Ok(format!("{} IS NULL", expr))
}
Expr::IsNotNull(expr) => {
let expr = create_name(expr, input_schema)?;
Ok(format!("{} IS NOT NULL", expr))
}
Expr::ScalarFunction { fun, args, .. } => {
create_function_name(&fun.to_string(), false, args, input_schema)
}
Expr::ScalarUDF { fun, args, .. } => {
create_function_name(&fun.name, false, args, input_schema)
}
Expr::WindowFunction { fun, args } => {
create_function_name(&fun.to_string(), false, args, input_schema)
}
Expr::AggregateFunction {
fun,
distinct,
args,
..
} => create_function_name(&fun.to_string(), *distinct, args, input_schema),
Expr::AggregateUDF { fun, args } => {
let mut names = Vec::with_capacity(args.len());
for e in args {
names.push(create_name(e, input_schema)?);
}
Ok(format!("{}({})", fun.name, names.join(",")))
}
Expr::InList {
expr,
list,
negated,
} => {
let expr = create_name(expr, input_schema)?;
            let list = list
                .iter()
                .map(|expr| create_name(expr, input_schema))
                .collect::<Result<Vec<_>>>()?;
            if *negated {
                Ok(format!("{} NOT IN ({})", expr, list.join(", ")))
            } else {
                Ok(format!("{} IN ({})", expr, list.join(", ")))
}
}
other => Err(DataFusionError::NotImplemented(format!(
"Create name does not support logical expression {:?}",
other
))),
}
}
/// Create field meta-data from an expression, for use in a result set schema
pub fn exprlist_to_fields<'a>(
expr: impl IntoIterator<Item = &'a Expr>,
input_schema: &DFSchema,
) -> Result<Vec<DFField>> {
expr.into_iter().map(|e| e.to_field(input_schema)).collect()
}
#[cfg(test)]
mod tests {
use super::super::{col, lit, when};
use super::*;
#[test]
fn case_when_same_literal_then_types() -> Result<()> {
let _ = when(col("state").eq(lit("CO")), lit(303))
.when(col("state").eq(lit("NY")), lit(212))
.end()?;
Ok(())
}
#[test]
fn case_when_different_literal_then_types() {
let maybe_expr = when(col("state").eq(lit("CO")), lit(303))
.when(col("state").eq(lit("NY")), lit("212"))
.end();
assert!(maybe_expr.is_err());
}
#[test]
fn rewriter_visit() {
let mut rewriter = RecordingRewriter::default();
col("state").eq(lit("CO")).rewrite(&mut rewriter).unwrap();
assert_eq!(
rewriter.v,
vec![
"Previsited #state Eq Utf8(\"CO\")",
"Previsited #state",
"Mutated #state",
"Previsited Utf8(\"CO\")",
"Mutated Utf8(\"CO\")",
"Mutated #state Eq Utf8(\"CO\")"
]
)
}
#[test]
fn filter_is_null_and_is_not_null() {
let col_null = Expr::Column("col1".to_string());
let col_not_null = Expr::Column("col2".to_string());
assert_eq!(format!("{:?}", col_null.is_null()), "#col1 IS NULL");
assert_eq!(
format!("{:?}", col_not_null.is_not_null()),
"#col2 IS NOT NULL"
);
}
#[derive(Default)]
struct RecordingRewriter {
v: Vec<String>,
}
impl ExprRewriter for RecordingRewriter {
fn mutate(&mut self, expr: Expr) -> Result<Expr> {
self.v.push(format!("Mutated {:?}", expr));
Ok(expr)
}
fn pre_visit(&mut self, expr: &Expr) -> Result<bool> {
self.v.push(format!("Previsited {:?}", expr));
Ok(true)
}
}
#[test]
fn rewriter_rewrite() {
let mut rewriter = FooBarRewriter {};
// rewrites "foo" --> "bar"
let rewritten = col("state").eq(lit("foo")).rewrite(&mut rewriter).unwrap();
assert_eq!(rewritten, col("state").eq(lit("bar")));
        // doesn't rewrite other literals
let rewritten = col("state").eq(lit("baz")).rewrite(&mut rewriter).unwrap();
assert_eq!(rewritten, col("state").eq(lit("baz")));
}
/// rewrites all "foo" string literals to "bar"
struct FooBarRewriter {}
impl ExprRewriter for FooBarRewriter {
fn mutate(&mut self, expr: Expr) -> Result<Expr> {
match expr {
Expr::Literal(scalar) => {
if let ScalarValue::Utf8(Some(utf8_val)) = scalar {
let utf8_val = if utf8_val == "foo" {
"bar".to_string()
} else {
utf8_val
};
Ok(lit(utf8_val))
} else {
Ok(Expr::Literal(scalar))
}
}
// otherwise, return the expression unchanged
expr => Ok(expr),
}
}
}
}
| 34.063192 | 110 | 0.539828 |
bffacb24582cdf9284278497a8667b0828e8da52 | 76 |
net.sf.jasperreports.engine.json.expression.filter.CompoundFilterExpression
| 38 | 75 | 0.894737 |
39d86d1b213e67954d12d8058958ff3a4c5bc39a | 2,391 |
#[doc = "Reader of register CH15_LINK"]
pub type R = crate::R<u32, super::CH15_LINK>;
#[doc = "Writer for register CH15_LINK"]
pub type W = crate::W<u32, super::CH15_LINK>;
#[doc = "Register CH15_LINK `reset()`'s with value 0"]
impl crate::ResetValue for super::CH15_LINK {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `LINKMODE`"]
pub type LINKMODE_R = crate::R<bool, bool>;
#[doc = "Reader of field `LINK`"]
pub type LINK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LINK`"]
pub struct LINK_W<'a> {
w: &'a mut W,
}
impl<'a> LINK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "Reader of field `LINKADDR`"]
pub type LINKADDR_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `LINKADDR`"]
pub struct LINKADDR_W<'a> {
w: &'a mut W,
}
impl<'a> LINKADDR_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x3fff_ffff << 2)) | (((value as u32) & 0x3fff_ffff) << 2);
self.w
}
}
impl R {
#[doc = "Bit 0 - Link Structure Addressing Mode"]
#[inline(always)]
pub fn linkmode(&self) -> LINKMODE_R {
LINKMODE_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Link Next Structure"]
#[inline(always)]
pub fn link(&self) -> LINK_R {
LINK_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bits 2:31 - Link Structure Address"]
#[inline(always)]
pub fn linkaddr(&self) -> LINKADDR_R {
LINKADDR_R::new(((self.bits >> 2) & 0x3fff_ffff) as u32)
}
}
impl W {
#[doc = "Bit 1 - Link Next Structure"]
#[inline(always)]
pub fn link(&mut self) -> LINK_W {
LINK_W { w: self }
}
#[doc = "Bits 2:31 - Link Structure Address"]
#[inline(always)]
pub fn linkaddr(&mut self) -> LINKADDR_W {
LINKADDR_W { w: self }
}
}
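// Sketch (illustrative, not part of the generated API): a typical
// read-modify-write of this register through the owning peripheral. The
// `dma.ch15_link` path and `descriptor_addr` variable are hypothetical; the
// LINKADDR field occupies bits 2..=31, so a word-aligned address is written
// shifted right by two.
//
// dma.ch15_link.modify(|_, w| {
//     unsafe { w.linkaddr().bits(descriptor_addr >> 2) }
//         .link()
//         .set_bit()
// });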
| 29.158537 | 98 | 0.559598 |
6a449831b594a6580b11b0dca12d9ad00b5513b9 | 12,393 |
#[doc = "Register `P1OUT` reader"]
pub struct R(crate::R<P1OUT_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<P1OUT_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::convert::From<crate::R<P1OUT_SPEC>> for R {
fn from(reader: crate::R<P1OUT_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `P1OUT` writer"]
pub struct W(crate::W<P1OUT_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<P1OUT_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl core::convert::From<crate::W<P1OUT_SPEC>> for W {
fn from(writer: crate::W<P1OUT_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `P0` reader - P0"]
pub struct P0_R(crate::FieldReader<bool, bool>);
impl P0_R {
pub(crate) fn new(bits: bool) -> Self {
P0_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for P0_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `P0` writer - P0"]
pub struct P0_W<'a> {
w: &'a mut W,
}
impl<'a> P0_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | (value as u8 & 0x01);
self.w
}
}
#[doc = "Field `P1` reader - P1"]
pub struct P1_R(crate::FieldReader<bool, bool>);
impl P1_R {
pub(crate) fn new(bits: bool) -> Self {
P1_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for P1_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `P1` writer - P1"]
pub struct P1_W<'a> {
w: &'a mut W,
}
impl<'a> P1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u8 & 0x01) << 1);
self.w
}
}
#[doc = "Field `P2` reader - P2"]
pub struct P2_R(crate::FieldReader<bool, bool>);
impl P2_R {
pub(crate) fn new(bits: bool) -> Self {
P2_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for P2_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `P2` writer - P2"]
pub struct P2_W<'a> {
w: &'a mut W,
}
impl<'a> P2_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u8 & 0x01) << 2);
self.w
}
}
#[doc = "Field `P3` reader - P3"]
pub struct P3_R(crate::FieldReader<bool, bool>);
impl P3_R {
pub(crate) fn new(bits: bool) -> Self {
P3_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for P3_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `P3` writer - P3"]
pub struct P3_W<'a> {
w: &'a mut W,
}
impl<'a> P3_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | ((value as u8 & 0x01) << 3);
self.w
}
}
#[doc = "Field `P4` reader - P4"]
pub struct P4_R(crate::FieldReader<bool, bool>);
impl P4_R {
pub(crate) fn new(bits: bool) -> Self {
P4_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for P4_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `P4` writer - P4"]
pub struct P4_W<'a> {
w: &'a mut W,
}
impl<'a> P4_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | ((value as u8 & 0x01) << 4);
self.w
}
}
#[doc = "Field `P5` reader - P5"]
pub struct P5_R(crate::FieldReader<bool, bool>);
impl P5_R {
pub(crate) fn new(bits: bool) -> Self {
P5_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for P5_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `P5` writer - P5"]
pub struct P5_W<'a> {
w: &'a mut W,
}
impl<'a> P5_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | ((value as u8 & 0x01) << 5);
self.w
}
}
#[doc = "Field `P6` reader - P6"]
pub struct P6_R(crate::FieldReader<bool, bool>);
impl P6_R {
pub(crate) fn new(bits: bool) -> Self {
P6_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for P6_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `P6` writer - P6"]
pub struct P6_W<'a> {
w: &'a mut W,
}
impl<'a> P6_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | ((value as u8 & 0x01) << 6);
self.w
}
}
#[doc = "Field `P7` reader - P7"]
pub struct P7_R(crate::FieldReader<bool, bool>);
impl P7_R {
pub(crate) fn new(bits: bool) -> Self {
P7_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for P7_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `P7` writer - P7"]
pub struct P7_W<'a> {
w: &'a mut W,
}
impl<'a> P7_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u8 & 0x01) << 7);
self.w
}
}
#[doc = "Field `P1OUT` reader - Port 1 Output register"]
pub struct P1OUT_R(crate::FieldReader<u8, u8>);
impl P1OUT_R {
pub(crate) fn new(bits: u8) -> Self {
P1OUT_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for P1OUT_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `P1OUT` writer - Port 1 Output register"]
pub struct P1OUT_W<'a> {
w: &'a mut W,
}
impl<'a> P1OUT_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0xff) | (value as u8 & 0xff);
self.w
}
}
impl R {
#[doc = "Bit 0 - P0"]
#[inline(always)]
pub fn p0(&self) -> P0_R {
P0_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - P1"]
#[inline(always)]
pub fn p1(&self) -> P1_R {
P1_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - P2"]
#[inline(always)]
pub fn p2(&self) -> P2_R {
P2_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - P3"]
#[inline(always)]
pub fn p3(&self) -> P3_R {
P3_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - P4"]
#[inline(always)]
pub fn p4(&self) -> P4_R {
P4_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - P5"]
#[inline(always)]
pub fn p5(&self) -> P5_R {
P5_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - P6"]
#[inline(always)]
pub fn p6(&self) -> P6_R {
P6_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - P7"]
#[inline(always)]
pub fn p7(&self) -> P7_R {
P7_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bits 0:7 - Port 1 Output register"]
#[inline(always)]
pub fn p1out(&self) -> P1OUT_R {
P1OUT_R::new((self.bits & 0xff) as u8)
}
}
impl W {
#[doc = "Bit 0 - P0"]
#[inline(always)]
pub fn p0(&mut self) -> P0_W {
P0_W { w: self }
}
#[doc = "Bit 1 - P1"]
#[inline(always)]
pub fn p1(&mut self) -> P1_W {
P1_W { w: self }
}
#[doc = "Bit 2 - P2"]
#[inline(always)]
pub fn p2(&mut self) -> P2_W {
P2_W { w: self }
}
#[doc = "Bit 3 - P3"]
#[inline(always)]
pub fn p3(&mut self) -> P3_W {
P3_W { w: self }
}
#[doc = "Bit 4 - P4"]
#[inline(always)]
pub fn p4(&mut self) -> P4_W {
P4_W { w: self }
}
#[doc = "Bit 5 - P5"]
#[inline(always)]
pub fn p5(&mut self) -> P5_W {
P5_W { w: self }
}
#[doc = "Bit 6 - P6"]
#[inline(always)]
pub fn p6(&mut self) -> P6_W {
P6_W { w: self }
}
#[doc = "Bit 7 - P7"]
#[inline(always)]
pub fn p7(&mut self) -> P7_W {
P7_W { w: self }
}
#[doc = "Bits 0:7 - Port 1 Output register"]
#[inline(always)]
pub fn p1out(&mut self) -> P1OUT_W {
P1OUT_W { w: self }
}
#[doc = "Writes raw bits to the register."]
pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Port 1 Output\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [p1out](index.html) module"]
pub struct P1OUT_SPEC;
impl crate::RegisterSpec for P1OUT_SPEC {
type Ux = u8;
}
#[doc = "`read()` method returns [p1out::R](R) reader structure"]
impl crate::Readable for P1OUT_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [p1out::W](W) writer structure"]
impl crate::Writable for P1OUT_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets P1OUT to value 0"]
impl crate::Resettable for P1OUT_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
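// A brief usage sketch (the `port` handle below is a placeholder for whatever
// peripheral struct exposes this register; only the field methods are defined here):
//
// port.p1out.write(|w| w.p0().set_bit().p7().clear_bit());
// port.p1out.modify(|r, w| w.p1out().bits(r.p1out().bits() | 0x0f));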
| 26.480769 | 399 | 0.529815 |
62d82934d992452f55af23d72f2d4508802a1138 | 17,045 | #[cfg(feature = "e2e-tests")]
#[macro_use]
extern crate cdrs;
#[cfg(feature = "e2e-tests")]
#[macro_use]
extern crate maplit;
extern crate regex;
extern crate time;
extern crate uuid;
mod common;
#[cfg(feature = "e2e-tests")]
use common::*;
#[cfg(feature = "e2e-tests")]
use cdrs_tokio::query::QueryExecutor;
#[cfg(feature = "e2e-tests")]
use cdrs_tokio::types::blob::Blob;
#[cfg(feature = "e2e-tests")]
use cdrs_tokio::types::list::List;
#[cfg(feature = "e2e-tests")]
use cdrs_tokio::types::map::Map;
#[cfg(feature = "e2e-tests")]
use cdrs_tokio::types::AsRust;
#[cfg(feature = "e2e-tests")]
use cdrs_tokio::types::ByName;
#[cfg(feature = "e2e-tests")]
use uuid::Uuid;
#[cfg(feature = "e2e-tests")]
use std::collections::HashMap;
#[cfg(feature = "e2e-tests")]
use std::str::FromStr;
#[tokio::test]
#[cfg(feature = "e2e-tests")]
async fn list() {
let cql = "CREATE TABLE IF NOT EXISTS cdrs_test.test_lists \
(my_text_list frozen<list<text>> PRIMARY KEY, \
my_nested_list list<frozen<list<int>>>)";
let session = setup(cql).await.expect("setup");
let my_text_list = vec!["text1", "text2", "text3"];
let my_nested_list: Vec<Vec<i32>> =
vec![vec![1, 2, 3], vec![999, 888, 777, 666, 555], vec![-1, -2]];
let values = query_values!(my_text_list.clone(), my_nested_list.clone());
let cql = "INSERT INTO cdrs_test.test_lists \
(my_text_list, my_nested_list) VALUES (?, ?)";
session
.query_with_values(cql, values)
.await
.expect("insert lists error");
let cql = "SELECT * FROM cdrs_test.test_lists";
let rows = session
.query(cql)
.await
.expect("query lists error")
.get_body()
.expect("get body with lists error")
.into_rows()
.expect("converting body with lists into rows error");
assert_eq!(rows.len(), 1);
for row in rows {
let my_text_list_row: Vec<String> = row
.r_by_name::<List>("my_text_list")
.expect("my_text_list")
.as_r_rust()
.expect("my_text_list as rust");
let my_nested_list_outer_row: Vec<List> = row
.r_by_name::<List>("my_nested_list")
.expect("my_nested_list")
.as_r_rust()
.expect("my_nested_list (outer) as rust");
let mut my_nested_list_row = Vec::with_capacity(my_nested_list_outer_row.len());
for my_nested_list_inner_row in my_nested_list_outer_row {
let my_nested_list_inner_row: Vec<i32> = my_nested_list_inner_row
.as_r_rust()
.expect("my_nested_list (inner) as rust");
my_nested_list_row.push(my_nested_list_inner_row);
}
assert_eq!(my_text_list_row, vec!["text1", "text2", "text3"]);
assert_eq!(my_nested_list_row, my_nested_list);
}
}
#[tokio::test]
#[ignore]
#[cfg(all(feature = "v4", feature = "e2e-tests"))]
async fn list_v4() {
let cql = "CREATE TABLE IF NOT EXISTS cdrs_test.test_lists_v4 \
(my_text_list frozen<list<text>> PRIMARY KEY, \
my_nested_list list<frozen<list<smallint>>>)";
let session = setup(cql).await.expect("setup");
let my_text_list = vec![
"text1".to_string(),
"text2".to_string(),
"text3".to_string(),
];
let my_nested_list: Vec<Vec<i16>> =
vec![vec![1, 2, 3], vec![999, 888, 777, 666, 555], vec![-1, -2]];
let values = query_values!(my_text_list.clone(), my_nested_list.clone());
let cql = "INSERT INTO cdrs_test.test_lists_v4 \
(my_text_list, my_nested_list) VALUES (?, ?)";
session
.query_with_values(cql, values)
.await
.expect("insert");
let cql = "SELECT * FROM cdrs_test.test_lists_v4";
let rows = session
.query(cql)
.await
.expect("query")
.get_body()
.expect("get body")
.into_rows()
.expect("into rows");
assert_eq!(rows.len(), 1);
for row in rows {
let my_text_list_row: Vec<String> = row
.r_by_name::<List>("my_text_list")
.expect("my_text_list")
.as_r_rust()
.expect("my_text_list as rust");
let my_nested_list_outer_row: Vec<List> = row
.r_by_name::<List>("my_nested_list")
.expect("my_nested_list")
.as_r_rust()
.expect("my_nested_list (outer) as rust");
let mut my_nested_list_row = Vec::with_capacity(my_nested_list_outer_row.len());
for my_nested_list_inner_row in my_nested_list_outer_row {
let my_nested_list_inner_row: Vec<i16> = my_nested_list_inner_row
.as_r_rust()
.expect("my_nested_list (inner) as rust");
my_nested_list_row.push(my_nested_list_inner_row);
}
assert_eq!(my_text_list_row, my_text_list);
assert_eq!(my_nested_list_row, my_nested_list);
}
}
#[tokio::test]
#[cfg(feature = "e2e-tests")]
async fn set() {
let cql = "CREATE TABLE IF NOT EXISTS cdrs_test.test_sets \
(my_text_set frozen<set<text>> PRIMARY KEY, \
my_nested_set set<frozen<set<int>>>)";
let session = setup(cql).await.expect("setup");
let my_text_set = vec![
"text1".to_string(),
"text2".to_string(),
"text3".to_string(),
];
let my_nested_set: Vec<Vec<i32>> =
vec![vec![-2, -1], vec![1, 2, 3], vec![555, 666, 777, 888, 999]];
let values = query_values!(my_text_set.clone(), my_nested_set.clone());
let cql = "INSERT INTO cdrs_test.test_sets \
(my_text_set, my_nested_set) VALUES (?, ?)";
session
.query_with_values(cql, values)
.await
.expect("insert");
let cql = "SELECT * FROM cdrs_test.test_sets";
let rows = session
.query(cql)
.await
.expect("query")
.get_body()
.expect("get body")
.into_rows()
.expect("into rows");
assert_eq!(rows.len(), 1);
for row in rows {
let my_text_set_row: Vec<String> = row
.r_by_name::<List>("my_text_set")
.expect("my_text_set")
.as_r_rust()
.expect("my_text_set as rust");
let my_nested_set_outer_row: Vec<List> = row
.r_by_name::<List>("my_nested_set")
.expect("my_nested_set")
.as_r_rust()
.expect("my_nested_set (outer) as rust");
let mut my_nested_set_row = Vec::with_capacity(my_nested_set_outer_row.len());
for my_nested_set_inner_row in my_nested_set_outer_row {
let my_nested_set_inner_row: Vec<i32> = my_nested_set_inner_row
.as_r_rust()
.expect("my_nested_set (inner) as rust");
my_nested_set_row.push(my_nested_set_inner_row);
}
assert_eq!(my_text_set_row, my_text_set);
assert_eq!(my_nested_set_row, my_nested_set);
}
}
#[tokio::test]
#[cfg(all(feature = "v4", feature = "e2e-tests"))]
async fn set_v4() {
let cql = "CREATE TABLE IF NOT EXISTS cdrs_test.test_sets_v4 \
(my_text_set frozen<set<text>> PRIMARY KEY, \
my_nested_set set<frozen<set<smallint>>>)";
let session = setup(cql).await.expect("setup");
let my_text_set = vec![
"text1".to_string(),
"text2".to_string(),
"text3".to_string(),
];
let my_nested_set: Vec<Vec<i16>> =
vec![vec![-2, -1], vec![1, 2, 3], vec![555, 666, 777, 888, 999]];
let values = query_values!(my_text_set.clone(), my_nested_set.clone());
let cql = "INSERT INTO cdrs_test.test_sets_v4 \
(my_text_set, my_nested_set) VALUES (?, ?)";
session
.query_with_values(cql, values)
.await
.expect("insert");
let cql = "SELECT * FROM cdrs_test.test_sets_v4";
let rows = session
.query(cql)
.await
.expect("query")
.get_body()
.expect("get body")
.into_rows()
.expect("into rows");
assert_eq!(rows.len(), 1);
for row in rows {
let my_text_set_row: Vec<String> = row
.r_by_name::<List>("my_text_set")
.expect("my_text_set")
.as_r_rust()
.expect("my_text_set as rust");
let my_nested_set_outer_row: Vec<List> = row
.r_by_name::<List>("my_nested_set")
.expect("my_nested_set")
.as_r_rust()
.expect("my_nested_set (outer) as rust");
let mut my_nested_set_row = Vec::with_capacity(my_nested_set_outer_row.len());
for my_nested_set_inner_row in my_nested_set_outer_row {
let my_nested_set_inner_row: Vec<i16> = my_nested_set_inner_row
.as_r_rust()
.expect("my_nested_set (inner) as rust");
my_nested_set_row.push(my_nested_set_inner_row);
}
assert_eq!(my_text_set_row, my_text_set);
assert_eq!(my_nested_set_row, my_nested_set);
}
}
#[tokio::test]
#[cfg(feature = "e2e-tests")]
async fn map_without_blob() {
let cql = "CREATE TABLE IF NOT EXISTS cdrs_test.test_maps_without_blob \
(my_key int PRIMARY KEY, \
my_text_map map<text, text>, \
my_nested_map map<uuid, frozen<map<bigint, int>>>)";
let session = setup(cql).await.expect("setup");
let my_text_map = hashmap! {
"key1".to_string() => "value1".to_string(),
"key2".to_string() => "value2".to_string(),
"key3".to_string() => "value3".to_string(),
};
let my_nested_map: HashMap<Uuid, HashMap<i64, i32>> = hashmap! {
Uuid::from_str("bb16106a-10bc-4a07-baa3-126ffe208c43").unwrap() => hashmap!{
1 => 1,
2 => 2,
},
Uuid::from_str("687d7677-dbf0-4d25-8cf3-e5d9185bba0b").unwrap() => hashmap!{
1 => 1,
},
Uuid::from_str("c4dc6e8b-758a-4af4-ab00-ec356fb688d9").unwrap() => hashmap!{
1 => 1,
2 => 2,
3 => 3,
},
};
let values = query_values!(0i32, my_text_map.clone(), my_nested_map.clone());
let cql = "INSERT INTO cdrs_test.test_maps_without_blob \
(my_key, my_text_map, my_nested_map) VALUES (?, ?, ?)";
session
.query_with_values(cql, values)
.await
.expect("insert");
let cql = "SELECT * FROM cdrs_test.test_maps_without_blob";
let rows = session
.query(cql)
.await
.expect("query")
.get_body()
.expect("get body")
.into_rows()
.expect("into rows");
assert_eq!(rows.len(), 1);
for row in rows {
let my_text_map_row: HashMap<String, String> = row
.r_by_name::<Map>("my_text_map")
.expect("my_text_map")
.as_r_rust()
.expect("my_text_map as rust");
let my_nested_map_outer_row: HashMap<Uuid, Map> = row
.r_by_name::<Map>("my_nested_map")
.expect("my_nested_map")
.as_r_rust()
.expect("my_nested_map (outer) as rust");
let mut my_nested_map_row = HashMap::with_capacity(my_nested_map_outer_row.len());
for (index, my_nested_map_inner_row) in my_nested_map_outer_row {
let my_nested_map_inner_row: HashMap<i64, i32> = my_nested_map_inner_row
.as_r_rust()
.expect("my_nested_map (inner) as rust");
my_nested_map_row.insert(index, my_nested_map_inner_row);
}
assert_eq!(my_text_map_row, my_text_map);
assert_eq!(my_nested_map_row, my_nested_map);
}
}
#[tokio::test]
#[cfg(all(feature = "v4", feature = "e2e-tests"))]
async fn map_without_blob_v4() {
let cql = "CREATE TABLE IF NOT EXISTS cdrs_test.test_maps_without_blob_v4 \
(my_text_map frozen<map<text, text>> PRIMARY KEY, \
my_nested_map map<uuid, frozen<map<bigint, tinyint>>>)";
let session = setup(cql).await.expect("setup");
let my_text_map = hashmap! {
"key1".to_string() => "value1".to_string(),
"key2".to_string() => "value2".to_string(),
"key3".to_string() => "value3".to_string(),
};
let my_nested_map: HashMap<Uuid, HashMap<i64, i8>> = hashmap! {
Uuid::from_str("bb16106a-10bc-4a07-baa3-126ffe208c43").unwrap() => hashmap!{
1 => 1,
2 => 2,
},
Uuid::from_str("687d7677-dbf0-4d25-8cf3-e5d9185bba0b").unwrap() => hashmap!{
1 => 1,
},
Uuid::from_str("c4dc6e8b-758a-4af4-ab00-ec356fb688d9").unwrap() => hashmap!{
1 => 1,
2 => 2,
3 => 3,
},
};
let values = query_values!(my_text_map.clone(), my_nested_map.clone());
let cql = "INSERT INTO cdrs_test.test_maps_without_blob_v4 \
(my_text_map, my_nested_map) VALUES (?, ?)";
session
.query_with_values(cql, values)
.await
.expect("insert");
let cql = "SELECT * FROM cdrs_test.test_maps_without_blob_v4";
let rows = session
.query(cql)
.await
.expect("query")
.get_body()
.expect("get body")
.into_rows()
.expect("into rows");
assert_eq!(rows.len(), 1);
for row in rows {
let my_text_map_row: HashMap<String, String> = row
.r_by_name::<Map>("my_text_map")
.expect("my_text_map")
.as_r_rust()
.expect("my_text_map as rust");
let my_nested_map_outer_row: HashMap<Uuid, Map> = row
.r_by_name::<Map>("my_nested_map")
.expect("my_nested_map")
.as_r_rust()
.expect("my_nested_map (outer) as rust");
let mut my_nested_map_row = HashMap::with_capacity(my_nested_map_outer_row.len());
for (index, my_nested_map_inner_row) in my_nested_map_outer_row {
let my_nested_map_inner_row: HashMap<i64, i8> = my_nested_map_inner_row
.as_r_rust()
.expect("my_nested_map (inner) as rust");
my_nested_map_row.insert(index, my_nested_map_inner_row);
}
assert_eq!(my_text_map_row, my_text_map);
assert_eq!(my_nested_map_row, my_nested_map);
}
}
#[tokio::test]
#[cfg(feature = "e2e-tests")]
async fn map() {
let cql = "CREATE TABLE IF NOT EXISTS cdrs_test.test_maps \
(my_text_map frozen<map<text, text>> PRIMARY KEY, \
my_nested_map map<uuid, frozen<map<bigint, blob>>>)";
let session = setup(cql).await.expect("setup");
let my_text_map = hashmap! {
"key1".to_string() => "value1".to_string(),
"key2".to_string() => "value2".to_string(),
"key3".to_string() => "value3".to_string(),
};
let my_nested_map: HashMap<Uuid, HashMap<i64, Blob>> = hashmap! {
Uuid::from_str("bb16106a-10bc-4a07-baa3-126ffe208c43").unwrap() => hashmap!{
1 => vec![52, 121, 209, 200, 81, 118, 181, 17].into(),
2 => vec![226, 90, 51, 10, 26, 87, 141, 61].into(),
},
Uuid::from_str("687d7677-dbf0-4d25-8cf3-e5d9185bba0b").unwrap() => hashmap!{
1 => vec![224, 155, 148, 6, 217, 96, 120, 38].into(),
},
Uuid::from_str("c4dc6e8b-758a-4af4-ab00-ec356fb688d9").unwrap() => hashmap!{
1 => vec![164, 238, 196, 10, 149, 169, 145, 239].into(),
2 => vec![250, 87, 119, 134, 105, 236, 240, 64].into(),
3 => vec![72, 81, 26, 173, 107, 96, 38, 91].into(),
},
};
let values = query_values!(my_text_map.clone(), my_nested_map.clone());
let cql = "INSERT INTO cdrs_test.test_maps \
(my_text_map, my_nested_map) VALUES (?, ?)";
session
.query_with_values(cql, values)
.await
.expect("insert");
let cql = "SELECT * FROM cdrs_test.test_maps";
let rows = session
.query(cql)
.await
.expect("query")
.get_body()
.expect("get body")
.into_rows()
.expect("into rows");
assert_eq!(rows.len(), 1);
for row in rows {
let my_text_map_row: HashMap<String, String> = row
.r_by_name::<Map>("my_text_map")
.expect("my_text_map")
.as_r_rust()
.expect("my_text_map as rust");
let my_nested_map_outer_row: HashMap<Uuid, Map> = row
.r_by_name::<Map>("my_nested_map")
.expect("my_nested_map")
.as_r_rust()
.expect("my_nested_map (outer) as rust");
let mut my_nested_map_row = HashMap::with_capacity(my_nested_map_outer_row.len());
for (index, my_nested_map_inner_row) in my_nested_map_outer_row {
let my_nested_map_inner_row: HashMap<i64, Blob> = my_nested_map_inner_row
.as_r_rust()
.expect("my_nested_map (inner) as rust");
my_nested_map_row.insert(index, my_nested_map_inner_row);
}
assert_eq!(my_text_map_row, my_text_map);
assert_eq!(my_nested_map_row, my_nested_map);
}
}
| 35.884211 | 90 | 0.583749 |
3afa4a7ecf0923aa720106154edb0629bd434733 | 5,136 | //! This crate defines a custom derive macro `Iterator`. Should not be used
//! directly, but only through `enso-shapely` crate, as it provides utilities
//! necessary for the generated code to compile.
// === Features ===
#![feature(bool_to_option)]
#![feature(exact_size_is_empty)]
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
// === Non-Standard Linter Configuration ===
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unused_import_braces)]
#![warn(unused_qualifications)]
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]
extern crate proc_macro;
mod derive_clone_ref;
mod derive_entry_point;
mod derive_iterator;
mod derive_no_clone;
mod overlappable;
mod tagged_enum;
mod prelude {
pub use enso_macro_utils::repr;
pub use proc_macro2::Span;
pub use proc_macro2::TokenStream;
pub use quote::quote;
}
use crate::derive_iterator::IsMut;
/// For `struct Foo<T>` or `enum Foo<T>` provides:
/// * an `IntoIterator` implementation for `&'t Foo<T>`, plus `iter` and `into_iter`
/// methods.
///
/// The iterators will:
/// * for structs: go over each field whose declared type is the same as the struct's last type
/// parameter.
/// * enums: delegate to current constructor's nested value's iterator.
///
/// Enums are required to use only single-element tuple-like variants. This
/// limitation should be lifted in the future.
///
/// Any dependent type stored in struct, tuple or wrapped in enum should have
/// dependency only in its last type parameter. All dependent types that are not
/// tuples nor directly the yielded type, are required to provide `iter` method
/// that returns a compatible iterator (possible also derived).
///
/// Caller must have the following features enabled:
/// ```
/// #![feature(generators)]
/// #![feature(type_alias_impl_trait)]
/// ```
///
/// When used on a type that takes no type parameters, like `struct Foo`, this does
/// nothing but yields no errors.
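/// # Example
///
/// A minimal sketch of the intended usage (illustrative only, not compiled here):
///
/// ```ignore
/// #[derive(Iterator)]
/// struct Foo<T> {
///     size: usize,
///     first: T,
///     second: T,
/// }
/// // `foo.iter()` yields `&T` for `first` and `second`; `size` is skipped because
/// // its type is not the last type parameter `T`.
/// ```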
#[proc_macro_derive(Iterator)]
pub fn derive_iterator(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
derive_iterator::derive(input, IsMut::Immutable)
}
/// Same as `derive(Iterator)` but generates mutable iterator.
///
/// It is separate, as some types allow deriving an immutable iterator but not the
/// mutable one.
#[proc_macro_derive(IteratorMut)]
pub fn derive_iterator_mut(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
derive_iterator::derive(input, IsMut::Mutable)
}
/// Derives `CloneRef` implementation for given type. It performs `clone_ref` on every member
/// field. The input type must implement `Clone` and its every field must implement `CloneRef`.
///
/// For generic types no bounds are introduced in the generated implementation. To customize this
/// behavior user might add `#[clone_ref(bound="…")]` attribute. Then the generated implementation
/// will use the provided bounds.
///
/// Moreover, for a given struct `X` this macro generates also `impl From<&X> for X` which uses
/// `CloneRef` under the hood. The semantics of `CloneRef` makes each object to naturally provide
/// transformation from reference to an owned type.
///
/// Supported inputs are structs (unit, named, unnamed), enums (with unit, named, unnamed and no
/// variants at all). Unions are currently not supported.
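/// # Example
///
/// A sketch of the intended usage; the type and the bound below are illustrative only:
///
/// ```ignore
/// #[derive(Clone, CloneRef)]
/// #[clone_ref(bound = "T: CloneRef")]
/// struct Handle<T> {
///     data: T,
/// }
/// ```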
#[proc_macro_derive(CloneRef, attributes(clone_ref))]
pub fn derive_clone_ref(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
derive_clone_ref::derive(input)
}
/// Makes sure that the structure does not derive [`Clone`] and that it has a custom [`Drop`]
/// implementation.
///
/// For the given input
/// ```ignore
/// #[derive(NoCloneBecauseOfCustomDrop)]
/// struct Test {}
/// ```
///
/// The following output will be generated:
/// ```ignore
/// struct Test {}
/// impl !Clone for Test {}
/// impl ImplementsDrop for Test {}
/// ```
#[proc_macro_derive(NoCloneBecauseOfCustomDrop)]
pub fn derive_no_clone(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
derive_no_clone::derive(input)
}
/// Exposes the function as an application entry point. Entry points are alternative application
/// running modes that you can access by adding `?entry=` to the end of the application URL.
#[proc_macro_attribute]
pub fn entry_point(
_: proc_macro::TokenStream,
item: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
derive_entry_point::derive(item)
}
#[allow(missing_docs)]
#[proc_macro_attribute]
pub fn overlappable(
attrs: proc_macro::TokenStream,
input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
overlappable::overlappable(attrs, input)
}
/// Transforms Rust enums into enums where each variant is a separate type. It also implements
/// several traits (such as conversions between variants and the enum type) and defines utility
/// functions, such as constructors. See [`tagged_enum::run`] to learn more.
#[proc_macro_attribute]
pub fn tagged_enum(
attr: proc_macro::TokenStream,
input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
tagged_enum::run(attr, input)
}
| 35.178082 | 98 | 0.730724 |
ab44d863fa5a63f4e3d6282589f00e971ded3b8f | 12,416 | //! Common types shared between the encoder and decoder
use crate::filter;
use std::fmt;
/// Describes the layout of samples in a pixel
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum ColorType {
Grayscale = 0,
RGB = 2,
Indexed = 3,
GrayscaleAlpha = 4,
RGBA = 6,
}
impl ColorType {
/// Returns the number of samples used per pixel of `ColorType`
pub fn samples(&self) -> usize {
use self::ColorType::*;
match *self {
Grayscale | Indexed => 1,
RGB => 3,
GrayscaleAlpha => 2,
RGBA => 4,
}
}
/// u8 -> Self. Temporary solution until Rust provides a canonical one.
pub fn from_u8(n: u8) -> Option<ColorType> {
match n {
0 => Some(ColorType::Grayscale),
2 => Some(ColorType::RGB),
3 => Some(ColorType::Indexed),
4 => Some(ColorType::GrayscaleAlpha),
6 => Some(ColorType::RGBA),
_ => None,
}
}
}
/// Bit depth of the png file
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum BitDepth {
One = 1,
Two = 2,
Four = 4,
Eight = 8,
Sixteen = 16,
}
impl BitDepth {
/// u8 -> Self. Temporary solution until Rust provides a canonical one.
pub fn from_u8(n: u8) -> Option<BitDepth> {
match n {
1 => Some(BitDepth::One),
2 => Some(BitDepth::Two),
4 => Some(BitDepth::Four),
8 => Some(BitDepth::Eight),
16 => Some(BitDepth::Sixteen),
_ => None,
}
}
}
/// Pixel dimensions information
#[derive(Clone, Copy, Debug)]
pub struct PixelDimensions {
/// Pixels per unit, X axis
pub xppu: u32,
/// Pixels per unit, Y axis
pub yppu: u32,
/// Either *Meter* or *Unspecified*
pub unit: Unit,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
/// Physical unit of the pixel dimensions
pub enum Unit {
Unspecified = 0,
Meter = 1,
}
impl Unit {
/// u8 -> Self. Temporary solution until Rust provides a canonical one.
pub fn from_u8(n: u8) -> Option<Unit> {
match n {
0 => Some(Unit::Unspecified),
1 => Some(Unit::Meter),
_ => None,
}
}
}
/// How to reset the buffer of an animated png (APNG) at the end of a frame.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum DisposeOp {
/// Leave the buffer unchanged.
None = 0,
/// Clear buffer with the background color.
Background = 1,
/// Reset the buffer to the state before the current frame.
Previous = 2,
}
impl DisposeOp {
/// u8 -> Self. Using enum_primitive or transmute is probably the right thing but this will do for now.
pub fn from_u8(n: u8) -> Option<DisposeOp> {
match n {
0 => Some(DisposeOp::None),
1 => Some(DisposeOp::Background),
2 => Some(DisposeOp::Previous),
_ => None,
}
}
}
impl fmt::Display for DisposeOp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
DisposeOp::None => "DISPOSE_OP_NONE",
DisposeOp::Background => "DISPOSE_OP_BACKGROUND",
DisposeOp::Previous => "DISPOSE_OP_PREVIOUS",
};
write!(f, "{}", name)
}
}
/// How pixels are written into the buffer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum BlendOp {
/// Pixels overwrite the value at their position.
Source = 0,
/// The new pixels are blended into the current state based on alpha.
Over = 1,
}
impl BlendOp {
/// u8 -> Self. Using enum_primitive or transmute is probably the right thing but this will do for now.
pub fn from_u8(n: u8) -> Option<BlendOp> {
match n {
0 => Some(BlendOp::Source),
1 => Some(BlendOp::Over),
_ => None,
}
}
}
impl fmt::Display for BlendOp {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match *self {
BlendOp::Source => "BLEND_OP_SOURCE",
BlendOp::Over => "BLEND_OP_OVER",
};
write!(f, "{}", name)
}
}
/// Frame control information
#[derive(Clone, Copy, Debug)]
pub struct FrameControl {
/// Sequence number of the animation chunk, starting from 0
pub sequence_number: u32,
/// Width of the following frame
pub width: u32,
/// Height of the following frame
pub height: u32,
/// X position at which to render the following frame
pub x_offset: u32,
/// Y position at which to render the following frame
pub y_offset: u32,
/// Frame delay fraction numerator
pub delay_num: u16,
/// Frame delay fraction denominator
pub delay_den: u16,
/// Type of frame area disposal to be done after rendering this frame
pub dispose_op: DisposeOp,
/// Type of frame area rendering for this frame
pub blend_op: BlendOp,
}
impl Default for FrameControl {
fn default() -> FrameControl {
FrameControl {
sequence_number: 0,
width: 0,
height: 0,
x_offset: 0,
y_offset: 0,
delay_num: 1,
delay_den: 30,
dispose_op: DisposeOp::None,
blend_op: BlendOp::Source,
}
}
}
impl FrameControl {
pub fn set_seq_num(&mut self, s: u32) {
self.sequence_number = s;
}
pub fn inc_seq_num(&mut self, i: u32) {
self.sequence_number += i;
}
}
/// Animation control information
#[derive(Clone, Copy, Debug)]
pub struct AnimationControl {
/// Number of frames
pub num_frames: u32,
/// Number of times to loop this APNG. 0 indicates infinite looping.
pub num_plays: u32,
}
/// The type and strength of applied compression.
#[derive(Debug, Clone)]
pub enum Compression {
/// Default level
Default,
/// Fast minimal compression
Fast,
/// Higher compression level
///
/// Best in this context isn't actually the highest possible level
/// the encoder can do, but is meant to emulate the `Best` setting in the `Flate2`
/// library.
Best,
Huffman,
Rle,
}
/// PNG info struct
#[derive(Debug)]
pub struct Info {
pub width: u32,
pub height: u32,
pub bit_depth: BitDepth,
pub color_type: ColorType,
pub interlaced: bool,
pub trns: Option<Vec<u8>>,
pub pixel_dims: Option<PixelDimensions>,
pub palette: Option<Vec<u8>>,
pub frame_control: Option<FrameControl>,
pub animation_control: Option<AnimationControl>,
pub compression: Compression,
pub filter: filter::FilterType,
}
impl Default for Info {
fn default() -> Info {
Info {
width: 0,
height: 0,
bit_depth: BitDepth::Eight,
color_type: ColorType::Grayscale,
interlaced: false,
palette: None,
trns: None,
pixel_dims: None,
frame_control: None,
animation_control: None,
            // Default to `deflate::Compression::Fast` and `filter::FilterType::Sub`
// to maintain backward compatible output.
compression: Compression::Fast,
filter: filter::FilterType::Sub,
}
}
}
impl Info {
/// Size of the image
pub fn size(&self) -> (u32, u32) {
(self.width, self.height)
}
/// Returns true if the image is an APNG image.
pub fn is_animated(&self) -> bool {
self.frame_control.is_some() && self.animation_control.is_some()
}
    /// Returns the animation control information of the image
pub fn animation_control(&self) -> Option<&AnimationControl> {
self.animation_control.as_ref()
}
/// Returns the frame control information of the current frame
pub fn frame_control(&self) -> Option<&FrameControl> {
self.frame_control.as_ref()
}
/// Returns the bits per pixel
pub fn bits_per_pixel(&self) -> usize {
self.color_type.samples() * self.bit_depth as usize
}
/// Returns the bytes per pixel
pub fn bytes_per_pixel(&self) -> usize {
self.color_type.samples() * ((self.bit_depth as usize + 7) >> 3)
}
/// Returns the number of bytes needed for one deinterlaced image
pub fn raw_bytes(&self) -> usize {
self.height as usize * self.raw_row_length()
}
/// Returns the number of bytes needed for one deinterlaced row
pub fn raw_row_length(&self) -> usize {
let bits = self.width as usize * self.color_type.samples() * self.bit_depth as usize;
let extra = bits % 8;
bits / 8
+ match extra {
0 => 0,
_ => 1,
}
+ 1 // filter method
}
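    // Worked example: a 10-pixel-wide RGB image at 8 bits per channel needs
    // 10 * 3 * 8 = 240 bits = 30 bytes of sample data plus 1 filter-method byte,
    // so this returns 31. A 3-pixel-wide 1-bit grayscale row needs 3 bits, which
    // rounds up to 1 byte, plus the filter byte: 2.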
/// Returns the number of bytes needed for one deinterlaced row of width `width`
pub fn raw_row_length_from_width(&self, width: u32) -> usize {
let bits = width as usize * self.color_type.samples() * self.bit_depth as usize;
let extra = bits % 8;
bits / 8
+ match extra {
0 => 0,
_ => 1,
}
+ 1 // filter method
}
}
bitflags! {
/// # Output transformations
///
    /// Only `IDENTITY` and `EXPAND | STRIP_ALPHA` can be used at the moment.
pub struct Transformations: u32 {
/// No transformation
const IDENTITY = 0x0000; // read and write */
/// Strip 16-bit samples to 8 bits
const STRIP_16 = 0x0001; // read only */
/// Discard the alpha channel
const STRIP_ALPHA = 0x0002; // read only */
/// Expand 1; 2 and 4-bit samples to bytes
const PACKING = 0x0004; // read and write */
/// Change order of packed pixels to LSB first
const PACKSWAP = 0x0008; // read and write */
/// Expand paletted images to RGB; expand grayscale images of
/// less than 8-bit depth to 8-bit depth; and expand tRNS chunks
/// to alpha channels.
const EXPAND = 0x0010; // read only */
/// Invert monochrome images
const INVERT_MONO = 0x0020; // read and write */
/// Normalize pixels to the sBIT depth
const SHIFT = 0x0040; // read and write */
/// Flip RGB to BGR; RGBA to BGRA
const BGR = 0x0080; // read and write */
/// Flip RGBA to ARGB or GA to AG
const SWAP_ALPHA = 0x0100; // read and write */
/// Byte-swap 16-bit samples
const SWAP_ENDIAN = 0x0200; // read and write */
/// Change alpha from opacity to transparency
const INVERT_ALPHA = 0x0400; // read and write */
const STRIP_FILLER = 0x0800; // write only */
const STRIP_FILLER_BEFORE = 0x0800; // write only
const STRIP_FILLER_AFTER = 0x1000; // write only */
const GRAY_TO_RGB = 0x2000; // read only */
const EXPAND_16 = 0x4000; // read only */
const SCALE_16 = 0x8000; // read only */
}
}
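// Usage sketch for the combination mentioned in the doc comment above:
//
// let t = Transformations::EXPAND | Transformations::STRIP_ALPHA;
// assert!(t.contains(Transformations::EXPAND));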
/// Mod to encapsulate the converters depending on the `deflate` crate.
///
/// Since this only contains trait impls, there is no need to make this public, they are simply
/// available when the mod is compiled as well.
#[cfg(feature = "png-encoding")]
mod deflate_convert {
extern crate deflate;
use super::Compression;
impl From<deflate::Compression> for Compression {
fn from(c: deflate::Compression) -> Self {
match c {
deflate::Compression::Default => Compression::Default,
deflate::Compression::Fast => Compression::Fast,
deflate::Compression::Best => Compression::Best,
}
}
}
impl From<Compression> for deflate::CompressionOptions {
fn from(c: Compression) -> Self {
match c {
Compression::Default => deflate::CompressionOptions::default(),
Compression::Fast => deflate::CompressionOptions::fast(),
Compression::Best => deflate::CompressionOptions::high(),
Compression::Huffman => deflate::CompressionOptions::huffman_only(),
Compression::Rle => deflate::CompressionOptions::rle(),
}
}
}
}
| 30.431373 | 107 | 0.574259 |
d9c9118bcffc27f97a5db60f3c9e43ea410b78e9 | 1,787 | //! A collection of oneshot functions for performing requests over the API.
//!
//! These are functions that each create a oneshot [`OwoClient`]. Use at your
//! own discretion: oneshots are more expensive with repeated use, since a new
//! client is constructed for every call.
//!
//! [`OwoClient`]: ../struct.OwoClient.html
use model::{FileUploadResponse, UploadedFile};
use super::{OwoClient, OwoRequester};
use ::Result;
/// Uploads a single file via the service.
///
/// Refer to [`OwoRequester::upload_file`] for more information.
///
/// # Errors
///
/// Returns [`Error::Reqwest`] if building the request fails.
///
/// [`Error::Reqwest`]: ../../../enum.Error.html#variant.Reqwest
/// [`OwoRequester::upload_file`]: ../trait.OwoRequester.html#tymethod.upload_file
#[inline]
pub fn upload_file(key: &str, file: Vec<u8>) -> Result<FileUploadResponse> {
    OwoClient::new(key)?.upload_file(file)
}
/// Uploads multiple files via the service.
///
/// See [`OwoRequester::upload_files`] for more information.
///
/// # Errors
///
/// Returns [`Error::Reqwest`] if building the request fails.
///
/// [`Error::Reqwest`]: ../../../enum.Error.html#variant.Reqwest
/// [`OwoRequester::upload_files`]: ../trait.OwoRequester.html#tymethod.upload_files
#[inline]
pub fn upload_files(key: &str, files: Vec<Vec<u8>>) -> Result<FileUploadResponse> {
    OwoClient::new(key)?.upload_files(files)
}
/// Shortens a URL via the service.
///
/// See [`OwoRequester`] for more information.
///
/// # Errors
///
/// Returns [`Error::NativeTls`] if there was an error instantiating the client.
///
/// [`Error::NativeTls`]: ../../../enum.Error.html#variant.NativeTls
/// [`OwoRequester`]: ../trait.OwoRequester.html
#[inline]
pub fn shorten_url(key: &str, url: &str) -> Result<String> {
OwoClient::new(key)?.shorten_url(url)
}
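// Usage sketch for the oneshot helpers above (the key and inputs are placeholders):
//
// let bytes = std::fs::read("image.png")?;
// let uploaded = upload_file("my-api-key", bytes)?;
// let short = shorten_url("my-api-key", "https://example.com/some/very/long/path")?;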
| 31.910714 | 91 | 0.688304 |
dec8699f46e3b3542b67f6f22377d0292b08f691 | 1,799 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test `?Sized` local variables.
trait T {}
fn f1<W: ?Sized, X: ?Sized, Y: ?Sized, Z: ?Sized>(x: &X) {
let _: W; // <-- this is OK, no bindings created, no initializer.
    let _: (isize, (X, isize)); //~ERROR `X: std::marker::Sized` is not satisfied
let y: Y; //~ERROR `Y: std::marker::Sized` is not satisfied
let y: (isize, (Z, usize)); //~ERROR `Z: std::marker::Sized` is not satisfied
}
fn f2<X: ?Sized, Y: ?Sized>(x: &X) {
let y: X; //~ERROR `X: std::marker::Sized` is not satisfied
let y: (isize, (Y, isize)); //~ERROR `Y: std::marker::Sized` is not satisfied
}
fn f3<X: ?Sized>(x1: Box<X>, x2: Box<X>, x3: Box<X>) {
let y: X = *x1; //~ERROR `X: std::marker::Sized` is not satisfied
let y = *x2; //~ERROR `X: std::marker::Sized` is not satisfied
let (y, z) = (*x3, 4); //~ERROR `X: std::marker::Sized` is not satisfied
}
fn f4<X: ?Sized + T>(x1: Box<X>, x2: Box<X>, x3: Box<X>) {
let y: X = *x1; //~ERROR `X: std::marker::Sized` is not satisfied
let y = *x2; //~ERROR `X: std::marker::Sized` is not satisfied
let (y, z) = (*x3, 4); //~ERROR `X: std::marker::Sized` is not satisfied
}
fn g1<X: ?Sized>(x: X) {} //~ERROR `X: std::marker::Sized` is not satisfied
fn g2<X: ?Sized + T>(x: X) {} //~ERROR `X: std::marker::Sized` is not satisfied
pub fn main() {
}
| 42.833333 | 81 | 0.609783 |
e9b42d54172a7b9c2e7350fc0d704d113a7bbd4b | 5,300 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::convert::TryInto;
use std::time::Duration;
use common_arrow::arrow_format::flight::data::Action;
use common_arrow::arrow_format::flight::data::BasicAuth;
use common_arrow::arrow_format::flight::data::HandshakeRequest;
use common_arrow::arrow_format::flight::service::flight_service_client::FlightServiceClient;
use common_exception::ErrorCode;
use common_exception::Result;
use common_flight_rpc::ConnectionFactory;
use common_flight_rpc::FlightClientTlsConfig;
use common_tracing::tracing;
use futures::stream;
use futures::StreamExt;
use prost::Message;
use serde::de::DeserializeOwned;
use tonic::codegen::InterceptedService;
use tonic::metadata::MetadataValue;
use tonic::service::Interceptor;
use tonic::transport::Channel;
use tonic::Request;
use crate::flight_action::MetaFlightAction;
use crate::flight_action::RequestFor;
use crate::flight_client_conf::MetaFlightClientConf;
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct MetaFlightClient {
token: Vec<u8>,
pub(crate) client: FlightServiceClient<InterceptedService<Channel, AuthInterceptor>>,
}
const AUTH_TOKEN_KEY: &str = "auth-token-bin";
impl MetaFlightClient {
pub async fn try_new(conf: &MetaFlightClientConf) -> Result<MetaFlightClient> {
Self::with_tls_conf(
&conf.meta_service_config.address,
&conf.meta_service_config.username,
&conf.meta_service_config.password,
Some(Duration::from_secs(conf.client_timeout_in_second)),
conf.meta_service_config.tls_conf.clone(),
)
.await
}
#[tracing::instrument(level = "debug", skip(password))]
pub async fn try_create(addr: &str, username: &str, password: &str) -> Result<Self> {
Self::with_tls_conf(addr, username, password, None, None).await
}
#[tracing::instrument(level = "debug", skip(password))]
pub async fn with_tls_conf(
addr: &str,
username: &str,
password: &str,
timeout: Option<Duration>,
conf: Option<FlightClientTlsConfig>,
) -> Result<Self> {
let res = ConnectionFactory::create_flight_channel(addr, timeout, conf);
tracing::debug!("connecting to {}, res: {:?}", addr, res);
let channel = res?;
let mut client = FlightServiceClient::new(channel.clone());
let token = MetaFlightClient::handshake(&mut client, username, password).await?;
let client = {
let token = token.clone();
FlightServiceClient::with_interceptor(channel, AuthInterceptor { token })
};
let rx = Self { token, client };
Ok(rx)
}
/// Handshake.
#[tracing::instrument(level = "debug", skip(client, password))]
async fn handshake(
client: &mut FlightServiceClient<Channel>,
username: &str,
password: &str,
) -> Result<Vec<u8>> {
let auth = BasicAuth {
username: username.to_string(),
password: password.to_string(),
};
let mut payload = vec![];
auth.encode(&mut payload)?;
let req = Request::new(stream::once(async {
HandshakeRequest {
payload,
..HandshakeRequest::default()
}
}));
let rx = client.handshake(req).await?;
let mut rx = rx.into_inner();
let resp = rx.next().await.expect("Must respond from handshake")?;
let token = resp.payload;
Ok(token)
}
#[tracing::instrument(level = "debug", skip(self, v))]
pub(crate) async fn do_action<T, R>(&self, v: T) -> Result<R>
where
T: RequestFor<Reply = R>,
T: Into<MetaFlightAction>,
R: DeserializeOwned,
{
let act: MetaFlightAction = v.into();
let req: Request<Action> = (&act).try_into()?;
let req = common_tracing::inject_span_to_tonic_request(req);
let mut stream = self.client.clone().do_action(req).await?.into_inner();
match stream.message().await? {
None => Err(ErrorCode::EmptyData(format!(
"Can not receive data from dfs flight server, action: {:?}",
act
))),
Some(resp) => {
let v = serde_json::from_slice::<R>(&resp.body)?;
Ok(v)
}
}
}
}
#[derive(Clone)]
pub struct AuthInterceptor {
pub token: Vec<u8>,
}
impl Interceptor for AuthInterceptor {
fn call(
&mut self,
mut req: tonic::Request<()>,
) -> std::result::Result<tonic::Request<()>, tonic::Status> {
let metadata = req.metadata_mut();
metadata.insert_bin(AUTH_TOKEN_KEY, MetadataValue::from_bytes(&self.token));
Ok(req)
}
}
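// Usage sketch (the address and credentials below are placeholders):
//
// let client = MetaFlightClient::try_create("127.0.0.1:9191", "root", "root").await?;
// let reply = client.do_action(some_request).await?; // `some_request: impl RequestFor + Into<MetaFlightAction>`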
| 32.716049 | 92 | 0.637736 |
8a61a58591fe9b0a74ae9550d8eafa09050a21e0 | 38,432 | #![allow(unused_imports, non_camel_case_types)]
use crate::models::r4b::Age::Age;
use crate::models::r4b::Annotation::Annotation;
use crate::models::r4b::CodeableConcept::CodeableConcept;
use crate::models::r4b::Element::Element;
use crate::models::r4b::Extension::Extension;
use crate::models::r4b::Identifier::Identifier;
use crate::models::r4b::Meta::Meta;
use crate::models::r4b::Narrative::Narrative;
use crate::models::r4b::Period::Period;
use crate::models::r4b::Procedure_FocalDevice::Procedure_FocalDevice;
use crate::models::r4b::Procedure_Performer::Procedure_Performer;
use crate::models::r4b::Range::Range;
use crate::models::r4b::Reference::Reference;
use crate::models::r4b::ResourceList::ResourceList;
use serde_json::json;
use serde_json::value::Value;
use std::borrow::Cow;
/// An action that is or was performed on or for a patient. This can be a physical
/// intervention like an operation, or less invasive like long term services,
/// counseling, or hypnotherapy.
#[derive(Debug)]
pub struct Procedure<'a> {
pub(crate) value: Cow<'a, Value>,
}
impl Procedure<'_> {
pub fn new(value: &Value) -> Procedure {
Procedure {
value: Cow::Borrowed(value),
}
}
pub fn to_json(&self) -> Value {
(*self.value).clone()
}
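    // Usage sketch with a minimal, illustrative JSON value:
    //
    // let value = json!({ "resourceType": "Procedure", "status": "completed" });
    // let procedure = Procedure::new(&value);
    // assert!(procedure.code().is_none());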
/// Extensions for implicitRules
pub fn _implicit_rules(&self) -> Option<Element> {
if let Some(val) = self.value.get("_implicitRules") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for instantiatesUri
pub fn _instantiates_uri(&self) -> Option<Vec<Element>> {
if let Some(Value::Array(val)) = self.value.get("_instantiatesUri") {
return Some(
val.into_iter()
.map(|e| Element {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Extensions for language
pub fn _language(&self) -> Option<Element> {
if let Some(val) = self.value.get("_language") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for performedDateTime
pub fn _performed_date_time(&self) -> Option<Element> {
if let Some(val) = self.value.get("_performedDateTime") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for performedString
pub fn _performed_string(&self) -> Option<Element> {
if let Some(val) = self.value.get("_performedString") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for status
pub fn _status(&self) -> Option<Element> {
if let Some(val) = self.value.get("_status") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Individual who is making the procedure statement.
pub fn asserter(&self) -> Option<Reference> {
if let Some(val) = self.value.get("asserter") {
return Some(Reference {
value: Cow::Borrowed(val),
});
}
return None;
}
/// A reference to a resource that contains details of the request for this procedure.
pub fn based_on(&self) -> Option<Vec<Reference>> {
if let Some(Value::Array(val)) = self.value.get("basedOn") {
return Some(
val.into_iter()
.map(|e| Reference {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Detailed and structured anatomical location information. Multiple locations are
/// allowed - e.g. multiple punch biopsies of a lesion.
pub fn body_site(&self) -> Option<Vec<CodeableConcept>> {
if let Some(Value::Array(val)) = self.value.get("bodySite") {
return Some(
val.into_iter()
.map(|e| CodeableConcept {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// A code that classifies the procedure for searching, sorting and display purposes
/// (e.g. "Surgical Procedure").
pub fn category(&self) -> Option<CodeableConcept> {
if let Some(val) = self.value.get("category") {
return Some(CodeableConcept {
value: Cow::Borrowed(val),
});
}
return None;
}
/// The specific procedure that is performed. Use text if the exact nature of the
/// procedure cannot be coded (e.g. "Laparoscopic Appendectomy").
pub fn code(&self) -> Option<CodeableConcept> {
if let Some(val) = self.value.get("code") {
return Some(CodeableConcept {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Any complications that occurred during the procedure, or in the immediate post-
/// performance period. These are generally tracked separately from the notes, which
/// will typically describe the procedure itself rather than any 'post procedure'
/// issues.
pub fn complication(&self) -> Option<Vec<CodeableConcept>> {
if let Some(Value::Array(val)) = self.value.get("complication") {
return Some(
val.into_iter()
.map(|e| CodeableConcept {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Any complications that occurred during the procedure, or in the immediate post-
/// performance period.
pub fn complication_detail(&self) -> Option<Vec<Reference>> {
if let Some(Value::Array(val)) = self.value.get("complicationDetail") {
return Some(
val.into_iter()
.map(|e| Reference {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// These resources do not have an independent existence apart from the resource that
/// contains them - they cannot be identified independently, and nor can they have
/// their own independent transaction scope.
pub fn contained(&self) -> Option<Vec<ResourceList>> {
if let Some(Value::Array(val)) = self.value.get("contained") {
return Some(
val.into_iter()
.map(|e| ResourceList {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The Encounter during which this Procedure was created or performed or to which the
/// creation of this record is tightly associated.
pub fn encounter(&self) -> Option<Reference> {
if let Some(val) = self.value.get("encounter") {
return Some(Reference {
value: Cow::Borrowed(val),
});
}
return None;
}
/// May be used to represent additional information that is not part of the basic
/// definition of the resource. To make the use of extensions safe and manageable,
/// there is a strict set of governance applied to the definition and use of
/// extensions. Though any implementer can define an extension, there is a set of
/// requirements that SHALL be met as part of the definition of the extension.
pub fn extension(&self) -> Option<Vec<Extension>> {
if let Some(Value::Array(val)) = self.value.get("extension") {
return Some(
val.into_iter()
.map(|e| Extension {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// A device that is implanted, removed or otherwise manipulated (calibration, battery
/// replacement, fitting a prosthesis, attaching a wound-vac, etc.) as a focal portion
/// of the Procedure.
pub fn focal_device(&self) -> Option<Vec<Procedure_FocalDevice>> {
if let Some(Value::Array(val)) = self.value.get("focalDevice") {
return Some(
val.into_iter()
.map(|e| Procedure_FocalDevice {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// If the procedure required specific follow up - e.g. removal of sutures. The follow
/// up may be represented as a simple note or could potentially be more complex, in
/// which case the CarePlan resource can be used.
pub fn follow_up(&self) -> Option<Vec<CodeableConcept>> {
if let Some(Value::Array(val)) = self.value.get("followUp") {
return Some(
val.into_iter()
.map(|e| CodeableConcept {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The logical id of the resource, as used in the URL for the resource. Once
/// assigned, this value never changes.
pub fn id(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("id") {
return Some(string);
}
return None;
}
/// Business identifiers assigned to this procedure by the performer or other systems
/// which remain constant as the resource is updated and is propagated from server
/// to server.
pub fn identifier(&self) -> Option<Vec<Identifier>> {
if let Some(Value::Array(val)) = self.value.get("identifier") {
return Some(
val.into_iter()
.map(|e| Identifier {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// A reference to a set of rules that were followed when the resource was
/// constructed, and which must be understood when processing the content. Often, this
/// is a reference to an implementation guide that defines the special rules along
/// with other profiles etc.
pub fn implicit_rules(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("implicitRules") {
return Some(string);
}
return None;
}
/// The URL pointing to a FHIR-defined protocol, guideline, order set or other
/// definition that is adhered to in whole or in part by this Procedure.
pub fn instantiates_canonical(&self) -> Option<Vec<&str>> {
if let Some(Value::Array(val)) = self.value.get("instantiatesCanonical") {
return Some(
val.into_iter()
.map(|e| e.as_str().unwrap())
.collect::<Vec<_>>(),
);
}
return None;
}
/// The URL pointing to an externally maintained protocol, guideline, order set or
/// other definition that is adhered to in whole or in part by this Procedure.
pub fn instantiates_uri(&self) -> Option<Vec<&str>> {
if let Some(Value::Array(val)) = self.value.get("instantiatesUri") {
return Some(
val.into_iter()
.map(|e| e.as_str().unwrap())
.collect::<Vec<_>>(),
);
}
return None;
}
/// The base language in which the resource is written.
pub fn language(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("language") {
return Some(string);
}
return None;
}
/// The location where the procedure actually happened. E.g. a newborn at home, a
/// tracheostomy at a restaurant.
pub fn location(&self) -> Option<Reference> {
if let Some(val) = self.value.get("location") {
return Some(Reference {
value: Cow::Borrowed(val),
});
}
return None;
}
/// The metadata about the resource. This is content that is maintained by the
/// infrastructure. Changes to the content might not always be associated with version
/// changes to the resource.
pub fn meta(&self) -> Option<Meta> {
if let Some(val) = self.value.get("meta") {
return Some(Meta {
value: Cow::Borrowed(val),
});
}
return None;
}
/// May be used to represent additional information that is not part of the basic
/// definition of the resource and that modifies the understanding of the element
/// that contains it and/or the understanding of the containing element's descendants.
/// Usually modifier elements provide negation or qualification. To make the use of
/// extensions safe and manageable, there is a strict set of governance applied to
/// the definition and use of extensions. Though any implementer is allowed to define
/// an extension, there is a set of requirements that SHALL be met as part of the
/// definition of the extension. Applications processing a resource are required to
/// check for modifier extensions. Modifier extensions SHALL NOT change the meaning
/// of any elements on Resource or DomainResource (including cannot change the meaning
/// of modifierExtension itself).
pub fn modifier_extension(&self) -> Option<Vec<Extension>> {
if let Some(Value::Array(val)) = self.value.get("modifierExtension") {
return Some(
val.into_iter()
.map(|e| Extension {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Any other notes and comments about the procedure.
pub fn note(&self) -> Option<Vec<Annotation>> {
if let Some(Value::Array(val)) = self.value.get("note") {
return Some(
val.into_iter()
.map(|e| Annotation {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The outcome of the procedure - did it resolve the reasons for the procedure being
/// performed?
pub fn outcome(&self) -> Option<CodeableConcept> {
if let Some(val) = self.value.get("outcome") {
return Some(CodeableConcept {
value: Cow::Borrowed(val),
});
}
return None;
}
/// A larger event of which this particular procedure is a component or step.
pub fn part_of(&self) -> Option<Vec<Reference>> {
if let Some(Value::Array(val)) = self.value.get("partOf") {
return Some(
val.into_iter()
.map(|e| Reference {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
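    /// FHIR models `performed[x]` as a choice element: at most one of `performedAge`,
    /// `performedDateTime`, `performedPeriod`, `performedRange` or `performedString`
    /// is expected to be present on a given instance, and each accessor below reads
    /// only its own variant.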
/// Estimated or actual date, date-time, period, or age when the procedure was
/// performed. Allows a period to support complex procedures that span more than one
/// date, and also allows for the length of the procedure to be captured.
pub fn performed_age(&self) -> Option<Age> {
if let Some(val) = self.value.get("performedAge") {
return Some(Age {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Estimated or actual date, date-time, period, or age when the procedure was
/// performed. Allows a period to support complex procedures that span more than one
/// date, and also allows for the length of the procedure to be captured.
pub fn performed_date_time(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("performedDateTime") {
return Some(string);
}
return None;
}
/// Estimated or actual date, date-time, period, or age when the procedure was
/// performed. Allows a period to support complex procedures that span more than one
/// date, and also allows for the length of the procedure to be captured.
pub fn performed_period(&self) -> Option<Period> {
if let Some(val) = self.value.get("performedPeriod") {
return Some(Period {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Estimated or actual date, date-time, period, or age when the procedure was
/// performed. Allows a period to support complex procedures that span more than one
/// date, and also allows for the length of the procedure to be captured.
pub fn performed_range(&self) -> Option<Range> {
if let Some(val) = self.value.get("performedRange") {
return Some(Range {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Estimated or actual date, date-time, period, or age when the procedure was
/// performed. Allows a period to support complex procedures that span more than one
/// date, and also allows for the length of the procedure to be captured.
pub fn performed_string(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("performedString") {
return Some(string);
}
return None;
}
/// Limited to "real" people rather than equipment.
pub fn performer(&self) -> Option<Vec<Procedure_Performer>> {
if let Some(Value::Array(val)) = self.value.get("performer") {
return Some(
val.into_iter()
.map(|e| Procedure_Performer {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The coded reason why the procedure was performed. This may be a coded entity of
/// some type, or may simply be present as text.
pub fn reason_code(&self) -> Option<Vec<CodeableConcept>> {
if let Some(Value::Array(val)) = self.value.get("reasonCode") {
return Some(
val.into_iter()
.map(|e| CodeableConcept {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The justification of why the procedure was performed.
pub fn reason_reference(&self) -> Option<Vec<Reference>> {
if let Some(Value::Array(val)) = self.value.get("reasonReference") {
return Some(
val.into_iter()
.map(|e| Reference {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Individual who recorded the record and takes responsibility for its content.
pub fn recorder(&self) -> Option<Reference> {
if let Some(val) = self.value.get("recorder") {
return Some(Reference {
value: Cow::Borrowed(val),
});
}
return None;
}
/// This could be a histology result, pathology report, surgical report, etc.
pub fn report(&self) -> Option<Vec<Reference>> {
if let Some(Value::Array(val)) = self.value.get("report") {
return Some(
val.into_iter()
.map(|e| Reference {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
    /// A code specifying the state of the procedure. Generally, this will be the
    /// in-progress or completed state.
pub fn status(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("status") {
return Some(string);
}
return None;
}
/// Captures the reason for the current state of the procedure.
pub fn status_reason(&self) -> Option<CodeableConcept> {
if let Some(val) = self.value.get("statusReason") {
return Some(CodeableConcept {
value: Cow::Borrowed(val),
});
}
return None;
}
/// The person, animal or group on which the procedure was performed.
pub fn subject(&self) -> Reference {
Reference {
value: Cow::Borrowed(&self.value["subject"]),
}
}
/// A human-readable narrative that contains a summary of the resource and can be used
/// to represent the content of the resource to a human. The narrative need not encode
/// all the structured data, but is required to contain sufficient detail to make it
/// "clinically safe" for a human to just read the narrative. Resource definitions
/// may define what content should be represented in the narrative to ensure clinical
/// safety.
pub fn text(&self) -> Option<Narrative> {
if let Some(val) = self.value.get("text") {
return Some(Narrative {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Identifies coded items that were used as part of the procedure.
pub fn used_code(&self) -> Option<Vec<CodeableConcept>> {
if let Some(Value::Array(val)) = self.value.get("usedCode") {
return Some(
val.into_iter()
.map(|e| CodeableConcept {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Identifies medications, devices and any other substance used as part of the
/// procedure.
pub fn used_reference(&self) -> Option<Vec<Reference>> {
if let Some(Value::Array(val)) = self.value.get("usedReference") {
return Some(
val.into_iter()
.map(|e| Reference {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
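    /// Returns true when this Procedure validates: the required `subject` reference is
    /// always checked, and every optional field is validated only when it is present.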
pub fn validate(&self) -> bool {
if let Some(_val) = self._implicit_rules() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._instantiates_uri() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self._language() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._performed_date_time() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._performed_string() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._status() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.asserter() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.based_on() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.body_site() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.category() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.code() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.complication() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.complication_detail() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.contained() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.encounter() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.extension() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.focal_device() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.follow_up() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.id() {}
if let Some(_val) = self.identifier() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.implicit_rules() {}
if let Some(_val) = self.instantiates_canonical() {
_val.into_iter().for_each(|_e| {});
}
if let Some(_val) = self.instantiates_uri() {
_val.into_iter().for_each(|_e| {});
}
if let Some(_val) = self.language() {}
if let Some(_val) = self.location() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.meta() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.modifier_extension() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.note() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.outcome() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.part_of() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.performed_age() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.performed_date_time() {}
if let Some(_val) = self.performed_period() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.performed_range() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.performed_string() {}
if let Some(_val) = self.performer() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.reason_code() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.reason_reference() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.recorder() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.report() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.status() {}
if let Some(_val) = self.status_reason() {
if !_val.validate() {
return false;
}
}
if !self.subject().validate() {
return false;
}
if let Some(_val) = self.text() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.used_code() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.used_reference() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
return true;
}
}
#[derive(Debug)]
pub struct ProcedureBuilder {
pub(crate) value: Value,
}
impl ProcedureBuilder {
pub fn build(&self) -> Procedure {
Procedure {
value: Cow::Owned(self.value.clone()),
}
}
pub fn with(existing: Procedure) -> ProcedureBuilder {
ProcedureBuilder {
value: (*existing.value).clone(),
}
}
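    /// Minimal usage sketch (illustrative only; `subject` stands for any `Reference`
    /// value obtained elsewhere, and the snippet is not compiled as a doctest):
    ///
    /// ```ignore
    /// let procedure = ProcedureBuilder::new(subject)
    ///     .status("completed")
    ///     .build();
    /// assert_eq!(procedure.status(), Some("completed"));
    /// ```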
pub fn new(subject: Reference) -> ProcedureBuilder {
let mut __value: Value = json!({});
__value["subject"] = json!(subject.value);
return ProcedureBuilder { value: __value };
}
pub fn _implicit_rules<'a>(&'a mut self, val: Element) -> &'a mut ProcedureBuilder {
self.value["_implicitRules"] = json!(val.value);
return self;
}
pub fn _instantiates_uri<'a>(&'a mut self, val: Vec<Element>) -> &'a mut ProcedureBuilder {
self.value["_instantiatesUri"] =
json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn _language<'a>(&'a mut self, val: Element) -> &'a mut ProcedureBuilder {
self.value["_language"] = json!(val.value);
return self;
}
pub fn _performed_date_time<'a>(&'a mut self, val: Element) -> &'a mut ProcedureBuilder {
self.value["_performedDateTime"] = json!(val.value);
return self;
}
pub fn _performed_string<'a>(&'a mut self, val: Element) -> &'a mut ProcedureBuilder {
self.value["_performedString"] = json!(val.value);
return self;
}
pub fn _status<'a>(&'a mut self, val: Element) -> &'a mut ProcedureBuilder {
self.value["_status"] = json!(val.value);
return self;
}
pub fn asserter<'a>(&'a mut self, val: Reference) -> &'a mut ProcedureBuilder {
self.value["asserter"] = json!(val.value);
return self;
}
pub fn based_on<'a>(&'a mut self, val: Vec<Reference>) -> &'a mut ProcedureBuilder {
self.value["basedOn"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn body_site<'a>(&'a mut self, val: Vec<CodeableConcept>) -> &'a mut ProcedureBuilder {
self.value["bodySite"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn category<'a>(&'a mut self, val: CodeableConcept) -> &'a mut ProcedureBuilder {
self.value["category"] = json!(val.value);
return self;
}
pub fn code<'a>(&'a mut self, val: CodeableConcept) -> &'a mut ProcedureBuilder {
self.value["code"] = json!(val.value);
return self;
}
pub fn complication<'a>(&'a mut self, val: Vec<CodeableConcept>) -> &'a mut ProcedureBuilder {
self.value["complication"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn complication_detail<'a>(&'a mut self, val: Vec<Reference>) -> &'a mut ProcedureBuilder {
self.value["complicationDetail"] =
json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn contained<'a>(&'a mut self, val: Vec<ResourceList>) -> &'a mut ProcedureBuilder {
self.value["contained"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn encounter<'a>(&'a mut self, val: Reference) -> &'a mut ProcedureBuilder {
self.value["encounter"] = json!(val.value);
return self;
}
pub fn extension<'a>(&'a mut self, val: Vec<Extension>) -> &'a mut ProcedureBuilder {
self.value["extension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn focal_device<'a>(
&'a mut self,
val: Vec<Procedure_FocalDevice>,
) -> &'a mut ProcedureBuilder {
self.value["focalDevice"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn follow_up<'a>(&'a mut self, val: Vec<CodeableConcept>) -> &'a mut ProcedureBuilder {
self.value["followUp"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn id<'a>(&'a mut self, val: &str) -> &'a mut ProcedureBuilder {
self.value["id"] = json!(val);
return self;
}
pub fn identifier<'a>(&'a mut self, val: Vec<Identifier>) -> &'a mut ProcedureBuilder {
self.value["identifier"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn implicit_rules<'a>(&'a mut self, val: &str) -> &'a mut ProcedureBuilder {
self.value["implicitRules"] = json!(val);
return self;
}
pub fn instantiates_canonical<'a>(&'a mut self, val: Vec<&str>) -> &'a mut ProcedureBuilder {
self.value["instantiatesCanonical"] = json!(val);
return self;
}
pub fn instantiates_uri<'a>(&'a mut self, val: Vec<&str>) -> &'a mut ProcedureBuilder {
self.value["instantiatesUri"] = json!(val);
return self;
}
pub fn language<'a>(&'a mut self, val: &str) -> &'a mut ProcedureBuilder {
self.value["language"] = json!(val);
return self;
}
pub fn location<'a>(&'a mut self, val: Reference) -> &'a mut ProcedureBuilder {
self.value["location"] = json!(val.value);
return self;
}
pub fn meta<'a>(&'a mut self, val: Meta) -> &'a mut ProcedureBuilder {
self.value["meta"] = json!(val.value);
return self;
}
pub fn modifier_extension<'a>(&'a mut self, val: Vec<Extension>) -> &'a mut ProcedureBuilder {
self.value["modifierExtension"] =
json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn note<'a>(&'a mut self, val: Vec<Annotation>) -> &'a mut ProcedureBuilder {
self.value["note"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn outcome<'a>(&'a mut self, val: CodeableConcept) -> &'a mut ProcedureBuilder {
self.value["outcome"] = json!(val.value);
return self;
}
pub fn part_of<'a>(&'a mut self, val: Vec<Reference>) -> &'a mut ProcedureBuilder {
self.value["partOf"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn performed_age<'a>(&'a mut self, val: Age) -> &'a mut ProcedureBuilder {
self.value["performedAge"] = json!(val.value);
return self;
}
pub fn performed_date_time<'a>(&'a mut self, val: &str) -> &'a mut ProcedureBuilder {
self.value["performedDateTime"] = json!(val);
return self;
}
pub fn performed_period<'a>(&'a mut self, val: Period) -> &'a mut ProcedureBuilder {
self.value["performedPeriod"] = json!(val.value);
return self;
}
pub fn performed_range<'a>(&'a mut self, val: Range) -> &'a mut ProcedureBuilder {
self.value["performedRange"] = json!(val.value);
return self;
}
pub fn performed_string<'a>(&'a mut self, val: &str) -> &'a mut ProcedureBuilder {
self.value["performedString"] = json!(val);
return self;
}
pub fn performer<'a>(&'a mut self, val: Vec<Procedure_Performer>) -> &'a mut ProcedureBuilder {
self.value["performer"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn reason_code<'a>(&'a mut self, val: Vec<CodeableConcept>) -> &'a mut ProcedureBuilder {
self.value["reasonCode"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn reason_reference<'a>(&'a mut self, val: Vec<Reference>) -> &'a mut ProcedureBuilder {
self.value["reasonReference"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn recorder<'a>(&'a mut self, val: Reference) -> &'a mut ProcedureBuilder {
self.value["recorder"] = json!(val.value);
return self;
}
pub fn report<'a>(&'a mut self, val: Vec<Reference>) -> &'a mut ProcedureBuilder {
self.value["report"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn status<'a>(&'a mut self, val: &str) -> &'a mut ProcedureBuilder {
self.value["status"] = json!(val);
return self;
}
pub fn status_reason<'a>(&'a mut self, val: CodeableConcept) -> &'a mut ProcedureBuilder {
self.value["statusReason"] = json!(val.value);
return self;
}
pub fn text<'a>(&'a mut self, val: Narrative) -> &'a mut ProcedureBuilder {
self.value["text"] = json!(val.value);
return self;
}
pub fn used_code<'a>(&'a mut self, val: Vec<CodeableConcept>) -> &'a mut ProcedureBuilder {
self.value["usedCode"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn used_reference<'a>(&'a mut self, val: Vec<Reference>) -> &'a mut ProcedureBuilder {
self.value["usedReference"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
}
| 35.453875 | 100 | 0.533254 |
fe04224aeaf0483faeadbc0fecc10d5ed127cf62 | 1,741 | use protobuf::compiler_plugin::GenResult;
use protobuf::descriptor::FileDescriptorProto;
use protobuf::descriptorx;
use std::collections::HashMap;
use std::io;
use self::provider::*;
use self::stub::*;
pub mod name;
pub mod provider;
pub mod stub;
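/// Runs code generation for every file in `files_to_generate` that declares at least
/// one service, producing a `<proto module>_copra.rs` companion per file.
///
/// A rough sketch of how the output is typically consumed (writing the results to disk
/// here is an assumption for illustration, not something this module does itself):
///
/// ```ignore
/// let results = gen(&file_descriptors, &files_to_generate)?;
/// for r in results {
///     std::fs::write(&r.name, &r.content)?;
/// }
/// ```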
pub fn gen(
file_descriptors: &[FileDescriptorProto],
files_to_generate: &[String],
) -> io::Result<Vec<GenResult>> {
let files_map: HashMap<_, _> = file_descriptors.iter().map(|f| (f.get_name(), f)).collect();
let root_scope = descriptorx::RootScope { file_descriptors };
let mut results = Vec::new();
for file_name in files_to_generate {
let file = files_map[file_name.as_str()];
if file.get_service().is_empty() {
continue;
}
results.push(gen_file(file, &root_scope)?);
}
Ok(results)
}
fn gen_file(file: &FileDescriptorProto, root: &descriptorx::RootScope) -> io::Result<GenResult> {
let base_name = descriptorx::proto_path_to_rust_mod(file.get_name());
let mut snippets = Vec::new();
for service in file.get_service() {
snippets.push(generate_service_trait(service, root)?);
snippets.push(generate_registrant_basic(service)?);
snippets.push(generate_registrant_service(service, root)?);
snippets.push(generate_client_stub(service, root)?);
}
let content = snippets.iter().fold(generate_file_header(), |acc, x| acc + "\n" + x);
Ok(GenResult {
name: base_name + "_copra.rs",
content: content.into(),
})
}
fn generate_file_header() -> String {
let header = "\
// This file is generated. Do not edit.
// @generated
#![cfg_attr(rustfmt, rustfmt_skip)]
#![allow(missing_docs)]
#![allow(dead_code)]
";
header.to_string()
}
| 26.378788 | 97 | 0.662837 |
e4b6687e00ba71bca2a2190936fc82734c573a90 | 227 | // build-pass
// only-x86_64
#![feature(target_feature_11)]
use std::arch::asm;
#[target_feature(enable = "avx")]
fn main() {
unsafe {
asm!(
"/* {} */",
out(ymm_reg) _,
);
}
}
| 13.352941 | 33 | 0.462555 |
d61670be4e48248540e84d4da7b32ea4d4e79004 | 37,213 | use num_cpus;
use std::cell::RefCell;
use std::ffi::CStr;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::{self, Error, ErrorKind, Read, Write};
use std::time::{Duration, SystemTime};
use std::{env, fs};
#[cfg(unix)]
use nix::errno::Errno;
#[cfg(all(unix, not(target_os = "redox")))]
use nix::pty::openpty;
#[cfg(unix)]
use nix::unistd::{self, Gid, Pid, Uid};
use num_traits::cast::ToPrimitive;
use bitflags::bitflags;
use crate::function::{IntoPyNativeFunc, PyFuncArgs};
use crate::obj::objbytes::PyBytesRef;
use crate::obj::objdict::PyDictRef;
use crate::obj::objint::{self, PyInt, PyIntRef};
use crate::obj::objiter;
use crate::obj::objset::PySet;
use crate::obj::objstr::{self, PyString, PyStringRef};
use crate::obj::objtype::{self, PyClassRef};
use crate::pyobject::{
ItemProtocol, PyClassImpl, PyObjectRef, PyRef, PyResult, PyValue, TryIntoRef, TypeProtocol,
};
use crate::vm::VirtualMachine;
#[cfg(unix)]
pub fn raw_file_number(handle: File) -> i64 {
use std::os::unix::io::IntoRawFd;
i64::from(handle.into_raw_fd())
}
#[cfg(unix)]
pub fn rust_file(raw_fileno: i64) -> File {
use std::os::unix::io::FromRawFd;
unsafe { File::from_raw_fd(raw_fileno as i32) }
}
#[cfg(windows)]
pub fn raw_file_number(handle: File) -> i64 {
use std::os::windows::io::IntoRawHandle;
handle.into_raw_handle() as i64
}
#[cfg(windows)]
pub fn rust_file(raw_fileno: i64) -> File {
use std::ffi::c_void;
use std::os::windows::io::FromRawHandle;
//This seems to work as expected but further testing is required.
unsafe { File::from_raw_handle(raw_fileno as *mut c_void) }
}
#[cfg(all(not(unix), not(windows)))]
pub fn rust_file(_raw_fileno: i64) -> File {
unimplemented!();
}
#[cfg(all(not(unix), not(windows)))]
pub fn raw_file_number(_handle: File) -> i64 {
unimplemented!();
}
fn make_path(_vm: &VirtualMachine, path: PyStringRef, dir_fd: &DirFd) -> PyStringRef {
if dir_fd.dir_fd.is_some() {
unimplemented!();
} else {
path
}
}
pub fn os_close(vm: &VirtualMachine, args: PyFuncArgs) -> PyResult {
arg_check!(vm, args, required = [(fileno, Some(vm.ctx.int_type()))]);
let raw_fileno = objint::get_value(&fileno);
//The File type automatically closes when it goes out of scope.
//To enable us to close these file descriptors (and hence prevent leaks)
//we seek to create the relevant File and simply let it pass out of scope!
rust_file(raw_fileno.to_i64().unwrap());
Ok(vm.get_none())
}
bitflags! {
pub struct FileCreationFlags: u32 {
// https://elixir.bootlin.com/linux/v4.8/source/include/uapi/asm-generic/fcntl.h
const O_RDONLY = 0o0000_0000;
const O_WRONLY = 0o0000_0001;
const O_RDWR = 0o0000_0002;
const O_CREAT = 0o0000_0100;
const O_EXCL = 0o0000_0200;
const O_APPEND = 0o0000_2000;
const O_NONBLOCK = 0o0000_4000;
}
}
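/// Translates the `FileCreationFlags` above into `std::fs::OpenOptions`. For example,
/// `O_WRONLY | O_CREAT` (0o001 | 0o100 = 0o101) opens the file for writing and creates
/// it when it does not already exist.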
pub fn os_open(vm: &VirtualMachine, args: PyFuncArgs) -> PyResult {
arg_check!(
vm,
args,
required = [
(name, Some(vm.ctx.str_type())),
(flags, Some(vm.ctx.int_type()))
],
optional = [
(_mode, Some(vm.ctx.int_type())),
(dir_fd, Some(vm.ctx.int_type()))
]
);
let name = name.clone().downcast::<PyString>().unwrap();
let dir_fd = if let Some(obj) = dir_fd {
DirFd {
dir_fd: Some(obj.clone().downcast::<PyInt>().unwrap()),
}
} else {
DirFd::default()
};
let fname = &make_path(vm, name, &dir_fd).value;
let flags = FileCreationFlags::from_bits(objint::get_value(flags).to_u32().unwrap())
.ok_or_else(|| vm.new_value_error("Unsupported flag".to_string()))?;
let mut options = &mut OpenOptions::new();
if flags.contains(FileCreationFlags::O_WRONLY) {
options = options.write(true);
} else if flags.contains(FileCreationFlags::O_RDWR) {
options = options.read(true).write(true);
} else {
options = options.read(true);
}
if flags.contains(FileCreationFlags::O_APPEND) {
options = options.append(true);
}
if flags.contains(FileCreationFlags::O_CREAT) {
if flags.contains(FileCreationFlags::O_EXCL) {
options = options.create_new(true);
} else {
options = options.create(true);
}
}
let handle = options
.open(&fname)
.map_err(|err| convert_io_error(vm, err))?;
Ok(vm.ctx.new_int(raw_file_number(handle)))
}
pub fn convert_io_error(vm: &VirtualMachine, err: io::Error) -> PyObjectRef {
let os_error = match err.kind() {
ErrorKind::NotFound => {
let exc_type = vm.ctx.exceptions.file_not_found_error.clone();
vm.new_exception(exc_type, err.to_string())
}
ErrorKind::PermissionDenied => {
let exc_type = vm.ctx.exceptions.permission_error.clone();
vm.new_exception(exc_type, err.to_string())
}
ErrorKind::AlreadyExists => {
let exc_type = vm.ctx.exceptions.file_exists_error.clone();
vm.new_exception(exc_type, err.to_string())
}
_ => vm.new_os_error(err.to_string()),
};
if let Some(errno) = err.raw_os_error() {
vm.set_attr(&os_error, "errno", vm.ctx.new_int(errno))
.unwrap();
}
os_error
}
#[cfg(unix)]
pub fn convert_nix_error(vm: &VirtualMachine, err: nix::Error) -> PyObjectRef {
let nix_error = match err {
nix::Error::InvalidPath => {
let exc_type = vm.ctx.exceptions.file_not_found_error.clone();
vm.new_exception(exc_type, err.to_string())
}
nix::Error::InvalidUtf8 => {
let exc_type = vm.ctx.exceptions.unicode_error.clone();
vm.new_exception(exc_type, err.to_string())
}
nix::Error::UnsupportedOperation => {
let exc_type = vm.ctx.exceptions.runtime_error.clone();
vm.new_exception(exc_type, err.to_string())
}
nix::Error::Sys(errno) => {
let exc_type = convert_nix_errno(vm, errno);
vm.new_exception(exc_type, err.to_string())
}
};
if let nix::Error::Sys(errno) = err {
vm.set_attr(&nix_error, "errno", vm.ctx.new_int(errno as i32))
.unwrap();
}
nix_error
}
#[cfg(unix)]
fn convert_nix_errno(vm: &VirtualMachine, errno: Errno) -> PyClassRef {
match errno {
Errno::EPERM => vm.ctx.exceptions.permission_error.clone(),
_ => vm.ctx.exceptions.os_error.clone(),
}
}
// Flags for os_access
bitflags! {
    pub struct AccessFlags: u8 {
const F_OK = 0;
const R_OK = 4;
const W_OK = 2;
const X_OK = 1;
}
}
#[cfg(unix)]
struct Permissions {
is_readable: bool,
is_writable: bool,
is_executable: bool,
}
#[cfg(unix)]
fn get_permissions(mode: u32) -> Permissions {
Permissions {
is_readable: mode & 4 != 0,
is_writable: mode & 2 != 0,
is_executable: mode & 1 != 0,
}
}
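/// Selects the permission triplet that applies to the calling process, following the
/// usual owner/group/other precedence. Worked example: for mode 0o754 the owner bits
/// are rwx (0o7), the group bits r-x (0o5) and the other bits r-- (0o4); which triplet
/// is returned depends on whether the process uid or one of its gids matches the file.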
#[cfg(unix)]
fn get_right_permission(
mode: u32,
file_owner: Uid,
file_group: Gid,
) -> Result<Permissions, nix::Error> {
let owner_mode = (mode & 0o700) >> 6;
let owner_permissions = get_permissions(owner_mode);
let group_mode = (mode & 0o070) >> 3;
let group_permissions = get_permissions(group_mode);
let others_mode = mode & 0o007;
let others_permissions = get_permissions(others_mode);
let user_id = nix::unistd::getuid();
let groups_ids = getgroups()?;
if file_owner == user_id {
Ok(owner_permissions)
} else if groups_ids.contains(&file_group) {
Ok(group_permissions)
} else {
Ok(others_permissions)
}
}
#[cfg(target_os = "macos")]
fn getgroups() -> nix::Result<Vec<Gid>> {
use libc::{c_int, gid_t};
use std::ptr;
let ret = unsafe { libc::getgroups(0, ptr::null_mut()) };
let mut groups = Vec::<Gid>::with_capacity(Errno::result(ret)? as usize);
    // The buffer reserved above is sized by the first getgroups(0, NULL) call, so a
    // single follow-up call is sufficient here.
    let ret = unsafe {
        libc::getgroups(
            groups.capacity() as c_int,
            groups.as_mut_ptr() as *mut gid_t,
        )
    };
    Errno::result(ret).map(|s| {
        unsafe { groups.set_len(s as usize) };
        groups
    })
}
#[cfg(target_os = "linux")]
fn getgroups() -> nix::Result<Vec<Gid>> {
nix::unistd::getgroups()
}
#[cfg(unix)]
fn os_access(path: PyStringRef, mode: u8, vm: &VirtualMachine) -> PyResult<bool> {
use std::os::unix::fs::MetadataExt;
let path = path.as_str();
let flags = AccessFlags::from_bits(mode).ok_or_else(|| {
vm.new_value_error(
"One of the flags is wrong, there are only 4 possibilities F_OK, R_OK, W_OK and X_OK"
.to_string(),
)
})?;
let metadata = fs::metadata(path);
// if it's only checking for F_OK
if flags == AccessFlags::F_OK {
return Ok(metadata.is_ok());
}
let metadata = metadata.map_err(|err| convert_io_error(vm, err))?;
let user_id = metadata.uid();
let group_id = metadata.gid();
let mode = metadata.mode();
let perm = get_right_permission(mode, Uid::from_raw(user_id), Gid::from_raw(group_id))
.map_err(|err| convert_nix_error(vm, err))?;
let r_ok = !flags.contains(AccessFlags::R_OK) || perm.is_readable;
let w_ok = !flags.contains(AccessFlags::W_OK) || perm.is_writable;
let x_ok = !flags.contains(AccessFlags::X_OK) || perm.is_executable;
Ok(r_ok && w_ok && x_ok)
}
fn os_error(vm: &VirtualMachine, args: PyFuncArgs) -> PyResult {
arg_check!(
vm,
args,
required = [],
optional = [(message, Some(vm.ctx.str_type()))]
);
let msg = if let Some(val) = message {
objstr::get_value(&val)
} else {
"".to_string()
};
Err(vm.new_os_error(msg))
}
fn os_fsync(fd: PyIntRef, vm: &VirtualMachine) -> PyResult<()> {
let file = rust_file(fd.as_bigint().to_i64().unwrap());
file.sync_all().map_err(|err| convert_io_error(vm, err))?;
// Avoid closing the fd
raw_file_number(file);
Ok(())
}
fn os_read(fd: PyIntRef, n: PyIntRef, vm: &VirtualMachine) -> PyResult {
let mut buffer = vec![0u8; n.as_bigint().to_usize().unwrap()];
let mut file = rust_file(fd.as_bigint().to_i64().unwrap());
file.read_exact(&mut buffer)
.map_err(|err| convert_io_error(vm, err))?;
// Avoid closing the fd
raw_file_number(file);
Ok(vm.ctx.new_bytes(buffer))
}
fn os_write(fd: PyIntRef, data: PyBytesRef, vm: &VirtualMachine) -> PyResult {
let mut file = rust_file(fd.as_bigint().to_i64().unwrap());
let written = file.write(&data).map_err(|err| convert_io_error(vm, err))?;
// Avoid closing the fd
raw_file_number(file);
Ok(vm.ctx.new_int(written))
}
fn os_remove(path: PyStringRef, dir_fd: DirFd, vm: &VirtualMachine) -> PyResult<()> {
let path = make_path(vm, path, &dir_fd);
fs::remove_file(&path.value).map_err(|err| convert_io_error(vm, err))
}
fn os_mkdir(path: PyStringRef, dir_fd: DirFd, vm: &VirtualMachine) -> PyResult<()> {
let path = make_path(vm, path, &dir_fd);
fs::create_dir(&path.value).map_err(|err| convert_io_error(vm, err))
}
fn os_mkdirs(path: PyStringRef, vm: &VirtualMachine) -> PyResult<()> {
fs::create_dir_all(&path.value).map_err(|err| convert_io_error(vm, err))
}
fn os_rmdir(path: PyStringRef, dir_fd: DirFd, vm: &VirtualMachine) -> PyResult<()> {
let path = make_path(vm, path, &dir_fd);
fs::remove_dir(&path.value).map_err(|err| convert_io_error(vm, err))
}
fn os_listdir(path: PyStringRef, vm: &VirtualMachine) -> PyResult {
match fs::read_dir(&path.value) {
Ok(iter) => {
let res: PyResult<Vec<PyObjectRef>> = iter
.map(|entry| match entry {
Ok(path) => Ok(vm.ctx.new_str(path.file_name().into_string().unwrap())),
Err(s) => Err(convert_io_error(vm, s)),
})
.collect();
Ok(vm.ctx.new_list(res?))
}
Err(s) => Err(vm.new_os_error(s.to_string())),
}
}
fn os_putenv(key: PyStringRef, value: PyStringRef, _vm: &VirtualMachine) {
env::set_var(&key.value, &value.value)
}
fn os_unsetenv(key: PyStringRef, _vm: &VirtualMachine) {
env::remove_var(&key.value)
}
fn _os_environ(vm: &VirtualMachine) -> PyDictRef {
let environ = vm.ctx.new_dict();
for (key, value) in env::vars() {
environ.set_item(&key, vm.new_str(value), vm).unwrap();
}
environ
}
fn os_readlink(path: PyStringRef, dir_fd: DirFd, vm: &VirtualMachine) -> PyResult {
let path = make_path(vm, path, &dir_fd);
let path = fs::read_link(path.as_str()).map_err(|err| convert_io_error(vm, err))?;
let path = path.into_os_string().into_string().map_err(|_osstr| {
vm.new_unicode_decode_error("Can't convert OS path to valid UTF-8 string".into())
})?;
Ok(vm.ctx.new_str(path))
}
#[derive(Debug)]
struct DirEntry {
entry: fs::DirEntry,
}
type DirEntryRef = PyRef<DirEntry>;
impl PyValue for DirEntry {
fn class(vm: &VirtualMachine) -> PyClassRef {
vm.class("_os", "DirEntry")
}
}
#[derive(FromArgs, Default)]
struct DirFd {
#[pyarg(keyword_only, default = "None")]
dir_fd: Option<PyIntRef>,
}
#[derive(FromArgs)]
struct FollowSymlinks {
#[pyarg(keyword_only, default = "true")]
follow_symlinks: bool,
}
impl DirEntryRef {
fn name(self, _vm: &VirtualMachine) -> String {
self.entry.file_name().into_string().unwrap()
}
fn path(self, _vm: &VirtualMachine) -> String {
self.entry.path().to_str().unwrap().to_string()
}
#[allow(clippy::match_bool)]
fn perform_on_metadata(
self,
follow_symlinks: FollowSymlinks,
action: fn(fs::Metadata) -> bool,
vm: &VirtualMachine,
) -> PyResult<bool> {
let metadata = match follow_symlinks.follow_symlinks {
true => fs::metadata(self.entry.path()),
false => fs::symlink_metadata(self.entry.path()),
};
let meta = metadata.map_err(|err| convert_io_error(vm, err))?;
Ok(action(meta))
}
fn is_dir(self, follow_symlinks: FollowSymlinks, vm: &VirtualMachine) -> PyResult<bool> {
self.perform_on_metadata(
follow_symlinks,
|meta: fs::Metadata| -> bool { meta.is_dir() },
vm,
)
}
fn is_file(self, follow_symlinks: FollowSymlinks, vm: &VirtualMachine) -> PyResult<bool> {
self.perform_on_metadata(
follow_symlinks,
|meta: fs::Metadata| -> bool { meta.is_file() },
vm,
)
}
fn is_symlink(self, vm: &VirtualMachine) -> PyResult<bool> {
Ok(self
.entry
.file_type()
.map_err(|err| convert_io_error(vm, err))?
.is_symlink())
}
fn stat(
self,
dir_fd: DirFd,
follow_symlinks: FollowSymlinks,
vm: &VirtualMachine,
) -> PyResult<StatResult> {
os_stat(self.path(vm).try_into_ref(vm)?, dir_fd, follow_symlinks, vm)
}
}
#[pyclass]
#[derive(Debug)]
struct ScandirIterator {
entries: RefCell<fs::ReadDir>,
}
impl PyValue for ScandirIterator {
fn class(vm: &VirtualMachine) -> PyClassRef {
vm.class("_os", "ScandirIter")
}
}
#[pyimpl]
impl ScandirIterator {
#[pymethod(name = "__next__")]
fn next(&self, vm: &VirtualMachine) -> PyResult {
match self.entries.borrow_mut().next() {
Some(entry) => match entry {
Ok(entry) => Ok(DirEntry { entry }.into_ref(vm).into_object()),
Err(s) => Err(convert_io_error(vm, s)),
},
None => Err(objiter::new_stop_iteration(vm)),
}
}
#[pymethod(name = "__iter__")]
fn iter(zelf: PyRef<Self>, _vm: &VirtualMachine) -> PyRef<Self> {
zelf
}
}
fn os_scandir(path: PyStringRef, vm: &VirtualMachine) -> PyResult {
match fs::read_dir(&path.value) {
Ok(iter) => Ok(ScandirIterator {
entries: RefCell::new(iter),
}
.into_ref(vm)
.into_object()),
Err(s) => Err(convert_io_error(vm, s)),
}
}
#[derive(Debug)]
struct StatResult {
st_mode: u32,
st_ino: u64,
st_dev: u64,
st_nlink: u64,
st_uid: u32,
st_gid: u32,
st_size: u64,
st_atime: f64,
st_ctime: f64,
st_mtime: f64,
}
impl PyValue for StatResult {
fn class(vm: &VirtualMachine) -> PyClassRef {
vm.class("_os", "stat_result")
}
}
type StatResultRef = PyRef<StatResult>;
impl StatResultRef {
fn st_mode(self, _vm: &VirtualMachine) -> u32 {
self.st_mode
}
fn st_ino(self, _vm: &VirtualMachine) -> u64 {
self.st_ino
}
fn st_dev(self, _vm: &VirtualMachine) -> u64 {
self.st_dev
}
fn st_nlink(self, _vm: &VirtualMachine) -> u64 {
self.st_nlink
}
fn st_uid(self, _vm: &VirtualMachine) -> u32 {
self.st_uid
}
fn st_gid(self, _vm: &VirtualMachine) -> u32 {
self.st_gid
}
fn st_size(self, _vm: &VirtualMachine) -> u64 {
self.st_size
}
fn st_atime(self, _vm: &VirtualMachine) -> f64 {
self.st_atime
}
fn st_ctime(self, _vm: &VirtualMachine) -> f64 {
self.st_ctime
}
fn st_mtime(self, _vm: &VirtualMachine) -> f64 {
self.st_mtime
}
}
// Copied code from Duration::as_secs_f64 as it's still unstable
fn duration_as_secs_f64(duration: Duration) -> f64 {
(duration.as_secs() as f64) + f64::from(duration.subsec_nanos()) / 1_000_000_000_f64
}
fn to_seconds_from_unix_epoch(sys_time: SystemTime) -> f64 {
match sys_time.duration_since(SystemTime::UNIX_EPOCH) {
Ok(duration) => duration_as_secs_f64(duration),
Err(err) => -duration_as_secs_f64(err.duration()),
}
}
#[cfg(unix)]
fn to_seconds_from_nanos(secs: i64, nanos: i64) -> f64 {
let duration = Duration::new(secs as u64, nanos as u32);
duration_as_secs_f64(duration)
}
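// The Unix stat body lives in a macro rather than a shared function because each
// target OS exposes its own `MetadataExt` trait, which the per-OS `os_stat` wrappers
// below bring into scope before expanding it.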
#[cfg(unix)]
macro_rules! os_unix_stat_inner {
( $path:expr, $follow_symlinks:expr, $vm:expr ) => {{
#[allow(clippy::match_bool)]
fn get_stats(path: &str, follow_symlinks: bool) -> io::Result<StatResult> {
let meta = match follow_symlinks {
true => fs::metadata(path)?,
false => fs::symlink_metadata(path)?,
};
Ok(StatResult {
st_mode: meta.st_mode(),
st_ino: meta.st_ino(),
st_dev: meta.st_dev(),
st_nlink: meta.st_nlink(),
st_uid: meta.st_uid(),
st_gid: meta.st_gid(),
st_size: meta.st_size(),
st_atime: to_seconds_from_unix_epoch(meta.accessed()?),
st_mtime: to_seconds_from_unix_epoch(meta.modified()?),
st_ctime: to_seconds_from_nanos(meta.st_ctime(), meta.st_ctime_nsec()),
})
}
get_stats(&$path.value, $follow_symlinks.follow_symlinks)
.map_err(|err| convert_io_error($vm, err))
}};
}
#[cfg(target_os = "linux")]
fn os_stat(
path: PyStringRef,
dir_fd: DirFd,
follow_symlinks: FollowSymlinks,
vm: &VirtualMachine,
) -> PyResult<StatResult> {
use std::os::linux::fs::MetadataExt;
let path = make_path(vm, path, &dir_fd);
os_unix_stat_inner!(path, follow_symlinks, vm)
}
#[cfg(target_os = "macos")]
fn os_stat(
path: PyStringRef,
dir_fd: DirFd,
follow_symlinks: FollowSymlinks,
vm: &VirtualMachine,
) -> PyResult<StatResult> {
use std::os::macos::fs::MetadataExt;
let path = make_path(vm, path, &dir_fd);
os_unix_stat_inner!(path, follow_symlinks, vm)
}
#[cfg(target_os = "android")]
fn os_stat(
path: PyStringRef,
dir_fd: DirFd,
follow_symlinks: FollowSymlinks,
vm: &VirtualMachine,
) -> PyResult<StatResult> {
use std::os::android::fs::MetadataExt;
let path = make_path(vm, path, &dir_fd);
os_unix_stat_inner!(path, follow_symlinks, vm)
}
#[cfg(target_os = "redox")]
fn os_stat(
path: PyStringRef,
dir_fd: DirFd,
follow_symlinks: FollowSymlinks,
vm: &VirtualMachine,
) -> PyResult<StatResult> {
use std::os::redox::fs::MetadataExt;
let path = make_path(vm, path, &dir_fd);
os_unix_stat_inner!(path, follow_symlinks, vm)
}
// Copied from CPython fileutils.c
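// Maps Windows file-attribute bits onto a POSIX-style st_mode: a read-only plain file
// becomes S_IFREG | 0o444, while a writable directory ends up as S_IFDIR | 0o111 | 0o666.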
#[cfg(windows)]
fn attributes_to_mode(attr: u32) -> u32 {
const FILE_ATTRIBUTE_DIRECTORY: u32 = 16;
const FILE_ATTRIBUTE_READONLY: u32 = 1;
const S_IFDIR: u32 = 0o040000;
const S_IFREG: u32 = 0o100000;
let mut m: u32 = 0;
if attr & FILE_ATTRIBUTE_DIRECTORY == FILE_ATTRIBUTE_DIRECTORY {
        m |= S_IFDIR | 0o111; /* IFEXEC for user,group,other */
} else {
m |= S_IFREG;
}
if attr & FILE_ATTRIBUTE_READONLY == FILE_ATTRIBUTE_READONLY {
        m |= 0o444;
} else {
        m |= 0o666;
}
m
}
#[cfg(windows)]
fn os_stat(
path: PyStringRef,
_dir_fd: DirFd, // TODO: error
follow_symlinks: FollowSymlinks,
vm: &VirtualMachine,
) -> PyResult<StatResult> {
use std::os::windows::fs::MetadataExt;
fn get_stats(path: &str, follow_symlinks: bool) -> io::Result<StatResult> {
let meta = match follow_symlinks {
true => fs::metadata(path)?,
false => fs::symlink_metadata(path)?,
};
Ok(StatResult {
st_mode: attributes_to_mode(meta.file_attributes()),
st_ino: 0, // TODO: Not implemented in std::os::windows::fs::MetadataExt.
st_dev: 0, // TODO: Not implemented in std::os::windows::fs::MetadataExt.
st_nlink: 0, // TODO: Not implemented in std::os::windows::fs::MetadataExt.
st_uid: 0, // 0 on windows
st_gid: 0, // 0 on windows
st_size: meta.file_size(),
st_atime: to_seconds_from_unix_epoch(meta.accessed()?),
st_mtime: to_seconds_from_unix_epoch(meta.modified()?),
st_ctime: to_seconds_from_unix_epoch(meta.created()?),
})
}
get_stats(&path.value, follow_symlinks.follow_symlinks)
.map_err(|s| vm.new_os_error(s.to_string()))
}
#[cfg(not(any(
target_os = "linux",
target_os = "macos",
target_os = "android",
target_os = "redox",
windows
)))]
fn os_stat(
_path: PyStringRef,
_dir_fd: DirFd,
_follow_symlinks: FollowSymlinks,
_vm: &VirtualMachine,
) -> PyResult<StatResult> {
unimplemented!();
}
fn os_lstat(path: PyStringRef, dir_fd: DirFd, vm: &VirtualMachine) -> PyResult<StatResult> {
os_stat(
path,
dir_fd,
FollowSymlinks {
follow_symlinks: false,
},
vm,
)
}
#[cfg(unix)]
fn os_symlink(
src: PyStringRef,
dst: PyStringRef,
dir_fd: DirFd,
vm: &VirtualMachine,
) -> PyResult<()> {
use std::os::unix::fs as unix_fs;
let dst = make_path(vm, dst, &dir_fd);
unix_fs::symlink(&src.value, &dst.value).map_err(|err| convert_io_error(vm, err))
}
#[cfg(windows)]
fn os_symlink(
src: PyStringRef,
dst: PyStringRef,
_dir_fd: DirFd,
vm: &VirtualMachine,
) -> PyResult<()> {
use std::os::windows::fs as win_fs;
let ret = match fs::metadata(&dst.value) {
Ok(meta) => {
if meta.is_file() {
win_fs::symlink_file(&src.value, &dst.value)
} else if meta.is_dir() {
win_fs::symlink_dir(&src.value, &dst.value)
} else {
panic!("Uknown file type");
}
}
Err(_) => win_fs::symlink_file(&src.value, &dst.value),
};
ret.map_err(|err| convert_io_error(vm, err))
}
#[cfg(all(not(unix), not(windows)))]
fn os_symlink(
    _src: PyStringRef,
    _dst: PyStringRef,
    _dir_fd: DirFd,
    _vm: &VirtualMachine,
) -> PyResult<()> {
unimplemented!();
}
fn os_getcwd(vm: &VirtualMachine) -> PyResult<String> {
Ok(env::current_dir()
.map_err(|err| convert_io_error(vm, err))?
.as_path()
.to_str()
.unwrap()
.to_string())
}
fn os_chdir(path: PyStringRef, vm: &VirtualMachine) -> PyResult<()> {
env::set_current_dir(&path.value).map_err(|err| convert_io_error(vm, err))
}
#[cfg(unix)]
fn os_chmod(
path: PyStringRef,
dir_fd: DirFd,
mode: u32,
follow_symlinks: FollowSymlinks,
vm: &VirtualMachine,
) -> PyResult<()> {
use std::os::unix::fs::PermissionsExt;
let path = make_path(vm, path, &dir_fd);
let metadata = if follow_symlinks.follow_symlinks {
fs::metadata(&path.value)
} else {
fs::symlink_metadata(&path.value)
};
let meta = metadata.map_err(|err| convert_io_error(vm, err))?;
let mut permissions = meta.permissions();
permissions.set_mode(mode);
fs::set_permissions(&path.value, permissions).map_err(|err| convert_io_error(vm, err))?;
Ok(())
}
fn os_fspath(path: PyObjectRef, vm: &VirtualMachine) -> PyResult {
if objtype::issubclass(&path.class(), &vm.ctx.str_type())
|| objtype::issubclass(&path.class(), &vm.ctx.bytes_type())
{
Ok(path)
} else {
Err(vm.new_type_error(format!(
"expected str or bytes object, not {}",
path.class()
)))
}
}
fn os_rename(src: PyStringRef, dst: PyStringRef, vm: &VirtualMachine) -> PyResult<()> {
fs::rename(&src.value, &dst.value).map_err(|err| convert_io_error(vm, err))
}
fn os_getpid(vm: &VirtualMachine) -> PyObjectRef {
let pid = std::process::id();
vm.new_int(pid)
}
fn os_cpu_count(vm: &VirtualMachine) -> PyObjectRef {
let cpu_count = num_cpus::get();
vm.new_int(cpu_count)
}
#[cfg(unix)]
fn os_getppid(vm: &VirtualMachine) -> PyObjectRef {
let ppid = unistd::getppid().as_raw();
vm.new_int(ppid)
}
#[cfg(unix)]
fn os_getgid(vm: &VirtualMachine) -> PyObjectRef {
let gid = unistd::getgid().as_raw();
vm.new_int(gid)
}
#[cfg(unix)]
fn os_getegid(vm: &VirtualMachine) -> PyObjectRef {
let egid = unistd::getegid().as_raw();
vm.new_int(egid)
}
#[cfg(unix)]
fn os_getpgid(pid: PyIntRef, vm: &VirtualMachine) -> PyObjectRef {
let pid = pid.as_bigint().to_u32().unwrap();
match unistd::getpgid(Some(Pid::from_raw(pid as i32))) {
Ok(pgid) => vm.new_int(pgid.as_raw()),
Err(err) => convert_nix_error(vm, err),
}
}
#[cfg(all(unix, not(target_os = "redox")))]
fn os_getsid(pid: PyIntRef, vm: &VirtualMachine) -> PyObjectRef {
let pid = pid.as_bigint().to_u32().unwrap();
match unistd::getsid(Some(Pid::from_raw(pid as i32))) {
Ok(sid) => vm.new_int(sid.as_raw()),
Err(err) => convert_nix_error(vm, err),
}
}
#[cfg(unix)]
fn os_getuid(vm: &VirtualMachine) -> PyObjectRef {
let uid = unistd::getuid().as_raw();
vm.new_int(uid)
}
#[cfg(unix)]
fn os_geteuid(vm: &VirtualMachine) -> PyObjectRef {
let euid = unistd::geteuid().as_raw();
vm.new_int(euid)
}
#[cfg(unix)]
fn os_setgid(gid: PyIntRef, vm: &VirtualMachine) -> PyResult<()> {
let gid = gid.as_bigint().to_u32().unwrap();
unistd::setgid(Gid::from_raw(gid)).map_err(|err| convert_nix_error(vm, err))
}
#[cfg(all(unix, not(target_os = "redox")))]
fn os_setegid(egid: PyIntRef, vm: &VirtualMachine) -> PyResult<()> {
let egid = egid.as_bigint().to_u32().unwrap();
unistd::setegid(Gid::from_raw(egid)).map_err(|err| convert_nix_error(vm, err))
}
#[cfg(unix)]
fn os_setpgid(pid: PyIntRef, pgid: PyIntRef, vm: &VirtualMachine) -> PyResult<()> {
let pid = pid.as_bigint().to_u32().unwrap();
let pgid = pgid.as_bigint().to_u32().unwrap();
unistd::setpgid(Pid::from_raw(pid as i32), Pid::from_raw(pgid as i32))
.map_err(|err| convert_nix_error(vm, err))
}
#[cfg(all(unix, not(target_os = "redox")))]
fn os_setsid(vm: &VirtualMachine) -> PyResult<()> {
unistd::setsid()
.map(|_ok| ())
.map_err(|err| convert_nix_error(vm, err))
}
#[cfg(unix)]
fn os_setuid(uid: PyIntRef, vm: &VirtualMachine) -> PyResult<()> {
let uid = uid.as_bigint().to_u32().unwrap();
unistd::setuid(Uid::from_raw(uid)).map_err(|err| convert_nix_error(vm, err))
}
#[cfg(all(unix, not(target_os = "redox")))]
fn os_seteuid(euid: PyIntRef, vm: &VirtualMachine) -> PyResult<()> {
let euid = euid.as_bigint().to_u32().unwrap();
unistd::seteuid(Uid::from_raw(euid)).map_err(|err| convert_nix_error(vm, err))
}
#[cfg(all(unix, not(target_os = "redox")))]
pub fn os_openpty(vm: &VirtualMachine) -> PyResult {
match openpty(None, None) {
Ok(r) => Ok(vm
.ctx
.new_tuple(vec![vm.new_int(r.master), vm.new_int(r.slave)])),
Err(err) => Err(convert_nix_error(vm, err)),
}
}
#[cfg(unix)]
pub fn os_ttyname(fd: PyIntRef, vm: &VirtualMachine) -> PyResult {
use libc::ttyname;
if let Some(fd) = fd.as_bigint().to_i32() {
let name = unsafe { ttyname(fd) };
if name.is_null() {
Err(vm.new_os_error(Error::last_os_error().to_string()))
} else {
let name = unsafe { CStr::from_ptr(name) }.to_str().unwrap();
Ok(vm.ctx.new_str(name.to_owned()))
}
} else {
Err(vm.new_overflow_error("signed integer is greater than maximum".to_owned()))
}
}
pub fn make_module(vm: &VirtualMachine) -> PyObjectRef {
let ctx = &vm.ctx;
let os_name = if cfg!(windows) {
"nt".to_string()
} else {
"posix".to_string()
};
let environ = _os_environ(vm);
let scandir_iter = ctx.new_class("ScandirIter", ctx.object());
ScandirIterator::extend_class(ctx, &scandir_iter);
let dir_entry = py_class!(ctx, "DirEntry", ctx.object(), {
"name" => ctx.new_property(DirEntryRef::name),
"path" => ctx.new_property(DirEntryRef::path),
"is_dir" => ctx.new_rustfunc(DirEntryRef::is_dir),
"is_file" => ctx.new_rustfunc(DirEntryRef::is_file),
"is_symlink" => ctx.new_rustfunc(DirEntryRef::is_symlink),
"stat" => ctx.new_rustfunc(DirEntryRef::stat),
});
let stat_result = py_class!(ctx, "stat_result", ctx.object(), {
"st_mode" => ctx.new_property(StatResultRef::st_mode),
"st_ino" => ctx.new_property(StatResultRef::st_ino),
"st_dev" => ctx.new_property(StatResultRef::st_dev),
"st_nlink" => ctx.new_property(StatResultRef::st_nlink),
"st_uid" => ctx.new_property(StatResultRef::st_uid),
"st_gid" => ctx.new_property(StatResultRef::st_gid),
"st_size" => ctx.new_property(StatResultRef::st_size),
"st_atime" => ctx.new_property(StatResultRef::st_atime),
"st_ctime" => ctx.new_property(StatResultRef::st_ctime),
"st_mtime" => ctx.new_property(StatResultRef::st_mtime),
});
struct SupportFunc<'a> {
name: &'a str,
func_obj: PyObjectRef,
fd: Option<bool>,
dir_fd: Option<bool>,
follow_symlinks: Option<bool>,
    }
impl<'a> SupportFunc<'a> {
fn new<F, T, R>(
vm: &VirtualMachine,
name: &'a str,
func: F,
fd: Option<bool>,
dir_fd: Option<bool>,
follow_symlinks: Option<bool>,
) -> Self
where
F: IntoPyNativeFunc<T, R>,
{
let func_obj = vm.ctx.new_rustfunc(func);
Self {
name,
func_obj,
fd,
dir_fd,
follow_symlinks,
}
}
}
let mut support_funcs = vec![
SupportFunc::new(vm, "open", os_open, None, Some(false), None),
// access Some Some None
SupportFunc::new(vm, "chdir", os_chdir, Some(false), None, None),
// chflags Some, None Some
// chown Some Some Some
// chroot Some None None
SupportFunc::new(vm, "listdir", os_listdir, Some(false), None, None),
SupportFunc::new(vm, "mkdir", os_mkdir, Some(false), Some(false), None),
// mkfifo Some Some None
// mknod Some Some None
// pathconf Some None None
SupportFunc::new(vm, "readlink", os_readlink, Some(false), Some(false), None),
SupportFunc::new(vm, "remove", os_remove, Some(false), Some(false), None),
SupportFunc::new(vm, "rename", os_rename, Some(false), Some(false), None),
SupportFunc::new(vm, "replace", os_rename, Some(false), Some(false), None), // TODO: Fix replace
SupportFunc::new(vm, "rmdir", os_rmdir, Some(false), Some(false), None),
SupportFunc::new(vm, "scandir", os_scandir, Some(false), None, None),
SupportFunc::new(vm, "stat", os_stat, Some(false), Some(false), Some(false)),
SupportFunc::new(vm, "symlink", os_symlink, None, Some(false), None),
// truncate Some None None
SupportFunc::new(vm, "unlink", os_remove, Some(false), Some(false), None),
// utime Some Some Some
];
#[cfg(unix)]
support_funcs.extend(vec![SupportFunc::new(
vm,
"chmod",
os_chmod,
Some(false),
Some(false),
Some(false),
)]);
let supports_fd = PySet::default().into_ref(vm);
let supports_dir_fd = PySet::default().into_ref(vm);
let supports_follow_symlinks = PySet::default().into_ref(vm);
let module = py_module!(vm, "_os", {
"close" => ctx.new_rustfunc(os_close),
"error" => ctx.new_rustfunc(os_error),
"fsync" => ctx.new_rustfunc(os_fsync),
"read" => ctx.new_rustfunc(os_read),
"write" => ctx.new_rustfunc(os_write),
"mkdirs" => ctx.new_rustfunc(os_mkdirs),
"putenv" => ctx.new_rustfunc(os_putenv),
"unsetenv" => ctx.new_rustfunc(os_unsetenv),
"environ" => environ,
"name" => ctx.new_str(os_name),
"ScandirIter" => scandir_iter,
"DirEntry" => dir_entry,
"stat_result" => stat_result,
"lstat" => ctx.new_rustfunc(os_lstat),
"getcwd" => ctx.new_rustfunc(os_getcwd),
"chdir" => ctx.new_rustfunc(os_chdir),
"fspath" => ctx.new_rustfunc(os_fspath),
"O_RDONLY" => ctx.new_int(FileCreationFlags::O_RDONLY.bits()),
"O_WRONLY" => ctx.new_int(FileCreationFlags::O_WRONLY.bits()),
"O_RDWR" => ctx.new_int(FileCreationFlags::O_RDWR.bits()),
"O_NONBLOCK" => ctx.new_int(FileCreationFlags::O_NONBLOCK.bits()),
"O_APPEND" => ctx.new_int(FileCreationFlags::O_APPEND.bits()),
"O_EXCL" => ctx.new_int(FileCreationFlags::O_EXCL.bits()),
"O_CREAT" => ctx.new_int(FileCreationFlags::O_CREAT.bits()),
"F_OK" => ctx.new_int(AccessFlags::F_OK.bits()),
"R_OK" => ctx.new_int(AccessFlags::R_OK.bits()),
"W_OK" => ctx.new_int(AccessFlags::W_OK.bits()),
"X_OK" => ctx.new_int(AccessFlags::X_OK.bits()),
"getpid" => ctx.new_rustfunc(os_getpid),
"cpu_count" => ctx.new_rustfunc(os_cpu_count)
});
for support in support_funcs {
if support.fd.unwrap_or(false) {
supports_fd
.clone()
.add(support.func_obj.clone(), vm)
.unwrap();
}
if support.dir_fd.unwrap_or(false) {
supports_dir_fd
.clone()
.add(support.func_obj.clone(), vm)
.unwrap();
}
if support.follow_symlinks.unwrap_or(false) {
supports_follow_symlinks
.clone()
.add(support.func_obj.clone(), vm)
.unwrap();
}
vm.set_attr(&module, support.name, support.func_obj)
.unwrap();
}
extend_module!(vm, module, {
"supports_fd" => supports_fd.into_object(),
"supports_dir_fd" => supports_dir_fd.into_object(),
"supports_follow_symlinks" => supports_follow_symlinks.into_object(),
});
extend_module_platform_specific(&vm, module)
}
#[cfg(unix)]
fn extend_module_platform_specific(vm: &VirtualMachine, module: PyObjectRef) -> PyObjectRef {
let ctx = &vm.ctx;
extend_module!(vm, module, {
"getppid" => ctx.new_rustfunc(os_getppid),
"getgid" => ctx.new_rustfunc(os_getgid),
"getegid" => ctx.new_rustfunc(os_getegid),
"getpgid" => ctx.new_rustfunc(os_getpgid),
"getuid" => ctx.new_rustfunc(os_getuid),
"geteuid" => ctx.new_rustfunc(os_geteuid),
"setgid" => ctx.new_rustfunc(os_setgid),
"setpgid" => ctx.new_rustfunc(os_setpgid),
"setuid" => ctx.new_rustfunc(os_setuid),
"access" => ctx.new_rustfunc(os_access),
"chmod" => ctx.new_rustfunc(os_chmod)
});
#[cfg(not(target_os = "redox"))]
extend_module!(vm, module, {
"getsid" => ctx.new_rustfunc(os_getsid),
"setsid" => ctx.new_rustfunc(os_setsid),
"setegid" => ctx.new_rustfunc(os_setegid),
"seteuid" => ctx.new_rustfunc(os_seteuid),
"openpty" => ctx.new_rustfunc(os_openpty),
"ttyname" => ctx.new_rustfunc(os_ttyname),
});
module
}
#[cfg(not(unix))]
fn extend_module_platform_specific(_vm: &VirtualMachine, module: PyObjectRef) -> PyObjectRef {
module
}
| 30.107605 | 104 | 0.604386 |
386d612aedb1025276eea6969a3defded907e93b | 9,194 | use anyhow::{anyhow, bail, ensure, Context, Result};
use std::fs::File;
use std::io::{self, prelude::*, BufReader};
#[derive(Debug)]
enum Command {
Add, // 1
Mul, // 2
Read, // 3
Write, // 4
JumpIfTrue, // 5
JumpIfFalse, // 6
LessThan, // 7
Equals, // 8
Exit, // 99
}
#[derive(Debug, PartialEq, Copy, Clone)]
enum ParamMode {
Position, // 0
Immediate, // 1
}
fn main() -> Result<()> {
let file = File::open("input.txt")?;
let reader = BufReader::new(file);
let prog_str = reader
.lines()
        .next()
.ok_or_else(|| anyhow!("ERROR: Cannot read program string."))??;
let mut prog = parse_prog(&prog_str)?;
// println!("Program: [{}]{:?}", prog.len(), prog);
let output = eval(&mut prog)?;
println!("Output: {:?}", output);
Ok(())
}
fn parse_prog(commands: &str) -> Result<Vec<i32>> {
let cmd_str: Vec<&str> = commands.split(',').collect();
let mut prog: Vec<i32> = Vec::new();
for cmd in cmd_str {
prog.push(cmd.parse()?);
}
Ok(prog)
}
fn eval(prog: &mut Vec<i32>) -> Result<Vec<i32>> {
let mut result: Vec<i32> = Vec::new();
let mut ip: usize = 0;
while ip < prog.len() {
ip = eval_cmd(ip, prog, &mut result)?;
if ip == 0 {
break;
}
}
Ok(result)
}
/// Returns next ip
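/// (an ip of 0 signals that the program executed `Exit`, opcode 99; `eval` uses it as
/// the stop condition rather than as a real jump target).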
fn eval_cmd(ip: usize, prog: &mut Vec<i32>, output: &mut Vec<i32>) -> Result<usize> {
// check_ip(ip, prog.len(), format!("Cannot read command opcode"));
let (cmd, params) = parse_opcode(prog[ip])?;
// println!("Command[{}:{}]: {:?}({:?})", ip, prog[ip], cmd, params);
check_ip(
ip + params.len() - 1,
prog.len(),
format!(
"Not enough parameters for the command {:?} at position {}",
cmd, ip
),
)?;
match cmd {
Command::Add => {
ensure!(
params.len() == 3,
"ERROR: Expected 3 parameters in add command but was {}",
params.len()
);
ensure!(
params[2] == ParamMode::Position,
"ERROR: Destination parameter should be in position mode."
);
let v1 = get_param_value(ip + 1, params[0], prog)?;
let v2 = get_param_value(ip + 2, params[1], prog)?;
set_param_value(ip + 3, prog, v1 + v2)?;
}
Command::Mul => {
ensure!(
params.len() == 3,
"ERROR: Expected 3 parameters in mul command but was {}",
params.len()
);
ensure!(
params[2] == ParamMode::Position,
"ERROR: Destination parameter should be in position mode."
);
let v1 = get_param_value(ip + 1, params[0], prog)?;
let v2 = get_param_value(ip + 2, params[1], prog)?;
set_param_value(ip + 3, prog, v1 * v2)?;
}
Command::Read => {
ensure!(
params.len() == 1,
"ERROR: Expected 1 parameters in read command but was {}",
params.len()
);
ensure!(
params[0] == ParamMode::Position,
"ERROR: Destination parameter should be in position mode."
);
let mut buf = String::new();
print!("Input: ");
let _ = io::stdout().flush();
io::stdin()
.read_line(&mut buf)
.with_context(|| "ERROR: Failed to read from stdin")?;
set_param_value(
ip + 1,
prog,
buf.trim()
.parse()
.with_context(|| format!("ERROR: Cannot parse buffer '{}'", buf))?,
)?;
}
Command::Write => {
ensure!(
params.len() == 1,
"ERROR: Expected 1 parameters in write command but was {}",
params.len()
);
let res = get_param_value(ip + 1, params[0], prog)?;
println!("Output: {}", res);
output.push(res);
}
Command::JumpIfTrue => {
ensure!(
params.len() == 2,
"ERROR: Expected 3 parameters in add command but was {}",
params.len()
);
// ensure!(params[1] == ParamMode::Position, "ERROR: Destination parameter should be in position mode.");
let v1 = get_param_value(ip + 1, params[0], prog)?;
if v1 != 0 {
return Ok(get_param_value(ip + 2, params[1], prog)? as usize);
}
}
Command::JumpIfFalse => {
ensure!(
params.len() == 2,
"ERROR: Expected 3 parameters in add command but was {}",
params.len()
);
// ensure!(params[1] == ParamMode::Position, "ERROR: Destination parameter should be in position mode.");
let v1 = get_param_value(ip + 1, params[0], prog)?;
if v1 == 0 {
return Ok(get_param_value(ip + 2, params[1], prog)? as usize);
}
}
Command::LessThan => {
ensure!(
params.len() == 3,
"ERROR: Expected 3 parameters in mul command but was {}",
params.len()
);
ensure!(
params[2] == ParamMode::Position,
"ERROR: Destination parameter should be in position mode."
);
let v1 = get_param_value(ip + 1, params[0], prog)?;
let v2 = get_param_value(ip + 2, params[1], prog)?;
set_param_value(ip + 3, prog, if v1 < v2 { 1 } else { 0 })?;
}
Command::Equals => {
ensure!(
params.len() == 3,
"ERROR: Expected 3 parameters in mul command but was {}",
params.len()
);
ensure!(
params[2] == ParamMode::Position,
"ERROR: Destination parameter should be in position mode."
);
let v1 = get_param_value(ip + 1, params[0], prog)?;
let v2 = get_param_value(ip + 2, params[1], prog)?;
set_param_value(ip + 3, prog, if v1 == v2 { 1 } else { 0 })?;
}
Command::Exit => return Ok(0),
}
Ok(ip + params.len() + 1)
}
/// Returns command and its parameter modes
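/// For example, opcode `1002` decodes to `Mul` (from the last two digits `02`)
/// with parameter modes `[Position, Immediate, Position]`, read from the
/// remaining digits least-significant first (missing digits default to `Position`).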
fn parse_opcode(opcode: i32) -> Result<(Command, Vec<ParamMode>)> {
let mut opc = opcode;
let cmd_id = match opc % 100 {
1 => Command::Add,
2 => Command::Mul,
3 => Command::Read,
4 => Command::Write,
5 => Command::JumpIfTrue,
6 => Command::JumpIfFalse,
7 => Command::LessThan,
8 => Command::Equals,
99 => Command::Exit,
_ => bail!("ERROR: Unknown command id {}", opc % 100),
};
let mut params: Vec<ParamMode> = Vec::new();
opc /= 100;
let params_count = match cmd_id {
Command::Add | Command::Mul | Command::LessThan | Command::Equals => 3,
Command::Read | Command::Write => 1,
Command::JumpIfTrue | Command::JumpIfFalse => 2,
Command::Exit => 0,
};
for _i in 0..params_count {
params.push(if opc % 10 == 0 {
ParamMode::Position
} else {
ParamMode::Immediate
});
opc /= 10;
}
Ok((cmd_id, params))
}
fn check_ip(ip: usize, prog_len: usize, msg: String) -> Result<()> {
ensure!(
ip < prog_len,
"ERROR: {}. Instruction pointer {} is out of program bound length {}",
msg,
ip,
prog_len
);
Ok(())
}
fn get_param_value(param_ip: usize, mode: ParamMode, prog: &[i32]) -> Result<i32> {
let value = match mode {
ParamMode::Position => {
let val_ip = prog[param_ip] as usize;
check_ip(val_ip, prog.len(), "Cannot read".to_string())?;
// println!(" in: ip={}->{} value={}", param_ip, val_ip, prog[val_ip]);
prog[val_ip]
}
ParamMode::Immediate => {
// println!(" in: ip={} value={}", param_ip, prog[param_ip]);
prog[param_ip]
}
};
Ok(value)
}
fn set_param_value(param_ip: usize, prog: &mut Vec<i32>, value: i32) -> Result<()> {
check_ip(param_ip, prog.len(), "Cannot store value".to_string())?;
let val_ip = prog[param_ip] as usize;
check_ip(val_ip, prog.len(), "Cannot store value".to_string())?;
prog[val_ip] = value;
// println!(" out: ip={}->{} value={}", param_ip, val_ip, value);
Ok(())
}
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn test1() -> Result<()> {
// let mut prog = parse_prog(&"3,9,8,9,10,9,4,9,99,-1,8".to_owned())?;
// assert_eq!(eval(&mut prog)?, vec![0]);
// Ok(())
// }
// #[test]
// fn test2() -> Result<()> {
// let mut prog = parse_prog(&"3,9,7,9,10,9,4,9,99,-1,8".to_owned())?;
// assert_eq!(eval(&mut prog)?, vec![0]);
// Ok(())
// }
// }
| 29.850649 | 117 | 0.476071 |
ebff322e7e9d1edbc1c9e25177cb1a55ca419a52 | 749 | use std::io;
pub mod bt;
pub mod c;
pub mod fd;
#[doc(hidden)]
pub trait IsMinusOne {
fn is_minus_one(&self) -> bool;
}
macro_rules! impl_is_minus_one {
($($t:ident)*) => ($(impl IsMinusOne for $t {
fn is_minus_one(&self) -> bool {
*self == -1
}
})*)
}
impl_is_minus_one! { i8 i16 i32 i64 isize }
pub fn cvt<T: IsMinusOne>(t: T) -> io::Result<T> {
if t.is_minus_one() {
Err(io::Error::last_os_error())
} else {
Ok(t)
}
}
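/// Retries `f` while it fails with `io::ErrorKind::Interrupted` (EINTR).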
pub fn cvt_r<T, F>(mut f: F) -> io::Result<T>
where
T: IsMinusOne,
F: FnMut() -> T,
{
loop {
match cvt(f()) {
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}
other => return other,
}
}
}
| 17.833333 | 70 | 0.508678 |
0aab317c68fbefa9ece9bfd026e23736fc7980f0 | 1,977 | // http://rosettacode.org/wiki/Langton's_ant
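// Rules: on a white square (0) the ant turns right, on a black square (1) it
// turns left; it then flips the square's colour and steps forward one cell.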
use std::num::Wrapping;
struct Ant {
x: Wrapping<usize>,
y: Wrapping<usize>,
dir: Direction,
}
#[derive(Clone,Copy)]
enum Direction {
North,
East,
South,
West,
}
use Direction::*;
impl Ant {
fn mv(&mut self, vec: &mut Vec<Vec<u8>>) {
let pointer = &mut vec[self.y.0][self.x.0];
// change direction
match *pointer {
0 => self.dir = self.dir.right(),
1 => self.dir = self.dir.left(),
_ => panic!("Unexpected colour in grid"),
}
// flip colour
// if it's 1 it's black
// if it's 0 it's white
*pointer ^= 1;
// move direction
match self.dir {
North => self.y -= Wrapping(1),
South => self.y += Wrapping(1),
East => self.x += Wrapping(1),
West => self.x -= Wrapping(1),
}
}
}
impl Direction {
fn right(self) -> Direction {
match self {
North => East,
East => South,
South => West,
West => North,
}
}
fn left(self) -> Direction {
// 3 rights equal a left
self.right().right().right()
}
}
fn main() {
// create a 100x100 grid using vectors
let mut grid: Vec<Vec<u8>> = vec![vec![0; 100]; 100];
let mut ant = Ant {
x: Wrapping(50),
y: Wrapping(50),
dir: Direction::North,
};
while ant.x < Wrapping(100) && ant.y < Wrapping(100) {
ant.mv(&mut grid);
}
for each in &grid {
// construct string
// using iterator methods to quickly convert the vector
// to a string
let string = each.iter()
.map(|&x| {
if x == 0 {
" "
} else {
"#"
}
})
.fold(String::new(), |x, y| x + y);
println!("{}", string);
}
}
| 21.966667 | 63 | 0.453212 |
339f1c3cd33a0c44aeb38fb8efc54304147d0f9d | 1,307 | // SPDX-License-Identifier: BSD-3-Clause
// Copyright (C) 2021 Akira Moroo
// Copyright (C) 2018 Google LLC
#[cfg(target_arch = "x86_64")]
use core::arch::x86_64::_rdtsc;
const NSECS_PER_SEC: u64 = 1000000000;
const CPU_KHZ_DEFAULT: u64 = 200;
const PAUSE_THRESHOLD_TICKS: u64 = 150;
pub fn ndelay(ns: u64) {
let delta = ns * CPU_KHZ_DEFAULT / NSECS_PER_SEC;
let mut pause_delta = 0;
unsafe {
let start = _rdtsc();
if delta > PAUSE_THRESHOLD_TICKS {
pause_delta = delta - PAUSE_THRESHOLD_TICKS;
}
while _rdtsc() - start < pause_delta {
asm!("pause");
}
while _rdtsc() - start < delta {}
}
}
pub fn udelay(us: u64) {
for _i in 0..us as usize {
ndelay(1000)
}
}
#[allow(dead_code)]
pub fn mdelay(ms: u64) {
for _i in 0..ms as usize {
udelay(1000)
}
}
#[allow(dead_code)]
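/// Busy-waits for up to `ms` milliseconds while `cond()` returns true;
/// returns the final value of `cond()`.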
pub fn wait_while<F>(ms: u64, mut cond: F) -> bool
where
F: FnMut() -> bool,
{
let mut us = ms * 1000;
while cond() && us > 0 {
udelay(1);
us -= 1;
}
cond()
}
#[allow(dead_code)]
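/// Busy-waits for up to `ms` milliseconds until `cond()` returns true;
/// returns the final value of `cond()`.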
pub fn wait_until<F>(ms: u64, mut cond: F) -> bool
where
F: FnMut() -> bool,
{
let mut us = ms * 1000;
while !cond() && us > 0 {
udelay(1);
us -= 1;
}
cond()
}
| 20.107692 | 56 | 0.556236 |
0a6a57231fb5cb7bf821f607894add08b71396ba | 1,276 | use std::collections::HashMap;
use std::error::Error;
use std::path::Path;
use std::{panic, path};
use pdf_extract::extract_text;
use crate::process_engine::Processor;
#[derive(Default, Clone)]
pub struct PdfProcessor {
meta_data: HashMap<String, String>,
}
impl Processor for PdfProcessor {
fn content(&self, file: &Path) -> Result<String, Box<dyn Error>> {
let path = path::Path::new(&file);
let mut string = String::new();
string.push_str("<quake-br>");
println!("processing file: {:}", path.display());
let extract = panic::catch_unwind(|| extract_text(path));
let text = match extract {
Ok(t) => match t {
Ok(text) => text,
Err(err) => return Err(Box::new(err)),
},
Err(err) => {
println!("{:?}", err);
"".to_string()
}
};
string.push_str(text.as_str());
string = string.replace("\n\n", "<quake-br>").replace("\n", "");
string = string.replace("<quake-br>", "\n\n");
Ok(string)
}
fn meta_data(&self) -> HashMap<String, String> {
self.meta_data.clone()
}
}
#[cfg(test)]
mod tests {
#[test]
fn should_parse_references() {}
}
| 23.62963 | 72 | 0.535266 |
2950073cb5dca33b9cb8d5fad64c9d6a8b15b221 | 17,822 | use std::collections::HashMap;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::{Duration, Instant};
use futures::channel::mpsc::unbounded;
use futures::future;
use futures::stream::StreamExt;
use rand::Rng;
use crate::proto::raftpb::*;
use crate::raft;
use crate::raft::persister::*;
pub const SNAPSHOT_INTERVAL: u64 = 10;
fn uniqstring() -> String {
static ID: AtomicUsize = AtomicUsize::new(0);
format!("{}", ID.fetch_add(1, Ordering::Relaxed))
}
/// A log entry.
#[derive(Clone, PartialEq, Message)]
pub struct Entry {
#[prost(uint64, tag = "100")]
pub x: u64,
}
pub struct Storage {
// copy of each server's committed entries
logs: Vec<HashMap<u64, Entry>>,
max_index: u64,
max_index0: u64,
}
impl Storage {
/// how many servers think a log entry is committed?
pub fn n_committed(&self, index: u64) -> (usize, Option<Entry>) {
let mut count = 0;
let mut cmd = None;
for log in &self.logs {
let cmd1 = log.get(&index).cloned();
if cmd1.is_some() {
if count > 0 && cmd != cmd1 {
panic!(
"committed values do not match: index {:?}, {:?}, {:?}",
index, cmd, cmd1
);
}
count += 1;
cmd = cmd1;
}
}
(count, cmd)
}
}
fn init_logger() {
use std::sync::Once;
static LOGGER_INIT: Once = Once::new();
LOGGER_INIT.call_once(env_logger::init);
}
pub struct Config {
pub net: labrpc::Network,
n: usize,
// use boxed slice to prohibit grow capacity.
pub rafts: Arc<Mutex<Box<[Option<raft::Node>]>>>,
// whether each server is on the net
pub connected: Box<[bool]>,
saved: Box<[Arc<SimplePersister>]>,
// the port file names each sends to
endnames: Box<[Box<[String]>]>,
pub storage: Arc<Mutex<Storage>>,
// time at which make_config() was called
start: Instant,
// begin()/end() statistics
// time at which test_test.go called cfg.begin()
t0: Instant,
// rpc_total() at start of test
rpcs0: usize,
// number of agreements
cmds0: usize,
}
impl Config {
pub fn new(n: usize) -> Config {
Config::new_with(n, false, false)
}
pub fn new_with(n: usize, unreliable: bool, snapshot: bool) -> Config {
init_logger();
let net = labrpc::Network::new();
net.set_reliable(!unreliable);
net.set_long_delays(true);
let storage = Storage {
logs: vec![HashMap::new(); n],
max_index: 0,
max_index0: 0,
};
let mut saved = vec![];
let mut endnames = vec![];
for _ in 0..n {
endnames.push(vec![String::new(); n].into_boxed_slice());
saved.push(Arc::new(SimplePersister::new()));
}
let mut cfg = Config {
net,
n,
rafts: Arc::new(Mutex::new(vec![None; n].into_boxed_slice())),
connected: vec![true; n].into_boxed_slice(),
saved: saved.into_boxed_slice(),
endnames: endnames.into_boxed_slice(),
storage: Arc::new(Mutex::new(storage)),
start: Instant::now(),
t0: Instant::now(),
rpcs0: 0,
cmds0: 0,
};
for i in 0..n {
cfg.start1_ext(i, snapshot);
}
for i in 0..n {
cfg.connect(i);
}
cfg
}
pub fn rpc_count(&self, server: usize) -> usize {
self.net.count(&format!("{}", server))
}
fn rpc_total(&self) -> usize {
self.net.total_count()
}
/// Maximum log size across all servers
pub fn log_size(&self) -> usize {
self.saved
.iter()
.map(|s| s.raft_state().len())
.max()
.unwrap()
}
// check that there's exactly one leader.
// try a few times in case re-elections are needed.
pub fn check_one_leader(&self) -> usize {
let mut random = rand::thread_rng();
let mut leaders = HashMap::new();
for _iters in 0..10 {
let ms = 450 + (random.gen::<u64>() % 100);
thread::sleep(Duration::from_millis(ms));
for (i, connected) in self.connected.iter().enumerate() {
if *connected {
let state = self.rafts.lock().unwrap()[i]
.as_ref()
.unwrap()
.get_state()
.clone();
let term = state.term();
let is_leader = state.is_leader();
if is_leader {
leaders.entry(term).or_insert_with(Vec::new).push(i);
}
}
}
let mut last_term_with_leader = 0;
for (term, leaders) in &leaders {
if leaders.len() > 1 {
panic!("term {} has {:?} (>1) leaders", term, leaders);
}
if *term > last_term_with_leader {
last_term_with_leader = *term;
}
}
if !leaders.is_empty() {
return leaders[&last_term_with_leader][0];
}
}
panic!("expected one leader, got none")
}
/// check that everyone agrees on the term.
pub fn check_terms(&self) -> u64 {
let mut term = 0;
for (i, connected) in self.connected.iter().enumerate() {
if *connected {
let xterm = self.rafts.lock().unwrap()[i].as_ref().unwrap().term();
if term == 0 {
term = xterm;
} else if term != xterm {
panic!("servers disagree on term");
}
}
}
term
}
/// check that there's no leader
pub fn check_no_leader(&self) {
for (i, connected) in self.connected.iter().enumerate() {
if *connected {
let is_leader = self.rafts.lock().unwrap()[i].as_ref().unwrap().is_leader();
if is_leader {
panic!("expected no leader, but {} claims to be leader", i);
}
}
}
}
pub fn check_timeout(&self) {
// enforce a two minute real-time limit on each test
if self.start.elapsed() > Duration::from_secs(120) {
panic!("test took longer than 120 seconds");
}
}
/// how many servers think a log entry is committed?
pub fn n_committed(&self, index: u64) -> (usize, Option<Entry>) {
let s = self.storage.lock().unwrap();
s.n_committed(index)
}
// wait for at least n servers to commit.
// but don't wait forever.
pub fn wait(&self, index: u64, n: usize, start_term: Option<u64>) -> Option<Entry> {
let mut to = Duration::from_millis(10);
for _ in 0..30 {
let (nd, _) = self.n_committed(index);
if nd >= n {
break;
}
thread::sleep(to);
if to < Duration::from_secs(1) {
to *= 2;
}
if let Some(start_term) = start_term {
let rafts = self.rafts.lock().unwrap();
for r in rafts.iter().flatten() {
let term = r.term();
if term > start_term {
// someone has moved on
// can no longer guarantee that we'll "win"
return None;
}
}
}
}
let (nd, cmd) = self.n_committed(index);
if nd < n {
panic!("only {} decided for index {}; wanted {}", nd, index, n);
}
cmd
}
/// do a complete agreement.
/// it might choose the wrong leader initially,
/// and have to re-submit after giving up.
/// entirely gives up after about 10 seconds.
/// indirectly checks that the servers agree on the
/// same value, since n_committed() checks this,
/// as do the threads that read from applyCh.
/// returns index.
/// if retry==true, may submit the command multiple
/// times, in case a leader fails just after Start().
/// if retry==false, calls start() only once, in order
/// to simplify the early Lab 2B tests.
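    /// e.g. `cfg.one(Entry { x: 42 }, 3, true)` submits the entry and returns
    /// the log index once at least 3 servers have committed it.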
pub fn one(&self, cmd: Entry, expected_servers: usize, retry: bool) -> u64 {
let t0 = Instant::now();
let mut starts = 0;
while t0.elapsed() < Duration::from_secs(10) {
// try all the servers, maybe one is the leader.
let mut index = None;
for _ in 0..self.n {
starts = (starts + 1) % self.n;
if self.connected[starts] {
let rafts = self.rafts.lock().unwrap();
if let Some(ref rf) = &rafts[starts] {
match rf.start(&cmd) {
Ok((index1, _)) => {
index = Some(index1);
break;
}
Err(e) => debug!("start cmd {:?} failed: {:?}", cmd, e),
}
}
}
}
if let Some(index) = index {
// somebody claimed to be the leader and to have
// submitted our command; wait a while for agreement.
let t1 = Instant::now();
while t1.elapsed() < Duration::from_secs(2) {
let (nd, cmd1) = self.n_committed(index);
if nd > 0 && nd >= expected_servers {
// committed
if let Some(cmd2) = cmd1 {
if cmd2 == cmd {
// and it was the command we submitted.
return index;
}
}
}
thread::sleep(Duration::from_millis(20));
}
if !retry {
panic!("one({:?}) failed to reach agreement", cmd);
}
} else {
thread::sleep(Duration::from_millis(50));
}
}
panic!("one({:?}) failed to reach agreement", cmd);
}
/// start a Test.
/// print the Test message.
/// e.g. cfg.begin("Test (2B): RPC counts aren't too high")
pub fn begin(&mut self, description: &str) {
println!(); // Force the log starts at a new line.
info!("{} ...", description);
self.t0 = Instant::now();
self.rpcs0 = self.rpc_total();
self.cmds0 = 0;
let mut s = self.storage.lock().unwrap();
s.max_index0 = s.max_index;
}
/// end a Test -- the fact that we got here means there was no failure.
/// print the Passed message, and some performance numbers.
pub fn end(&self) {
self.check_timeout();
// real time
let t = self.t0.elapsed();
// number of Raft peers
let npeers = self.n;
// number of RPC sends
let nrpc = self.rpc_total() - self.rpcs0;
// number of Raft agreements reported
let s = self.storage.lock().unwrap();
let ncmds = s.max_index - s.max_index0;
info!(" ... Passed --");
info!(" {:?} {} {} {}", t, npeers, nrpc, ncmds);
}
/// start or re-start a Raft.
/// if one already exists, "kill" it first.
/// allocate new outgoing port file names, and a new
/// state persister, to isolate previous instance of
/// this server. since we cannot really kill it.
pub fn start1(&mut self, i: usize) {
self.start1_ext(i, false);
}
pub fn start1_snapshot(&mut self, i: usize) {
self.start1_ext(i, true);
}
fn start1_ext(&mut self, i: usize, snapshot: bool) {
self.crash1(i);
// a fresh set of outgoing ClientEnd names.
// so that old crashed instance's ClientEnds can't send.
self.endnames[i] = vec![String::new(); self.n].into_boxed_slice();
for j in 0..self.n {
self.endnames[i][j] = uniqstring();
}
// a fresh set of ClientEnds.
let mut clients = Vec::with_capacity(self.n);
for (j, name) in self.endnames[i].iter().enumerate() {
let cli = self.net.create_client(name.to_string());
let client = RaftClient::new(cli);
clients.push(client);
self.net.connect(name, &format!("{}", j));
}
let (tx, apply_ch) = unbounded();
let rf = raft::Raft::new(clients, i, Box::new(self.saved[i].clone()), tx);
let node = raft::Node::new(rf);
self.rafts.lock().unwrap()[i] = Some(node.clone());
// listen to messages from Raft indicating newly committed messages.
let storage = self.storage.clone();
let rafts = self.rafts.clone();
let apply = apply_ch.for_each(move |cmd: raft::ApplyMsg| match cmd {
raft::ApplyMsg::Command { data, index } => {
// debug!("apply {}", index);
let entry = labcodec::decode(&data).expect("committed command is not an entry");
let mut s = storage.lock().unwrap();
for (j, log) in s.logs.iter().enumerate() {
if let Some(old) = log.get(&index) {
if *old != entry {
// some server has already committed a different value for this entry!
panic!(
"commit index={:?} server={:?} {:?} != server={:?} {:?}",
index, i, entry, j, old
);
}
}
}
let log = &mut s.logs[i];
if index > 1 && log.get(&(index - 1)).is_none() {
panic!("server {} apply out of order {}", i, index);
}
log.insert(index, entry);
if index > s.max_index {
s.max_index = index;
}
if snapshot && (index + 1) % SNAPSHOT_INTERVAL == 0 {
rafts.lock().unwrap()[i]
.as_ref()
.unwrap()
.snapshot(index, &data);
}
future::ready(())
}
raft::ApplyMsg::Snapshot { data, index, term } if snapshot => {
// debug!("install snapshot {}", index);
if rafts.lock().unwrap()[i]
.as_ref()
.unwrap()
.cond_install_snapshot(term, index, &data)
{
let mut s = storage.lock().unwrap();
let log = &mut s.logs[i];
log.clear();
let entry = labcodec::decode(&data).unwrap();
log.insert(index, entry);
}
future::ready(())
}
// ignore other types of ApplyMsg
_ => future::ready(()),
});
self.net.spawn_poller(apply);
let mut builder = labrpc::ServerBuilder::new(format!("{}", i));
raft::add_raft_service(node, &mut builder).unwrap();
let srv = builder.build();
self.net.add_server(srv);
}
/// shut down a Raft server but save its persistent state.
pub fn crash1(&mut self, i: usize) {
self.disconnect(i);
// disable client connections to the server.
self.net.delete_server(&format!("{}", i));
// a fresh persister, in case old instance
// continues to update the Persister.
// but copy old persister's content so that we always
// pass Make() the last persisted state.
let raft_state = self.saved[i].raft_state();
let snapshot = self.saved[i].snapshot();
let p = SimplePersister::new();
p.save_state_and_snapshot(raft_state, snapshot);
self.saved[i] = Arc::new(p);
if let Some(rf) = self.rafts.lock().unwrap()[i].take() {
rf.kill();
}
}
/// detach server i from the net.
pub fn disconnect(&mut self, i: usize) {
debug!("disconnect({})", i);
self.connected[i] = false;
// outgoing ClientEnds
for endname in &*self.endnames[i] {
self.net.enable(endname, false);
}
// incoming ClientEnds
for names in &*self.endnames {
let endname = &names[i];
self.net.enable(endname, false);
}
}
/// attach server i to the net.
pub fn connect(&mut self, i: usize) {
debug!("connect({})", i);
self.connected[i] = true;
// outgoing ClientEnds
for (j, connected) in self.connected.iter().enumerate() {
if *connected {
let endname = &*self.endnames[i][j];
self.net.enable(endname, true);
}
}
// incoming ClientEnds
for (j, connected) in self.connected.iter().enumerate() {
if *connected {
let endname = &*self.endnames[j][i];
self.net.enable(endname, true);
}
}
}
}
impl Drop for Config {
fn drop(&mut self) {
if let Ok(rafts) = self.rafts.try_lock() {
for r in rafts.iter().flatten() {
r.kill();
}
}
// FIXME: we should not panic in a drop method.
self.check_timeout();
}
}
| 33.064935 | 98 | 0.483391 |
fce46d1ebaa090e8aee5acd13802d46c1233d4a0 | 2,971 | use super::*;
use proptest::strategy::Strategy;
#[test]
fn without_small_integer_or_float_returns_true() {
run!(
|arc_process| {
(
strategy::term::integer::small(arc_process.clone()),
strategy::term(arc_process.clone())
.prop_filter("Right must not be a small integer or float", |v| {
!(v.is_smallint() || v.is_boxed_float())
}),
)
},
|(left, right)| {
prop_assert_eq!(native(left, right), true.into());
Ok(())
},
);
}
#[test]
fn with_same_small_integer_right_returns_false() {
run!(
|arc_process| strategy::term::integer::small(arc_process.clone()),
|operand| {
prop_assert_eq!(native(operand, operand), false.into());
Ok(())
},
);
}
#[test]
fn with_same_value_small_integer_right_returns_false() {
run!(
|arc_process| {
(SmallInteger::MIN_VALUE..SmallInteger::MAX_VALUE).prop_map(move |i| {
let mut heap = arc_process.acquire_heap();
(heap.integer(i).unwrap(), heap.integer(i).unwrap())
})
},
|(left, right)| {
prop_assert_eq!(native(left, right), false.into());
Ok(())
},
);
}
#[test]
fn with_different_small_integer_right_returns_true() {
run!(
|arc_process| {
(SmallInteger::MIN_VALUE..SmallInteger::MAX_VALUE).prop_map(move |i| {
let mut heap = arc_process.acquire_heap();
(heap.integer(i).unwrap(), heap.integer(i + 1).unwrap())
})
},
|(left, right)| {
prop_assert_eq!(native(left, right), true.into());
Ok(())
},
);
}
#[test]
fn with_same_value_float_right_returns_false() {
run!(
|arc_process| {
strategy::term::small_integer_float_integral_i64().prop_map(move |i| {
let mut heap = arc_process.acquire_heap();
(heap.integer(i).unwrap(), heap.float(i as f64).unwrap())
})
},
|(left, right)| {
prop_assert_eq!(native(left, right.into()), false.into());
Ok(())
},
);
}
#[test]
fn with_different_value_float_right_returns_true() {
run!(
|arc_process| {
strategy::term::small_integer_float_integral_i64().prop_map(move |i| {
let mut heap = arc_process.acquire_heap();
// change float toward zero to ensure it remains in integral range
let diff = if i < 0 { 1 } else { -1 };
(
heap.integer(i).unwrap(),
heap.float((i + diff) as f64).unwrap(),
)
})
},
|(left, right)| {
prop_assert_eq!(native(left, right.into()), true.into());
Ok(())
},
);
}
| 26.061404 | 84 | 0.500505 |
904bbd5d60b08eb355705918ab49f6962edfe83b | 4,789 | #![cfg_attr(test, feature(inclusive_range_syntax))]
#![no_std]
use core::ops::Deref;
use core::ops::DerefMut;
use core::slice::Iter;
#[cfg(test)]
mod tests;
/// A contiguous array type backed by a slice.
///
/// `StackVec`'s functionality is similar to that of `std::Vec`. You can `push`
/// and `pop` and iterate over the vector. Unlike `Vec`, however, `StackVec`
/// requires no memory allocation as it is backed by a user-supplied slice. As a
/// result, `StackVec`'s capacity is _bounded_ by the user-supplied slice. This
/// results in `push` being fallible: if `push` is called when the vector is
/// full, an `Err` is returned.
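///
/// # Examples
///
/// A minimal usage sketch (marked `ignore`; it assumes `StackVec` is in scope):
///
/// ```ignore
/// let mut storage = [0u8; 1024];
/// let mut vec = StackVec::new(&mut storage);
///
/// assert!(vec.is_empty());
/// vec.push(10).expect("vector has spare capacity");
/// assert_eq!(vec.pop(), Some(10));
/// ```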
#[derive(Debug)]
pub struct StackVec<'a, T: 'a> {
storage: &'a mut [T],
len: usize
}
impl<'a, T: 'a> StackVec<'a, T> {
/// Constructs a new, empty `StackVec<T>` using `storage` as the backing
/// store. The returned `StackVec` will be able to hold `storage.len()`
/// values.
pub fn new(storage: &'a mut [T]) -> StackVec<'a, T> {
StackVec {
storage: storage,
len: 0,
}
}
/// Constructs a new `StackVec<T>` using `storage` as the backing store. The
/// first `len` elements of `storage` are treated as if they were `push`ed
    /// onto `self`. The returned `StackVec` will be able to hold a total of
/// `storage.len()` values.
///
/// # Panics
///
/// Panics if `len > storage.len()`.
    pub fn with_len(storage: &'a mut [T], len: usize) -> StackVec<'a, T> {
        assert!(len <= storage.len());
        StackVec {
            storage: storage,
            len: len,
        }
    }
/// Returns the number of elements this vector can hold.
pub fn capacity(&self) -> usize {
self.storage.len()
}
/// Shortens the vector, keeping the first `len` elements. If `len` is
/// greater than the vector's current length, this has no effect. Note that
/// this method has no effect on the capacity of the vector.
pub fn truncate(&mut self, len: usize) {
if len < self.len {
self.len = len;
}
}
/// Extracts a slice containing the entire vector, consuming `self`.
///
/// Note that the returned slice's length will be the length of this vector,
/// _not_ the length of the original backing storage.
pub fn into_slice(self) -> &'a mut [T] {
& mut self.storage[0..self.len]
}
/// Extracts a slice containing the entire vector.
pub fn as_slice(&self) -> &[T] {
& self.storage[0..self.len]
}
/// Extracts a mutable slice of the entire vector.
pub fn as_mut_slice(&mut self) -> &mut [T] {
& mut self.storage[0..self.len]
}
/// Returns the number of elements in the vector, also referred to as its
/// 'length'.
pub fn len(&self) -> usize {
self.len
}
/// Returns true if the vector contains no elements.
pub fn is_empty(&self) -> bool {
self.len == 0
}
/// Returns true if the vector is at capacity.
pub fn is_full(&self) -> bool {
self.len == self.storage.len()
}
/// Appends `value` to the back of this vector if the vector is not full.
///
/// # Error
///
/// If this vector is full, an `Err` is returned. Otherwise, `Ok` is
/// returned.
pub fn push(&mut self, value: T) -> Result<(), ()> {
if self.len < self.storage.len() {
self.storage[ self.len ] = value;
self.len += 1;
Ok( () )
} else {
Err( () )
}
}
}
impl<'a, T: Clone + 'a> StackVec<'a, T> {
/// If this vector is not empty, removes the last element from this vector
/// by cloning it and returns it. Otherwise returns `None`.
pub fn pop(&mut self) -> Option<T> {
if self.len > 0 {
let s = Some( self.storage[ self.len - 1 ].clone() );
self.len -= 1;
s
} else {
None
}
}
}
impl<'a, T: 'a> Deref for StackVec<'a, T> {
type Target = [T];
fn deref( & self ) -> & [T] {
self.as_slice()
}
}
impl<'a, T: 'a> DerefMut for StackVec<'a, T> {
fn deref_mut( & mut self ) -> & mut [T] {
self.as_mut_slice()
}
}
impl<'a, T: 'a> IntoIterator for StackVec<'a, T> {
type Item = & 'a T;
type IntoIter = Iter< 'a, T >;
fn into_iter( self ) -> Self::IntoIter {
self.storage[0..self.len].into_iter()
}
}
impl<'a, T: 'a> IntoIterator for & 'a StackVec<'a, T> {
type Item = & 'a T;
type IntoIter = Iter< 'a, T >;
fn into_iter( self ) -> Self::IntoIter {
self.storage[0..self.len].into_iter()
}
}
| 29.561728 | 80 | 0.561704 |
f492025bf1e1271df9b26eb58b61cd1058516f31 | 266 | #[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_derive_enum;
extern crate dotenv;
#[macro_use]
extern crate failure;
#[macro_use]
extern crate structopt;
pub mod opts;
mod db;
pub mod errors;
mod taskman;
mod priority;
pub use taskman::TaskMan;
| 14.777778 | 32 | 0.770677 |
bf4d801e886696466cc591760b33c34482daae4f | 3,452 | // Copyright 2020 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[cfg(test)]
mod tests {
use std::mem::size_of;
use std::sync::Arc;
use common_datavalues::*;
use common_exception::Result;
use common_planners::*;
use pretty_assertions::assert_eq;
use crate::optimizers::optimizer_test::*;
use crate::optimizers::*;
#[test]
fn test_statistics_exact_optimizer() -> Result<()> {
let ctx = crate::tests::try_create_context()?;
let total = ctx.get_settings().get_max_block_size()? as u64;
let statistics =
Statistics::new_exact(total as usize, ((total) * size_of::<u64>() as u64) as usize);
ctx.try_set_statistics(&statistics)?;
let source_plan = PlanNode::ReadSource(ReadDataSourcePlan {
db: "system".to_string(),
table: "test".to_string(),
table_id: 0,
table_version: None,
schema: DataSchemaRefExt::create(vec![
DataField::new("a", DataType::String, false),
DataField::new("b", DataType::String, false),
DataField::new("c", DataType::String, false),
]),
parts: generate_partitions(8, total as u64),
statistics: statistics.clone(),
description: format!(
"(Read from system.{} table, Read Rows:{}, Read Bytes:{})",
"test".to_string(),
statistics.read_rows,
statistics.read_bytes
),
scan_plan: Arc::new(ScanPlan::empty()),
remote: false,
tbl_args: None,
push_downs: None,
});
let aggr_expr = Expression::AggregateFunction {
op: "count".to_string(),
distinct: false,
params: vec![],
args: vec![Expression::create_literal(DataValue::UInt64(Some(0)))],
};
let plan = PlanBuilder::from(&source_plan)
.expression(
&[Expression::create_literal(DataValue::UInt64(Some(0)))],
"Before GroupBy",
)?
.aggregate_partial(&[aggr_expr.clone()], &[])?
.aggregate_final(source_plan.schema(), &[aggr_expr], &[])?
.project(&[Expression::Column("count(0)".to_string())])?
.build()?;
let mut statistics_exact = StatisticsExactOptimizer::create(ctx);
let optimized = statistics_exact.optimize(&plan)?;
let expect = "\
Projection: count(0):UInt64\
\n AggregatorFinal: groupBy=[[]], aggr=[[count(0)]]\
\n Projection: 904e as count(0):String\
\n Expression: 904e:String (Exact Statistics)\
\n ReadDataSource: scan partitions: [1], scan schema: [dummy:UInt8], statistics: [read_rows: 1, read_bytes: 1]";
let actual = format!("{:?}", optimized);
assert_eq!(expect, actual);
Ok(())
}
}
| 37.934066 | 127 | 0.58372 |
cc6cee00b7d8eead7e23adc1a20542af416317af | 3,309 | #![doc = "generated by AutoRust 0.1.0"]
#[cfg(feature = "package-catalog-2016-11")]
mod package_catalog_2016_11;
#[cfg(feature = "package-catalog-2016-11")]
pub use package_catalog_2016_11::{models, operations, API_VERSION};
#[cfg(feature = "package-catalog-2015-10-preview")]
mod package_catalog_2015_10_preview;
#[cfg(feature = "package-catalog-2015-10-preview")]
pub use package_catalog_2015_10_preview::{models, operations, API_VERSION};
#[cfg(feature = "package-job-2017-09-preview")]
mod package_job_2017_09_preview;
#[cfg(feature = "package-job-2017-09-preview")]
pub use package_job_2017_09_preview::{models, operations, API_VERSION};
#[cfg(feature = "package-job-2016-11")]
mod package_job_2016_11;
#[cfg(feature = "package-job-2016-11")]
pub use package_job_2016_11::{models, operations, API_VERSION};
#[cfg(feature = "package-job-2016-03-preview")]
mod package_job_2016_03_preview;
#[cfg(feature = "package-job-2016-03-preview")]
pub use package_job_2016_03_preview::{models, operations, API_VERSION};
#[cfg(feature = "package-job-2015-11-preview")]
mod package_job_2015_11_preview;
use azure_core::setters;
#[cfg(feature = "package-job-2015-11-preview")]
pub use package_job_2015_11_preview::{models, operations, API_VERSION};
pub fn config(
http_client: std::sync::Arc<dyn azure_core::HttpClient>,
token_credential: Box<dyn azure_core::TokenCredential>,
) -> OperationConfigBuilder {
OperationConfigBuilder {
api_version: None,
http_client,
base_path: None,
token_credential,
token_credential_resource: None,
}
}
pub struct OperationConfigBuilder {
api_version: Option<String>,
http_client: std::sync::Arc<dyn azure_core::HttpClient>,
base_path: Option<String>,
token_credential: Box<dyn azure_core::TokenCredential>,
token_credential_resource: Option<String>,
}
impl OperationConfigBuilder {
setters! { api_version : String => Some (api_version) , base_path : String => Some (base_path) , token_credential_resource : String => Some (token_credential_resource) , }
pub fn build(self) -> OperationConfig {
OperationConfig {
api_version: self.api_version.unwrap_or(API_VERSION.to_owned()),
http_client: self.http_client,
base_path: self.base_path.unwrap_or("https://management.azure.com".to_owned()),
token_credential: Some(self.token_credential),
token_credential_resource: self.token_credential_resource.unwrap_or("https://management.azure.com/".to_owned()),
}
}
}
pub struct OperationConfig {
api_version: String,
http_client: std::sync::Arc<dyn azure_core::HttpClient>,
base_path: String,
token_credential: Option<Box<dyn azure_core::TokenCredential>>,
token_credential_resource: String,
}
impl OperationConfig {
pub fn api_version(&self) -> &str {
self.api_version.as_str()
}
pub fn http_client(&self) -> &dyn azure_core::HttpClient {
self.http_client.as_ref()
}
pub fn base_path(&self) -> &str {
self.base_path.as_str()
}
pub fn token_credential(&self) -> Option<&dyn azure_core::TokenCredential> {
self.token_credential.as_deref()
}
pub fn token_credential_resource(&self) -> &str {
self.token_credential_resource.as_str()
}
}
| 40.353659 | 175 | 0.715926 |
216183bf545f44e120194b6a8cb717d4c2f82995 | 24,942 | //! Types.
use fallible_iterator::FallibleIterator;
use postgres_protocol;
use postgres_protocol::types::{self, ArrayDimension};
use std::borrow::Cow;
use std::collections::HashMap;
use std::error::Error;
use std::fmt;
use std::hash::BuildHasher;
use std::sync::Arc;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use crate::types::type_gen::{Inner, Other};
#[doc(inline)]
pub use postgres_protocol::Oid;
pub use crate::types::special::{Date, Timestamp};
// Number of seconds from 1970-01-01 to 2000-01-01
const TIME_SEC_CONVERSION: u64 = 946_684_800;
const USEC_PER_SEC: u64 = 1_000_000;
const NSEC_PER_USEC: u64 = 1_000;
/// Generates a simple implementation of `ToSql::accepts` which accepts the
/// types passed to it.
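///
/// For example (illustrative), `accepts!(INT4, INT8)` expands to an `accepts`
/// implementation that returns `true` only for `Type::INT4` and `Type::INT8`.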
#[macro_export]
macro_rules! accepts {
($($expected:ident),+) => (
fn accepts(ty: &$crate::types::Type) -> bool {
match *ty {
$($crate::types::Type::$expected)|+ => true,
_ => false
}
}
)
}
/// Generates an implementation of `ToSql::to_sql_checked`.
///
/// All `ToSql` implementations should use this macro.
#[macro_export]
macro_rules! to_sql_checked {
() => {
fn to_sql_checked(&self,
ty: &$crate::types::Type,
out: &mut ::std::vec::Vec<u8>)
-> ::std::result::Result<$crate::types::IsNull,
Box<dyn ::std::error::Error +
::std::marker::Sync +
::std::marker::Send>> {
$crate::types::__to_sql_checked(self, ty, out)
}
}
}
// WARNING: this function is not considered part of this crate's public API.
// It is subject to change at any time.
#[doc(hidden)]
pub fn __to_sql_checked<T>(
v: &T,
ty: &Type,
out: &mut Vec<u8>,
) -> Result<IsNull, Box<dyn Error + Sync + Send>>
where
T: ToSql,
{
if !T::accepts(ty) {
return Err(Box::new(WrongType(ty.clone())));
}
v.to_sql(ty, out)
}
#[cfg(feature = "with-bit-vec-0.5")]
mod bit_vec_05;
#[cfg(feature = "with-chrono-0.4")]
mod chrono_04;
#[cfg(feature = "with-eui48-0.4")]
mod eui48_04;
#[cfg(feature = "with-geo-types-0.4")]
mod geo_types_04;
#[cfg(feature = "with-serde_json-1")]
mod serde_json_1;
#[cfg(feature = "with-uuid-0.7")]
mod uuid_07;
mod special;
mod type_gen;
#[cfg(feature = "with-serde_json-1")]
pub use crate::types::serde_json_1::Json;
/// A Postgres type.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct Type(Inner);
impl fmt::Display for Type {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.schema() {
"public" | "pg_catalog" => {}
schema => write!(fmt, "{}.", schema)?,
}
fmt.write_str(self.name())
}
}
impl Type {
pub(crate) fn _new(name: String, oid: Oid, kind: Kind, schema: String) -> Type {
Type(Inner::Other(Arc::new(Other {
name,
oid,
kind,
schema,
})))
}
/// Returns the `Type` corresponding to the provided `Oid` if it
/// corresponds to a built-in type.
pub fn from_oid(oid: Oid) -> Option<Type> {
Inner::from_oid(oid).map(Type)
}
/// Returns the OID of the `Type`.
pub fn oid(&self) -> Oid {
self.0.oid()
}
/// Returns the kind of this type.
pub fn kind(&self) -> &Kind {
self.0.kind()
}
/// Returns the schema of this type.
pub fn schema(&self) -> &str {
match self.0 {
Inner::Other(ref u) => &u.schema,
_ => "pg_catalog",
}
}
/// Returns the name of this type.
pub fn name(&self) -> &str {
self.0.name()
}
}
/// Represents the kind of a Postgres type.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Kind {
/// A simple type like `VARCHAR` or `INTEGER`.
Simple,
/// An enumerated type along with its variants.
Enum(Vec<String>),
/// A pseudo-type.
Pseudo,
/// An array type along with the type of its elements.
Array(Type),
/// A range type along with the type of its elements.
Range(Type),
/// A domain type along with its underlying type.
Domain(Type),
/// A composite type along with information about its fields.
Composite(Vec<Field>),
#[doc(hidden)]
__ForExtensibility,
}
/// Information about a field of a composite type.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Field {
name: String,
type_: Type,
}
impl Field {
/// Returns the name of the field.
pub fn name(&self) -> &str {
&self.name
}
/// Returns the type of the field.
pub fn type_(&self) -> &Type {
&self.type_
}
}
impl Field {
pub(crate) fn new(name: String, type_: Type) -> Field {
Field { name, type_ }
}
}
/// An error indicating that a `NULL` Postgres value was passed to a `FromSql`
/// implementation that does not support `NULL` values.
#[derive(Debug, Clone, Copy)]
pub struct WasNull;
impl fmt::Display for WasNull {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt.write_str("a Postgres value was `NULL`")
}
}
impl Error for WasNull {}
/// An error indicating that a conversion was attempted between incompatible
/// Rust and Postgres types.
#[derive(Debug)]
pub struct WrongType(Type);
impl fmt::Display for WrongType {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
fmt,
"cannot convert to or from a Postgres value of type `{}`",
self.0
)
}
}
impl Error for WrongType {}
impl WrongType {
pub(crate) fn new(ty: Type) -> WrongType {
WrongType(ty)
}
}
/// A trait for types that can be created from a Postgres value.
///
/// # Types
///
/// The following implementations are provided by this crate, along with the
/// corresponding Postgres types:
///
/// | Rust type | Postgres type(s) |
/// |-----------------------------------|-----------------------------------------------|
/// | `bool` | BOOL |
/// | `i8` | "char" |
/// | `i16` | SMALLINT, SMALLSERIAL |
/// | `i32` | INT, SERIAL |
/// | `u32` | OID |
/// | `i64` | BIGINT, BIGSERIAL |
/// | `f32` | REAL |
/// | `f64` | DOUBLE PRECISION |
/// | `&str`/`String` | VARCHAR, CHAR(n), TEXT, CITEXT, NAME, UNKNOWN |
/// | `&[u8]`/`Vec<u8>` | BYTEA |
/// | `HashMap<String, Option<String>>` | HSTORE |
/// | `SystemTime` | TIMESTAMP, TIMESTAMP WITH TIME ZONE |
///
/// In addition, some implementations are provided for types in third party
/// crates. These are disabled by default; to opt into one of these
/// implementations, activate the Cargo feature corresponding to the crate's
/// name prefixed by `with-`. For example, the `with-serde_json-1` feature enables
/// the implementation for the `serde_json::Value` type.
///
/// | Rust type | Postgres type(s) |
/// |---------------------------------|-------------------------------------|
/// | `chrono::NaiveDateTime` | TIMESTAMP |
/// | `chrono::DateTime<Utc>` | TIMESTAMP WITH TIME ZONE |
/// | `chrono::DateTime<Local>` | TIMESTAMP WITH TIME ZONE |
/// | `chrono::DateTime<FixedOffset>` | TIMESTAMP WITH TIME ZONE |
/// | `chrono::NaiveDate` | DATE |
/// | `chrono::NaiveTime` | TIME |
/// | `eui48::MacAddress` | MACADDR |
/// | `geo_types::Point<f64>` | POINT |
/// | `geo_types::Rect<f64>` | BOX |
/// | `geo_types::LineString<f64>` | PATH |
/// | `serde_json::Value` | JSON, JSONB |
/// | `uuid::Uuid` | UUID |
/// | `bit_vec::BitVec` | BIT, VARBIT |
/// | `eui48::MacAddress` | MACADDR |
///
/// # Nullability
///
/// In addition to the types listed above, `FromSql` is implemented for
/// `Option<T>` where `T` implements `FromSql`. An `Option<T>` represents a
/// nullable Postgres value.
///
/// # Arrays
///
/// `FromSql` is implemented for `Vec<T>` where `T` implements `FromSql`, and
/// corresponds to one-dimensional Postgres arrays.
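///
/// # Examples
///
/// A sketch of a hand-written implementation for a newtype wrapper, delegating
/// to the built-in `i32` implementation. `UserId` is an illustrative assumption,
/// and `Type` plus `std::error::Error` are assumed to be in scope:
///
/// ```ignore
/// struct UserId(i32);
///
/// impl<'a> FromSql<'a> for UserId {
///     fn from_sql(ty: &Type, raw: &'a [u8]) -> Result<UserId, Box<dyn Error + Sync + Send>> {
///         i32::from_sql(ty, raw).map(UserId)
///     }
///
///     fn accepts(ty: &Type) -> bool {
///         <i32 as FromSql>::accepts(ty)
///     }
/// }
/// ```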
pub trait FromSql<'a>: Sized {
/// Creates a new value of this type from a buffer of data of the specified
/// Postgres `Type` in its binary format.
///
/// The caller of this method is responsible for ensuring that this type
/// is compatible with the Postgres `Type`.
fn from_sql(ty: &Type, raw: &'a [u8]) -> Result<Self, Box<dyn Error + Sync + Send>>;
/// Creates a new value of this type from a `NULL` SQL value.
///
/// The caller of this method is responsible for ensuring that this type
/// is compatible with the Postgres `Type`.
///
/// The default implementation returns
/// `Err(Box::new(WasNull))`.
#[allow(unused_variables)]
fn from_sql_null(ty: &Type) -> Result<Self, Box<dyn Error + Sync + Send>> {
Err(Box::new(WasNull))
}
/// A convenience function that delegates to `from_sql` and `from_sql_null` depending on the
/// value of `raw`.
fn from_sql_nullable(
ty: &Type,
raw: Option<&'a [u8]>,
) -> Result<Self, Box<dyn Error + Sync + Send>> {
match raw {
Some(raw) => Self::from_sql(ty, raw),
None => Self::from_sql_null(ty),
}
}
/// Determines if a value of this type can be created from the specified
/// Postgres `Type`.
fn accepts(ty: &Type) -> bool;
}
/// A trait for types which can be created from a Postgres value without borrowing any data.
///
/// This is primarily useful for trait bounds on functions.
pub trait FromSqlOwned: for<'a> FromSql<'a> {}
impl<T> FromSqlOwned for T where T: for<'a> FromSql<'a> {}
impl<'a, T: FromSql<'a>> FromSql<'a> for Option<T> {
fn from_sql(ty: &Type, raw: &'a [u8]) -> Result<Option<T>, Box<dyn Error + Sync + Send>> {
<T as FromSql>::from_sql(ty, raw).map(Some)
}
fn from_sql_null(_: &Type) -> Result<Option<T>, Box<dyn Error + Sync + Send>> {
Ok(None)
}
fn accepts(ty: &Type) -> bool {
<T as FromSql>::accepts(ty)
}
}
impl<'a, T: FromSql<'a>> FromSql<'a> for Vec<T> {
fn from_sql(ty: &Type, raw: &'a [u8]) -> Result<Vec<T>, Box<dyn Error + Sync + Send>> {
let member_type = match *ty.kind() {
Kind::Array(ref member) => member,
_ => panic!("expected array type"),
};
let array = types::array_from_sql(raw)?;
if array.dimensions().count()? > 1 {
return Err("array contains too many dimensions".into());
}
array
.values()
.and_then(|v| T::from_sql_nullable(member_type, v))
.collect()
}
fn accepts(ty: &Type) -> bool {
match *ty.kind() {
Kind::Array(ref inner) => T::accepts(inner),
_ => false,
}
}
}
impl<'a> FromSql<'a> for Vec<u8> {
fn from_sql(_: &Type, raw: &'a [u8]) -> Result<Vec<u8>, Box<dyn Error + Sync + Send>> {
Ok(types::bytea_from_sql(raw).to_owned())
}
accepts!(BYTEA);
}
impl<'a> FromSql<'a> for &'a [u8] {
fn from_sql(_: &Type, raw: &'a [u8]) -> Result<&'a [u8], Box<dyn Error + Sync + Send>> {
Ok(types::bytea_from_sql(raw))
}
accepts!(BYTEA);
}
impl<'a> FromSql<'a> for String {
fn from_sql(_: &Type, raw: &'a [u8]) -> Result<String, Box<dyn Error + Sync + Send>> {
types::text_from_sql(raw).map(|b| b.to_owned())
}
fn accepts(ty: &Type) -> bool {
<&str as FromSql>::accepts(ty)
}
}
impl<'a> FromSql<'a> for &'a str {
fn from_sql(_: &Type, raw: &'a [u8]) -> Result<&'a str, Box<dyn Error + Sync + Send>> {
types::text_from_sql(raw)
}
fn accepts(ty: &Type) -> bool {
match *ty {
Type::VARCHAR | Type::TEXT | Type::BPCHAR | Type::NAME | Type::UNKNOWN => true,
ref ty if ty.name() == "citext" => true,
_ => false,
}
}
}
macro_rules! simple_from {
($t:ty, $f:ident, $($expected:ident),+) => {
impl<'a> FromSql<'a> for $t {
fn from_sql(_: &Type, raw: &'a [u8]) -> Result<$t, Box<dyn Error + Sync + Send>> {
types::$f(raw)
}
accepts!($($expected),+);
}
}
}
simple_from!(bool, bool_from_sql, BOOL);
simple_from!(i8, char_from_sql, CHAR);
simple_from!(i16, int2_from_sql, INT2);
simple_from!(i32, int4_from_sql, INT4);
simple_from!(u32, oid_from_sql, OID);
simple_from!(i64, int8_from_sql, INT8);
simple_from!(f32, float4_from_sql, FLOAT4);
simple_from!(f64, float8_from_sql, FLOAT8);
impl<'a, S> FromSql<'a> for HashMap<String, Option<String>, S>
where
S: Default + BuildHasher,
{
fn from_sql(
_: &Type,
raw: &'a [u8],
) -> Result<HashMap<String, Option<String>, S>, Box<dyn Error + Sync + Send>> {
types::hstore_from_sql(raw)?
.map(|(k, v)| (k.to_owned(), v.map(str::to_owned)))
.collect()
}
fn accepts(ty: &Type) -> bool {
ty.name() == "hstore"
}
}
impl<'a> FromSql<'a> for SystemTime {
fn from_sql(_: &Type, raw: &'a [u8]) -> Result<SystemTime, Box<dyn Error + Sync + Send>> {
let time = types::timestamp_from_sql(raw)?;
let epoch = UNIX_EPOCH + Duration::from_secs(TIME_SEC_CONVERSION);
let negative = time < 0;
let time = time.abs() as u64;
let secs = time / USEC_PER_SEC;
let nsec = (time % USEC_PER_SEC) * NSEC_PER_USEC;
let offset = Duration::new(secs, nsec as u32);
let time = if negative {
epoch - offset
} else {
epoch + offset
};
Ok(time)
}
accepts!(TIMESTAMP, TIMESTAMPTZ);
}
/// An enum representing the nullability of a Postgres value.
pub enum IsNull {
/// The value is NULL.
Yes,
/// The value is not NULL.
No,
}
/// A trait for types that can be converted into Postgres values.
///
/// # Types
///
/// The following implementations are provided by this crate, along with the
/// corresponding Postgres types:
///
/// | Rust type | Postgres type(s) |
/// |-----------------------------------|--------------------------------------|
/// | `bool` | BOOL |
/// | `i8` | "char" |
/// | `i16` | SMALLINT, SMALLSERIAL |
/// | `i32` | INT, SERIAL |
/// | `u32` | OID |
/// | `i64` | BIGINT, BIGSERIAL |
/// | `f32` | REAL |
/// | `f64` | DOUBLE PRECISION |
/// | `&str`/`String` | VARCHAR, CHAR(n), TEXT, CITEXT, NAME |
/// | `&[u8]`/`Vec<u8>`                 | BYTEA                                |
/// | `HashMap<String, Option<String>>` | HSTORE |
/// | `SystemTime` | TIMESTAMP, TIMESTAMP WITH TIME ZONE |
///
/// In addition, some implementations are provided for types in third party
/// crates. These are disabled by default; to opt into one of these
/// implementations, activate the Cargo feature corresponding to the crate's
/// name prefixed by `with-`. For example, the `with-serde_json-1` feature enables
/// the implementation for the `serde_json::Value` type.
///
/// | Rust type | Postgres type(s) |
/// |---------------------------------|-------------------------------------|
/// | `chrono::NaiveDateTime` | TIMESTAMP |
/// | `chrono::DateTime<Utc>` | TIMESTAMP WITH TIME ZONE |
/// | `chrono::DateTime<Local>` | TIMESTAMP WITH TIME ZONE |
/// | `chrono::DateTime<FixedOffset>` | TIMESTAMP WITH TIME ZONE |
/// | `chrono::NaiveDate` | DATE |
/// | `chrono::NaiveTime` | TIME |
/// | `eui48::MacAddress` | MACADDR |
/// | `geo_types::Point<f64>` | POINT |
/// | `geo_types::Rect<f64>` | BOX |
/// | `geo_types::LineString<f64>` | PATH |
/// | `serde_json::Value` | JSON, JSONB |
/// | `uuid::Uuid` | UUID |
/// | `bit_vec::BitVec` | BIT, VARBIT |
/// | `eui48::MacAddress` | MACADDR |
///
/// # Nullability
///
/// In addition to the types listed above, `ToSql` is implemented for
/// `Option<T>` where `T` implements `ToSql`. An `Option<T>` represents a
/// nullable Postgres value.
///
/// # Arrays
///
/// `ToSql` is implemented for `Vec<T>` and `&[T]` where `T` implements `ToSql`,
/// and corresponds to one-dimensional Postgres arrays with an index offset of 1.
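///
/// # Examples
///
/// A sketch of a hand-written implementation for a newtype wrapper, delegating
/// to the built-in `i32` implementation. `UserId` is an illustrative assumption,
/// and `Type`, `IsNull`, and `std::error::Error` are assumed to be in scope
/// (`to_sql_checked!` is the macro exported by this crate):
///
/// ```ignore
/// #[derive(Debug)]
/// struct UserId(i32);
///
/// impl ToSql for UserId {
///     fn to_sql(&self, ty: &Type, out: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
///         self.0.to_sql(ty, out)
///     }
///
///     fn accepts(ty: &Type) -> bool {
///         <i32 as ToSql>::accepts(ty)
///     }
///
///     to_sql_checked!();
/// }
/// ```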
pub trait ToSql: fmt::Debug {
/// Converts the value of `self` into the binary format of the specified
/// Postgres `Type`, appending it to `out`.
///
/// The caller of this method is responsible for ensuring that this type
/// is compatible with the Postgres `Type`.
///
/// The return value indicates if this value should be represented as
/// `NULL`. If this is the case, implementations **must not** write
/// anything to `out`.
fn to_sql(&self, ty: &Type, out: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>>
where
Self: Sized;
/// Determines if a value of this type can be converted to the specified
/// Postgres `Type`.
fn accepts(ty: &Type) -> bool
where
Self: Sized;
/// An adaptor method used internally by Rust-Postgres.
///
/// *All* implementations of this method should be generated by the
/// `to_sql_checked!()` macro.
fn to_sql_checked(
&self,
ty: &Type,
out: &mut Vec<u8>,
) -> Result<IsNull, Box<dyn Error + Sync + Send>>;
}
impl<'a, T> ToSql for &'a T
where
T: ToSql,
{
fn to_sql(&self, ty: &Type, out: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
(*self).to_sql(ty, out)
}
fn accepts(ty: &Type) -> bool {
T::accepts(ty)
}
to_sql_checked!();
}
impl<T: ToSql> ToSql for Option<T> {
fn to_sql(&self, ty: &Type, out: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
match *self {
Some(ref val) => val.to_sql(ty, out),
None => Ok(IsNull::Yes),
}
}
fn accepts(ty: &Type) -> bool {
<T as ToSql>::accepts(ty)
}
to_sql_checked!();
}
impl<'a, T: ToSql> ToSql for &'a [T] {
fn to_sql(&self, ty: &Type, w: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
let member_type = match *ty.kind() {
Kind::Array(ref member) => member,
_ => panic!("expected array type"),
};
let dimension = ArrayDimension {
len: downcast(self.len())?,
lower_bound: 1,
};
types::array_to_sql(
Some(dimension),
member_type.oid(),
self.iter(),
|e, w| match e.to_sql(member_type, w)? {
IsNull::No => Ok(postgres_protocol::IsNull::No),
IsNull::Yes => Ok(postgres_protocol::IsNull::Yes),
},
w,
)?;
Ok(IsNull::No)
}
fn accepts(ty: &Type) -> bool {
match *ty.kind() {
Kind::Array(ref member) => T::accepts(member),
_ => false,
}
}
to_sql_checked!();
}
impl<'a> ToSql for &'a [u8] {
fn to_sql(&self, _: &Type, w: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
types::bytea_to_sql(*self, w);
Ok(IsNull::No)
}
accepts!(BYTEA);
to_sql_checked!();
}
impl<T: ToSql> ToSql for Vec<T> {
fn to_sql(&self, ty: &Type, w: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
<&[T] as ToSql>::to_sql(&&**self, ty, w)
}
fn accepts(ty: &Type) -> bool {
<&[T] as ToSql>::accepts(ty)
}
to_sql_checked!();
}
impl ToSql for Vec<u8> {
fn to_sql(&self, ty: &Type, w: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
<&[u8] as ToSql>::to_sql(&&**self, ty, w)
}
fn accepts(ty: &Type) -> bool {
<&[u8] as ToSql>::accepts(ty)
}
to_sql_checked!();
}
impl<'a> ToSql for &'a str {
fn to_sql(&self, _: &Type, w: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
types::text_to_sql(*self, w);
Ok(IsNull::No)
}
fn accepts(ty: &Type) -> bool {
match *ty {
Type::VARCHAR | Type::TEXT | Type::BPCHAR | Type::NAME | Type::UNKNOWN => true,
ref ty if ty.name() == "citext" => true,
_ => false,
}
}
to_sql_checked!();
}
impl<'a> ToSql for Cow<'a, str> {
fn to_sql(&self, ty: &Type, w: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
<&str as ToSql>::to_sql(&&self.as_ref(), ty, w)
}
fn accepts(ty: &Type) -> bool {
<&str as ToSql>::accepts(ty)
}
to_sql_checked!();
}
impl ToSql for String {
fn to_sql(&self, ty: &Type, w: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
<&str as ToSql>::to_sql(&&**self, ty, w)
}
fn accepts(ty: &Type) -> bool {
<&str as ToSql>::accepts(ty)
}
to_sql_checked!();
}
macro_rules! simple_to {
($t:ty, $f:ident, $($expected:ident),+) => {
impl ToSql for $t {
fn to_sql(&self,
_: &Type,
w: &mut Vec<u8>)
-> Result<IsNull, Box<dyn Error + Sync + Send>> {
types::$f(*self, w);
Ok(IsNull::No)
}
accepts!($($expected),+);
to_sql_checked!();
}
}
}
simple_to!(bool, bool_to_sql, BOOL);
simple_to!(i8, char_to_sql, CHAR);
simple_to!(i16, int2_to_sql, INT2);
simple_to!(i32, int4_to_sql, INT4);
simple_to!(u32, oid_to_sql, OID);
simple_to!(i64, int8_to_sql, INT8);
simple_to!(f32, float4_to_sql, FLOAT4);
simple_to!(f64, float8_to_sql, FLOAT8);
impl<H> ToSql for HashMap<String, Option<String>, H>
where
H: BuildHasher,
{
fn to_sql(&self, _: &Type, w: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
types::hstore_to_sql(
self.iter().map(|(k, v)| (&**k, v.as_ref().map(|v| &**v))),
w,
)?;
Ok(IsNull::No)
}
fn accepts(ty: &Type) -> bool {
ty.name() == "hstore"
}
to_sql_checked!();
}
impl ToSql for SystemTime {
fn to_sql(&self, _: &Type, w: &mut Vec<u8>) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
let epoch = UNIX_EPOCH + Duration::from_secs(TIME_SEC_CONVERSION);
let to_usec =
|d: Duration| d.as_secs() * USEC_PER_SEC + u64::from(d.subsec_nanos()) / NSEC_PER_USEC;
let time = match self.duration_since(epoch) {
Ok(duration) => to_usec(duration) as i64,
Err(e) => -(to_usec(e.duration()) as i64),
};
types::timestamp_to_sql(time, w);
Ok(IsNull::No)
}
accepts!(TIMESTAMP, TIMESTAMPTZ);
to_sql_checked!();
}
fn downcast(len: usize) -> Result<i32, Box<dyn Error + Sync + Send>> {
if len > i32::max_value() as usize {
Err("value too large to transmit".into())
} else {
Ok(len as i32)
}
}
| 31.93598 | 100 | 0.505693 |
ebd38730d9c02410731985ed1f9d7b4e6d4a250b | 7,118 | use parser::ast::{
AtomKind, Boolean, Expression, FloatSize, FunctionType, IntSize, LValue, PrimitiveType,
};
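/// Formats an AST node back into a human-readable, source-like string.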
pub trait AatbeFmt {
fn fmt(self) -> String;
}
impl AatbeFmt for PrimitiveType {
fn fmt(self) -> String {
(&self).fmt()
}
}
impl AatbeFmt for FunctionType {
fn fmt(self) -> String {
(&self).fmt()
}
}
impl AatbeFmt for &FunctionType {
fn fmt(self) -> String {
let params = if self.params.len() == 1 && self.params[0] == PrimitiveType::Unit {
String::from("()")
} else {
            self.params
                .iter()
                .map(|p| p.fmt())
                .collect::<Vec<_>>()
                .join(", ")
};
let ret_ty = format!(" -> {}", (&*self.ret_ty).fmt());
format!("{}{}", params, ret_ty)
}
}
impl AatbeFmt for &PrimitiveType {
fn fmt(self) -> String {
match self {
PrimitiveType::Str => String::from("str"),
PrimitiveType::Bool => String::from("bool"),
PrimitiveType::Int(bits) => match bits {
IntSize::Bits8 => String::from("i8"),
IntSize::Bits16 => String::from("i16"),
IntSize::Bits32 => String::from("i32"),
IntSize::Bits64 => String::from("i64"),
},
PrimitiveType::UInt(bits) => match bits {
IntSize::Bits8 => String::from("u8"),
IntSize::Bits16 => String::from("u16"),
IntSize::Bits32 => String::from("u32"),
IntSize::Bits64 => String::from("u64"),
},
PrimitiveType::Float(bits) => match bits {
FloatSize::Bits32 => String::from("f32"),
FloatSize::Bits64 => String::from("f64"),
},
PrimitiveType::Varargs => String::from("..."),
PrimitiveType::NamedType {
name: _,
ty: Some(ty),
} => ty.clone().fmt(),
PrimitiveType::Pointer(ty) => format!("{}*", ty.clone().fmt()),
PrimitiveType::Char => String::from("char"),
PrimitiveType::TypeRef(ty) => ty.clone(),
PrimitiveType::Array { ty, len } => {
format!("{}[{}]", ty.clone().fmt(), len.to_string())
}
PrimitiveType::Unit => String::from("()"),
PrimitiveType::Ref(ty) => format!("&{}", ty.clone().fmt()),
PrimitiveType::Function(func) => func.fmt(),
PrimitiveType::Slice { ty } => format!("{}[]", ty.clone().fmt()),
PrimitiveType::GenericTypeRef(name, types) => format!(
"{}[{}]",
name,
types
.iter()
.map(|ty| ty.fmt())
.collect::<Vec<_>>()
.join(", ")
),
PrimitiveType::Newtype(name) => name.clone(),
PrimitiveType::VariantType(name) => name.clone(),
_ => panic!("ICE fmt {:?}", self),
}
}
}
impl AatbeFmt for &AtomKind {
fn fmt(self) -> String {
match self {
AtomKind::StringLiteral(lit) => format!("\"{}\"", lit),
AtomKind::CharLiteral(lit) => format!("{}", lit),
AtomKind::Integer(val, ty) => format!("{}{}", val, ty.fmt()),
AtomKind::Floating(val, ty) => format!("{}{}", val, ty.fmt()),
AtomKind::Bool(Boolean::True) => String::from("true"),
AtomKind::Bool(Boolean::False) => String::from("false"),
AtomKind::Ident(id) => format!("{}", id),
AtomKind::Unary(op, id) => format!("{}{}", op, id.fmt()),
AtomKind::Parenthesized(expr) => format!("({})", expr.fmt()),
AtomKind::Cast(val, ty) => format!("{} as {}", val.fmt(), ty.fmt()),
AtomKind::NamedValue { name, val } => format!("{}: {}", name, val.fmt()),
AtomKind::Access(list) => list.join("."),
AtomKind::Array(vals) => format!(
"[{}]",
vals.iter()
.map(|val| val.fmt())
.collect::<Vec<_>>()
.join(", ")
),
AtomKind::Index(lval, index) => format!("{}[{}]", lval.fmt(), index.fmt()),
AtomKind::Ref(val) => format!("&{}", val.fmt()),
AtomKind::Deref(val) => format!("*{}", val.fmt()),
AtomKind::SymbolLiteral(sym) => format!(":{}", sym),
AtomKind::Unit => format!("()"),
_ => panic!("ICE fmt {:?}", self),
}
}
}
impl AatbeFmt for &Expression {
fn fmt(self) -> String {
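        // Calls and record initializers only emit a bracketed type-argument list when type
        // arguments are actually present.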
match self {
Expression::Binary(lhs, op, rhs) => format!("{} {} {}", lhs.fmt(), op, rhs.fmt()),
Expression::Atom(atom) => atom.fmt(),
Expression::RecordInit {
record,
types,
values,
} => format!(
"{}{} {{ {} }}",
record,
if types.len() > 0 {
format!(
"[{}]",
types
.iter()
.map(|val| val.fmt())
.collect::<Vec<_>>()
.join(", ")
)
} else {
String::default()
},
values
.iter()
.map(|val| val.fmt())
.collect::<Vec<_>>()
.join(", ")
),
Expression::Call { name, types, args } => format!(
"{}{} {}",
name,
if types.len() > 0 {
format!(
"[{}]",
types
.iter()
.map(|val| val.fmt())
.collect::<Vec<_>>()
.join(", ")
)
} else {
String::default()
},
args.iter()
.map(|val| val.fmt())
.collect::<Vec<_>>()
.join(", ")
),
Expression::Function {
name,
export,
ty: ty @ FunctionType { ext, .. },
..
} => format!(
"{}{}fn {}{}",
if *export { "exp " } else { "" },
if *ext { "ext " } else { "" },
name,
ty.fmt(),
),
_ => panic!("ICE fmt {:?}", self),
}
}
}
impl AatbeFmt for &LValue {
fn fmt(self) -> String {
match self {
LValue::Ident(ident) => ident.clone(),
LValue::Accessor(access) => access.join("."),
LValue::Deref(lval) => format!("*{}", lval.fmt()),
LValue::Index(lval, expr) => format!("{}[{}]", lval.fmt(), expr.fmt()),
}
}
}
| 35.064039 | 94 | 0.387187 |
714b729375b40fada4ffb25fab29e685cab109fb | 71,155 | mod settings;
pub mod parser;
mod meta;
mod help;
mod validator;
mod usage;
// Std
use std::env;
use std::ffi::{OsStr, OsString};
use std::fmt;
use std::io::{self, BufRead, BufWriter, Write};
use std::path::Path;
use std::process;
use std::rc::Rc;
use std::result::Result as StdResult;
// Third Party
#[cfg(feature = "yaml")]
use yaml_rust::Yaml;
// Internal
use app::help::Help;
use app::parser::Parser;
use args::{AnyArg, Arg, ArgGroup, ArgMatcher, ArgMatches, ArgSettings};
use errors::Result as ClapResult;
pub use self::settings::AppSettings;
use completions::Shell;
use map::{self, VecMap};
/// Used to create a representation of a command line program and all possible command line
/// arguments. Application settings are set using the "builder pattern" with the
/// [`App::get_matches`] family of methods being the terminal methods that start the
/// runtime-parsing process. These methods then return information about the user-supplied
/// arguments (or lack thereof).
///
/// **NOTE:** There aren't any mandatory "options" that one must set. The "options" may
/// also appear in any order (so long as one of the [`App::get_matches`] methods is the last method
/// called).
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// let m = App::new("My Program")
/// .author("Me, [email protected]")
/// .version("1.0.2")
/// .about("Explains in brief what the program does")
/// .arg(
/// Arg::with_name("in_file").index(1)
/// )
/// .after_help("Longer explanation to appear after the options when \
/// displaying the help information from --help or -h")
/// .get_matches();
///
/// // Your program logic starts here...
/// ```
/// [`App::get_matches`]: ./struct.App.html#method.get_matches
#[allow(missing_debug_implementations)]
pub struct App<'a, 'b>
where
'a: 'b,
{
#[doc(hidden)] pub p: Parser<'a, 'b>,
}
impl<'a, 'b> App<'a, 'b> {
    /// Creates a new instance of an application requiring a name. The name may be, but doesn't
    /// have to be, the same as the binary. The name will be displayed to the user when they request to
/// print version or help and usage information.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// let prog = App::new("My Program")
/// # ;
/// ```
pub fn new<S: Into<String>>(n: S) -> Self {
App {
p: Parser::with_name(n.into()),
}
}
    /// Appends extra raw arguments that will be parsed in addition to those supplied at runtime.
    /// They are prepended to the user-supplied arguments when one of the `get_matches*` methods
    /// runs (see `get_matches_from_safe_borrow`).
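    ///
    /// # Examples
    ///
    /// A minimal sketch (the `--verbose` flag below is purely illustrative):
    ///
    /// ```no_run
    /// # use clap::{App, Arg};
    /// use std::ffi::OsString;
    ///
    /// let mut extra = vec![OsString::from("--verbose")];
    /// let _matches = App::new("myprog")
    ///     .arg(Arg::with_name("verbose").long("verbose"))
    ///     .argv(&mut extra)
    ///     .get_matches();
    /// ```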
pub fn argv(mut self, argv: &mut Vec<OsString>) -> Self {
self.p.extra_argv.append(argv);
self
}
/// Get the name of the app
pub fn get_name(&self) -> &str { &self.p.meta.name }
/// Get the name of the binary
pub fn get_bin_name(&self) -> Option<&str> { self.p.meta.bin_name.as_ref().map(|s| s.as_str()) }
/// Creates a new instance of an application requiring a name, but uses the [`crate_authors!`]
/// and [`crate_version!`] macros to fill in the [`App::author`] and [`App::version`] fields.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// let prog = App::with_defaults("My Program")
/// # ;
/// ```
/// [`crate_authors!`]: ./macro.crate_authors!.html
/// [`crate_version!`]: ./macro.crate_version!.html
/// [`App::author`]: ./struct.App.html#method.author
    /// [`App::version`]: ./struct.App.html#method.version
#[deprecated(since="2.14.1", note="Can never work; use explicit App::author() and App::version() calls instead")]
pub fn with_defaults<S: Into<String>>(n: S) -> Self {
let mut a = App {
p: Parser::with_name(n.into()),
};
a.p.meta.author = Some("Kevin K. <[email protected]>");
a.p.meta.version = Some("2.19.2");
a
}
/// Creates a new instance of [`App`] from a .yml (YAML) file. A full example of supported YAML
/// objects can be found in [`examples/17_yaml.rs`] and [`examples/17_yaml.yml`]. One great use
/// for using YAML is when supporting multiple languages and dialects, as each language could
/// be a distinct YAML file and determined at compiletime via `cargo` "features" in your
/// `Cargo.toml`
///
/// In order to use this function you must compile `clap` with the `features = ["yaml"]` in
/// your settings for the `[dependencies.clap]` table of your `Cargo.toml`
///
/// **NOTE:** Due to how the YAML objects are built there is a convenience macro for loading
/// the YAML file at compile time (relative to the current file, like modules work). That YAML
/// object can then be passed to this function.
///
/// # Panics
///
/// The YAML file must be properly formatted or this function will [`panic!`]. A good way to
/// ensure this doesn't happen is to run your program with the `--help` switch. If this passes
/// without error, you needn't worry because the YAML is properly formatted.
///
/// # Examples
///
/// The following example shows how to load a properly formatted YAML file to build an instance
/// of an [`App`] struct.
///
/// ```ignore
/// # #[macro_use]
/// # extern crate clap;
/// # use clap::App;
/// # fn main() {
/// let yml = load_yaml!("app.yml");
/// let app = App::from_yaml(yml);
///
/// // continued logic goes here, such as `app.get_matches()` etc.
/// # }
/// ```
/// [`App`]: ./struct.App.html
/// [`examples/17_yaml.rs`]: https://github.com/kbknapp/clap-rs/blob/master/examples/17_yaml.rs
/// [`examples/17_yaml.yml`]: https://github.com/kbknapp/clap-rs/blob/master/examples/17_yaml.yml
/// [`panic!`]: https://doc.rust-lang.org/std/macro.panic!.html
#[cfg(feature = "yaml")]
pub fn from_yaml(yaml: &'a Yaml) -> App<'a, 'a> { App::from(yaml) }
/// Sets a string of author(s) that will be displayed to the user when they
/// request the help information with `--help` or `-h`.
///
/// **Pro-tip:** Use `clap`s convenience macro [`crate_authors!`] to automatically set your
/// application's author(s) to the same thing as your crate at compile time. See the [`examples/`]
/// directory for more information
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .author("Me, [email protected]")
/// # ;
/// ```
/// [`crate_authors!`]: ./macro.crate_authors!.html
/// [`examples/`]: https://github.com/kbknapp/clap-rs/tree/master/examples
pub fn author<S: Into<&'b str>>(mut self, author: S) -> Self {
self.p.meta.author = Some(author.into());
self
}
/// Overrides the system-determined binary name. This should only be used when absolutely
/// necessary, such as when the binary name for your application is misleading, or perhaps
/// *not* how the user should invoke your program.
///
/// **Pro-tip:** When building things such as third party `cargo` subcommands, this setting
/// **should** be used!
///
/// **NOTE:** This command **should not** be used for [`SubCommand`]s.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("My Program")
/// .bin_name("my_binary")
/// # ;
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
pub fn bin_name<S: Into<String>>(mut self, name: S) -> Self {
self.p.meta.bin_name = Some(name.into());
self
}
/// Sets a string describing what the program does. This will be displayed when displaying help
/// information with `-h`.
///
/// **NOTE:** If only `about` is provided, and not [`App::long_about`] but the user requests
/// `--help` clap will still display the contents of `about` appropriately
///
/// **NOTE:** Only [`App::about`] is used in completion script generation in order to be
/// concise
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .about("Does really amazing things to great people")
/// # ;
/// ```
/// [`App::long_about`]: ./struct.App.html#method.long_about
pub fn about<S: Into<&'b str>>(mut self, about: S) -> Self {
self.p.meta.about = Some(about.into());
self
}
/// Sets a string describing what the program does. This will be displayed when displaying help
/// information.
///
/// **NOTE:** If only `long_about` is provided, and not [`App::about`] but the user requests
/// `-h` clap will still display the contents of `long_about` appropriately
///
/// **NOTE:** Only [`App::about`] is used in completion script generation in order to be
/// concise
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .long_about(
/// "Does really amazing things to great people. Now let's talk a little
/// more in depth about how this subcommand really works. It may take about
/// a few lines of text, but that's ok!")
/// # ;
/// ```
/// [`App::about`]: ./struct.App.html#method.about
pub fn long_about<S: Into<&'b str>>(mut self, about: S) -> Self {
self.p.meta.long_about = Some(about.into());
self
}
/// Sets the program's name. This will be displayed when displaying help information.
///
    /// **Pro-tip:** This function is particularly useful when configuring a program via
/// [`App::from_yaml`] in conjunction with the [`crate_name!`] macro to derive the program's
/// name from its `Cargo.toml`.
///
/// # Examples
/// ```ignore
/// # #[macro_use]
/// # extern crate clap;
/// # use clap::App;
/// # fn main() {
/// let yml = load_yaml!("app.yml");
/// let app = App::from_yaml(yml)
/// .name(crate_name!());
///
/// // continued logic goes here, such as `app.get_matches()` etc.
/// # }
/// ```
///
/// [`App::from_yaml`]: ./struct.App.html#method.from_yaml
/// [`crate_name!`]: ./macro.crate_name.html
pub fn name<S: Into<String>>(mut self, name: S) -> Self {
self.p.meta.name = name.into();
self
}
/// Adds additional help information to be displayed in addition to auto-generated help. This
/// information is displayed **after** the auto-generated help information. This is often used
/// to describe how to use the arguments, or caveats to be noted.
///
/// # Examples
///
/// ```no_run
/// # use clap::App;
/// App::new("myprog")
/// .after_help("Does really amazing things to great people...but be careful with -R")
/// # ;
/// ```
pub fn after_help<S: Into<&'b str>>(mut self, help: S) -> Self {
self.p.meta.more_help = Some(help.into());
self
}
/// Adds additional help information to be displayed in addition to auto-generated help. This
/// information is displayed **before** the auto-generated help information. This is often used
/// for header information.
///
/// # Examples
///
/// ```no_run
/// # use clap::App;
/// App::new("myprog")
/// .before_help("Some info I'd like to appear before the help info")
/// # ;
/// ```
pub fn before_help<S: Into<&'b str>>(mut self, help: S) -> Self {
self.p.meta.pre_help = Some(help.into());
self
}
/// Sets a string of the version number to be displayed when displaying version or help
/// information with `-V`.
///
/// **NOTE:** If only `version` is provided, and not [`App::long_version`] but the user
/// requests `--version` clap will still display the contents of `version` appropriately
///
/// **Pro-tip:** Use `clap`s convenience macro [`crate_version!`] to automatically set your
/// application's version to the same thing as your crate at compile time. See the [`examples/`]
/// directory for more information
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .version("v0.1.24")
/// # ;
/// ```
/// [`crate_version!`]: ./macro.crate_version!.html
/// [`examples/`]: https://github.com/kbknapp/clap-rs/tree/master/examples
/// [`App::long_version`]: ./struct.App.html#method.long_version
pub fn version<S: Into<&'b str>>(mut self, ver: S) -> Self {
self.p.meta.version = Some(ver.into());
self
}
/// Sets a string of the version number to be displayed when displaying version or help
/// information with `--version`.
///
/// **NOTE:** If only `long_version` is provided, and not [`App::version`] but the user
/// requests `-V` clap will still display the contents of `long_version` appropriately
///
/// **Pro-tip:** Use `clap`s convenience macro [`crate_version!`] to automatically set your
/// application's version to the same thing as your crate at compile time. See the [`examples/`]
/// directory for more information
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .long_version(
/// "v0.1.24
/// commit: abcdef89726d
/// revision: 123
/// release: 2
/// binary: myprog")
/// # ;
/// ```
/// [`crate_version!`]: ./macro.crate_version!.html
/// [`examples/`]: https://github.com/kbknapp/clap-rs/tree/master/examples
/// [`App::version`]: ./struct.App.html#method.version
pub fn long_version<S: Into<&'b str>>(mut self, ver: S) -> Self {
self.p.meta.long_version = Some(ver.into());
self
}
/// Sets a custom usage string to override the auto-generated usage string.
///
/// This will be displayed to the user when errors are found in argument parsing, or when you
/// call [`ArgMatches::usage`]
///
/// **CAUTION:** Using this setting disables `clap`s "context-aware" usage strings. After this
/// setting is set, this will be the only usage string displayed to the user!
///
/// **NOTE:** You do not need to specify the "USAGE: \n\t" portion, as that will
/// still be applied by `clap`, you only need to specify the portion starting
/// with the binary name.
///
/// **NOTE:** This will not replace the entire help message, *only* the portion
/// showing the usage.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .usage("myapp [-clDas] <some_file>")
/// # ;
/// ```
/// [`ArgMatches::usage`]: ./struct.ArgMatches.html#method.usage
pub fn usage<S: Into<&'b str>>(mut self, usage: S) -> Self {
self.p.meta.usage_str = Some(usage.into());
self
}
/// Sets a custom help message and overrides the auto-generated one. This should only be used
/// when the auto-generated message does not suffice.
///
/// This will be displayed to the user when they use `--help` or `-h`
///
/// **NOTE:** This replaces the **entire** help message, so nothing will be auto-generated.
///
/// **NOTE:** This **only** replaces the help message for the current command, meaning if you
/// are using subcommands, those help messages will still be auto-generated unless you
/// specify a [`Arg::help`] for them as well.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myapp")
/// .help("myapp v1.0\n\
/// Does awesome things\n\
/// (C) [email protected]\n\n\
///
    ///            USAGE: myapp <opts> <command>\n\n\
///
/// Options:\n\
    ///            -h, --help       Display this message\n\
/// -V, --version Display version info\n\
/// -s <stuff> Do something with stuff\n\
/// -v Be verbose\n\n\
///
    ///            Commands:\n\
/// help Prints this message\n\
/// work Do some work")
/// # ;
/// ```
/// [`Arg::help`]: ./struct.Arg.html#method.help
pub fn help<S: Into<&'b str>>(mut self, help: S) -> Self {
self.p.meta.help_str = Some(help.into());
self
}
/// Sets the [`short`] for the auto-generated `help` argument.
///
/// By default `clap` automatically assigns `h`, but this can be overridden if you have a
/// different argument which you'd prefer to use the `-h` short with. This can be done by
/// defining your own argument with a lowercase `h` as the [`short`].
///
/// `clap` lazily generates these `help` arguments **after** you've defined any arguments of
/// your own.
///
/// **NOTE:** Any leading `-` characters will be stripped, and only the first
/// non `-` character will be used as the [`short`] version
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .help_short("H") // Using an uppercase `H` instead of the default lowercase `h`
/// # ;
/// ```
/// [`short`]: ./struct.Arg.html#method.short
pub fn help_short<S: AsRef<str> + 'b>(mut self, s: S) -> Self {
self.p.help_short(s.as_ref());
self
}
/// Sets the [`short`] for the auto-generated `version` argument.
///
/// By default `clap` automatically assigns `V`, but this can be overridden if you have a
/// different argument which you'd prefer to use the `-V` short with. This can be done by
/// defining your own argument with an uppercase `V` as the [`short`].
///
/// `clap` lazily generates these `version` arguments **after** you've defined any arguments of
/// your own.
///
/// **NOTE:** Any leading `-` characters will be stripped, and only the first
/// non `-` character will be used as the `short` version
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .version_short("v") // Using a lowercase `v` instead of the default capital `V`
/// # ;
/// ```
/// [`short`]: ./struct.Arg.html#method.short
pub fn version_short<S: AsRef<str>>(mut self, s: S) -> Self {
self.p.version_short(s.as_ref());
self
}
/// Sets the help text for the auto-generated `help` argument.
///
/// By default `clap` sets this to `"Prints help information"`, but if you're using a
/// different convention for your help messages and would prefer a different phrasing you can
/// override it.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .help_message("Print help information") // Perhaps you want imperative help messages
///
/// # ;
/// ```
pub fn help_message<S: Into<&'a str>>(mut self, s: S) -> Self {
self.p.help_message = Some(s.into());
self
}
/// Sets the help text for the auto-generated `version` argument.
///
/// By default `clap` sets this to `"Prints version information"`, but if you're using a
/// different convention for your help messages and would prefer a different phrasing then you
/// can change it.
///
/// # Examples
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .version_message("Print version information") // Perhaps you want imperative help messages
/// # ;
/// ```
pub fn version_message<S: Into<&'a str>>(mut self, s: S) -> Self {
self.p.version_message = Some(s.into());
self
}
/// Sets the help template to be used, overriding the default format.
///
    /// Tags are given inside curly brackets.
///
/// Valid tags are:
///
/// * `{bin}` - Binary name.
/// * `{version}` - Version number.
/// * `{author}` - Author information.
/// * `{about}` - General description (from [`App::about`])
/// * `{usage}` - Automatically generated or given usage string.
    /// * `{all-args}` - Help for all arguments (options, flags, positional arguments,
/// and subcommands) including titles.
/// * `{unified}` - Unified help for options and flags. Note, you must *also* set
/// [`AppSettings::UnifiedHelpMessage`] to fully merge both options and
/// flags, otherwise the ordering is "best effort"
/// * `{flags}` - Help for flags.
/// * `{options}` - Help for options.
    /// * `{positionals}` - Help for positional arguments.
/// * `{subcommands}` - Help for subcommands.
/// * `{after-help}` - Help from [`App::after_help`]
/// * `{before-help}` - Help from [`App::before_help`]
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .version("1.0")
/// .template("{bin} ({version}) - {usage}")
/// # ;
/// ```
    /// **NOTE:** The template system is, on purpose, very simple. Therefore the tags have to be
    /// written in lowercase and without spacing.
/// [`App::about`]: ./struct.App.html#method.about
/// [`App::after_help`]: ./struct.App.html#method.after_help
/// [`App::before_help`]: ./struct.App.html#method.before_help
/// [`AppSettings::UnifiedHelpMessage`]: ./enum.AppSettings.html#variant.UnifiedHelpMessage
pub fn template<S: Into<&'b str>>(mut self, s: S) -> Self {
self.p.meta.template = Some(s.into());
self
}
/// Enables a single command, or [`SubCommand`], level settings.
///
/// See [`AppSettings`] for a full list of possibilities and examples.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg, AppSettings};
/// App::new("myprog")
/// .setting(AppSettings::SubcommandRequired)
/// .setting(AppSettings::WaitOnError)
/// # ;
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
/// [`AppSettings`]: ./enum.AppSettings.html
pub fn setting(mut self, setting: AppSettings) -> Self {
self.p.set(setting);
self
}
/// Enables multiple command, or [`SubCommand`], level settings
///
/// See [`AppSettings`] for a full list of possibilities and examples.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg, AppSettings};
/// App::new("myprog")
/// .settings(&[AppSettings::SubcommandRequired,
/// AppSettings::WaitOnError])
/// # ;
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
/// [`AppSettings`]: ./enum.AppSettings.html
pub fn settings(mut self, settings: &[AppSettings]) -> Self {
for s in settings {
self.p.set(*s);
}
self
}
/// Enables a single setting that is propagated down through all child [`SubCommand`]s.
///
/// See [`AppSettings`] for a full list of possibilities and examples.
///
/// **NOTE**: The setting is *only* propagated *down* and not up through parent commands.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg, AppSettings};
/// App::new("myprog")
/// .global_setting(AppSettings::SubcommandRequired)
/// # ;
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
/// [`AppSettings`]: ./enum.AppSettings.html
pub fn global_setting(mut self, setting: AppSettings) -> Self {
self.p.set(setting);
self.p.g_settings.set(setting);
self
}
/// Enables multiple settings which are propagated *down* through all child [`SubCommand`]s.
///
/// See [`AppSettings`] for a full list of possibilities and examples.
///
/// **NOTE**: The setting is *only* propagated *down* and not up through parent commands.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg, AppSettings};
/// App::new("myprog")
/// .global_settings(&[AppSettings::SubcommandRequired,
/// AppSettings::ColoredHelp])
/// # ;
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
/// [`AppSettings`]: ./enum.AppSettings.html
pub fn global_settings(mut self, settings: &[AppSettings]) -> Self {
for s in settings {
self.p.set(*s);
self.p.g_settings.set(*s)
}
self
}
/// Disables a single command, or [`SubCommand`], level setting.
///
/// See [`AppSettings`] for a full list of possibilities and examples.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, AppSettings};
/// App::new("myprog")
/// .unset_setting(AppSettings::ColorAuto)
/// # ;
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
/// [`AppSettings`]: ./enum.AppSettings.html
pub fn unset_setting(mut self, setting: AppSettings) -> Self {
self.p.unset(setting);
self
}
/// Disables multiple command, or [`SubCommand`], level settings.
///
/// See [`AppSettings`] for a full list of possibilities and examples.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, AppSettings};
/// App::new("myprog")
/// .unset_settings(&[AppSettings::ColorAuto,
/// AppSettings::AllowInvalidUtf8])
/// # ;
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
/// [`AppSettings`]: ./enum.AppSettings.html
pub fn unset_settings(mut self, settings: &[AppSettings]) -> Self {
for s in settings {
self.p.unset(*s);
}
self
}
/// Sets the terminal width at which to wrap help messages. Defaults to `120`. Using `0` will
/// ignore terminal widths and use source formatting.
///
/// `clap` automatically tries to determine the terminal width on Unix, Linux, OSX and Windows
/// if the `wrap_help` cargo "feature" has been used while compiling. If the terminal width
/// cannot be determined, `clap` defaults to `120`.
///
/// **NOTE:** This setting applies globally and *not* on a per-command basis.
///
/// **NOTE:** This setting must be set **before** any subcommands are added!
///
/// # Platform Specific
///
/// Only Unix, Linux, OSX and Windows support automatic determination of terminal width.
/// Even on those platforms, this setting is useful if for any reason the terminal width
/// cannot be determined.
///
/// # Examples
///
/// ```no_run
/// # use clap::App;
/// App::new("myprog")
/// .set_term_width(80)
/// # ;
/// ```
pub fn set_term_width(mut self, width: usize) -> Self {
self.p.meta.term_w = Some(width);
self
}
/// Sets the max terminal width at which to wrap help messages. Using `0` will ignore terminal
/// widths and use source formatting.
///
/// `clap` automatically tries to determine the terminal width on Unix, Linux, OSX and Windows
/// if the `wrap_help` cargo "feature" has been used while compiling, but one might want to
/// limit the size (e.g. when the terminal is running fullscreen).
///
/// **NOTE:** This setting applies globally and *not* on a per-command basis.
///
/// **NOTE:** This setting must be set **before** any subcommands are added!
///
/// # Platform Specific
///
/// Only Unix, Linux, OSX and Windows support automatic determination of terminal width.
///
/// # Examples
///
/// ```no_run
/// # use clap::App;
/// App::new("myprog")
/// .max_term_width(100)
/// # ;
/// ```
pub fn max_term_width(mut self, w: usize) -> Self {
self.p.meta.max_w = Some(w);
self
}
/// Adds an [argument] to the list of valid possibilities.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// // Adding a single "flag" argument with a short and help text, using Arg::with_name()
/// .arg(
/// Arg::with_name("debug")
/// .short("d")
/// .help("turns on debugging mode")
/// )
/// // Adding a single "option" argument with a short, a long, and help text using the less
/// // verbose Arg::from_usage()
/// .arg(
/// Arg::from_usage("-c --config=[CONFIG] 'Optionally sets a config file to use'")
/// )
/// # ;
/// ```
/// [argument]: ./struct.Arg.html
pub fn arg<A: Into<Arg<'a, 'b>>>(mut self, a: A) -> Self {
self.p.add_arg(a.into());
self
}
/// Adds multiple [arguments] to the list of valid possibilities
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .args(
/// &[Arg::from_usage("[debug] -d 'turns on debugging info'"),
/// Arg::with_name("input").index(1).help("the input file to use")]
/// )
/// # ;
/// ```
/// [arguments]: ./struct.Arg.html
pub fn args(mut self, args: &[Arg<'a, 'b>]) -> Self {
for arg in args {
self.p.add_arg_ref(arg);
}
self
}
/// A convenience method for adding a single [argument] from a usage type string. The string
/// used follows the same rules and syntax as [`Arg::from_usage`]
///
/// **NOTE:** The downside to using this method is that you can not set any additional
/// properties of the [`Arg`] other than what [`Arg::from_usage`] supports.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .arg_from_usage("-c --config=<FILE> 'Sets a configuration file to use'")
/// # ;
/// ```
/// [arguments]: ./struct.Arg.html
/// [`Arg`]: ./struct.Arg.html
/// [`Arg::from_usage`]: ./struct.Arg.html#method.from_usage
pub fn arg_from_usage(mut self, usage: &'a str) -> Self {
self.p.add_arg(Arg::from_usage(usage));
self
}
/// Adds multiple [arguments] at once from a usage string, one per line. See
/// [`Arg::from_usage`] for details on the syntax and rules supported.
///
/// **NOTE:** Like [`App::arg_from_usage`] the downside is you only set properties for the
/// [`Arg`]s which [`Arg::from_usage`] supports.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// App::new("myprog")
/// .args_from_usage(
/// "-c --config=[FILE] 'Sets a configuration file to use'
/// [debug]... -d 'Sets the debugging level'
/// <FILE> 'The input file to use'"
/// )
/// # ;
/// ```
/// [arguments]: ./struct.Arg.html
/// [`Arg::from_usage`]: ./struct.Arg.html#method.from_usage
/// [`App::arg_from_usage`]: ./struct.App.html#method.arg_from_usage
/// [`Arg`]: ./struct.Arg.html
pub fn args_from_usage(mut self, usage: &'a str) -> Self {
for line in usage.lines() {
let l = line.trim();
if l.is_empty() {
continue;
}
self.p.add_arg(Arg::from_usage(l));
}
self
}
    /// Allows adding a [`SubCommand`] alias, which functions as a "hidden" subcommand that
    /// automatically dispatches as if this subcommand was used. This is more efficient, and easier
/// than creating multiple hidden subcommands as one only needs to check for the existence of
/// this command, and not all variants.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg, SubCommand};
/// let m = App::new("myprog")
/// .subcommand(SubCommand::with_name("test")
/// .alias("do-stuff"))
/// .get_matches_from(vec!["myprog", "do-stuff"]);
/// assert_eq!(m.subcommand_name(), Some("test"));
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
pub fn alias<S: Into<&'b str>>(mut self, name: S) -> Self {
if let Some(ref mut als) = self.p.meta.aliases {
als.push((name.into(), false));
} else {
self.p.meta.aliases = Some(vec![(name.into(), false)]);
}
self
}
/// Allows adding [`SubCommand`] aliases, which function as "hidden" subcommands that
/// automatically dispatch as if this subcommand was used. This is more efficient, and easier
/// than creating multiple hidden subcommands as one only needs to check for the existence of
/// this command, and not all variants.
///
/// # Examples
///
/// ```rust
/// # use clap::{App, Arg, SubCommand};
/// let m = App::new("myprog")
/// .subcommand(SubCommand::with_name("test")
/// .aliases(&["do-stuff", "do-tests", "tests"]))
/// .arg(Arg::with_name("input")
/// .help("the file to add")
/// .index(1)
/// .required(false))
/// .get_matches_from(vec!["myprog", "do-tests"]);
/// assert_eq!(m.subcommand_name(), Some("test"));
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
pub fn aliases(mut self, names: &[&'b str]) -> Self {
if let Some(ref mut als) = self.p.meta.aliases {
for n in names {
als.push((n, false));
}
} else {
self.p.meta.aliases = Some(names.iter().map(|n| (*n, false)).collect::<Vec<_>>());
}
self
}
/// Allows adding a [`SubCommand`] alias that functions exactly like those defined with
/// [`App::alias`], except that they are visible inside the help message.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg, SubCommand};
/// let m = App::new("myprog")
/// .subcommand(SubCommand::with_name("test")
/// .visible_alias("do-stuff"))
/// .get_matches_from(vec!["myprog", "do-stuff"]);
/// assert_eq!(m.subcommand_name(), Some("test"));
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
/// [`App::alias`]: ./struct.App.html#method.alias
pub fn visible_alias<S: Into<&'b str>>(mut self, name: S) -> Self {
if let Some(ref mut als) = self.p.meta.aliases {
als.push((name.into(), true));
} else {
self.p.meta.aliases = Some(vec![(name.into(), true)]);
}
self
}
    /// Allows adding multiple [`SubCommand`] aliases that function exactly like those defined
/// with [`App::aliases`], except that they are visible inside the help message.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg, SubCommand};
/// let m = App::new("myprog")
/// .subcommand(SubCommand::with_name("test")
/// .visible_aliases(&["do-stuff", "tests"]))
/// .get_matches_from(vec!["myprog", "do-stuff"]);
/// assert_eq!(m.subcommand_name(), Some("test"));
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
/// [`App::aliases`]: ./struct.App.html#method.aliases
pub fn visible_aliases(mut self, names: &[&'b str]) -> Self {
if let Some(ref mut als) = self.p.meta.aliases {
for n in names {
als.push((n, true));
}
} else {
self.p.meta.aliases = Some(names.iter().map(|n| (*n, true)).collect::<Vec<_>>());
}
self
}
/// Adds an [`ArgGroup`] to the application. [`ArgGroup`]s are a family of related arguments.
/// By placing them in a logical group, you can build easier requirement and exclusion rules.
/// For instance, you can make an entire [`ArgGroup`] required, meaning that one (and *only*
/// one) argument from that group must be present at runtime.
///
/// You can also do things such as name an [`ArgGroup`] as a conflict to another argument.
/// Meaning any of the arguments that belong to that group will cause a failure if present with
/// the conflicting argument.
///
/// Another added benefit of [`ArgGroup`]s is that you can extract a value from a group instead
/// of determining exactly which argument was used.
///
/// Finally, using [`ArgGroup`]s to ensure exclusion between arguments is another very common
/// use
///
/// # Examples
///
/// The following example demonstrates using an [`ArgGroup`] to ensure that one, and only one,
/// of the arguments from the specified group is present at runtime.
///
/// ```no_run
/// # use clap::{App, ArgGroup};
/// App::new("app")
/// .args_from_usage(
/// "--set-ver [ver] 'set the version manually'
/// --major 'auto increase major'
/// --minor 'auto increase minor'
/// --patch 'auto increase patch'")
/// .group(ArgGroup::with_name("vers")
/// .args(&["set-ver", "major", "minor","patch"])
/// .required(true))
/// # ;
/// ```
/// [`ArgGroup`]: ./struct.ArgGroup.html
pub fn group(mut self, group: ArgGroup<'a>) -> Self {
self.p.add_group(group);
self
}
/// Adds multiple [`ArgGroup`]s to the [`App`] at once.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, ArgGroup};
/// App::new("app")
/// .args_from_usage(
/// "--set-ver [ver] 'set the version manually'
/// --major 'auto increase major'
/// --minor 'auto increase minor'
/// --patch 'auto increase patch'
/// -c [FILE] 'a config file'
/// -i [IFACE] 'an interface'")
/// .groups(&[
/// ArgGroup::with_name("vers")
/// .args(&["set-ver", "major", "minor","patch"])
/// .required(true),
/// ArgGroup::with_name("input")
/// .args(&["c", "i"])
/// ])
/// # ;
/// ```
/// [`ArgGroup`]: ./struct.ArgGroup.html
/// [`App`]: ./struct.App.html
pub fn groups(mut self, groups: &[ArgGroup<'a>]) -> Self {
for g in groups {
self = self.group(g.into());
}
self
}
/// Adds a [`SubCommand`] to the list of valid possibilities. Subcommands are effectively
/// sub-[`App`]s, because they can contain their own arguments, subcommands, version, usage,
/// etc. They also function just like [`App`]s, in that they get their own auto generated help,
/// version, and usage.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg, SubCommand};
/// App::new("myprog")
/// .subcommand(SubCommand::with_name("config")
/// .about("Controls configuration features")
/// .arg_from_usage("<config> 'Required configuration file to use'"))
/// # ;
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
/// [`App`]: ./struct.App.html
pub fn subcommand(mut self, subcmd: App<'a, 'b>) -> Self {
self.p.add_subcommand(subcmd);
self
}
/// Adds multiple subcommands to the list of valid possibilities by iterating over an
/// [`IntoIterator`] of [`SubCommand`]s
///
/// # Examples
///
/// ```rust
/// # use clap::{App, Arg, SubCommand};
/// # App::new("myprog")
/// .subcommands( vec![
/// SubCommand::with_name("config").about("Controls configuration functionality")
/// .arg(Arg::with_name("config_file").index(1)),
/// SubCommand::with_name("debug").about("Controls debug functionality")])
/// # ;
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
/// [`IntoIterator`]: https://doc.rust-lang.org/std/iter/trait.IntoIterator.html
pub fn subcommands<I>(mut self, subcmds: I) -> Self
where
I: IntoIterator<Item = App<'a, 'b>>,
{
for subcmd in subcmds {
self.p.add_subcommand(subcmd);
}
self
}
/// Allows custom ordering of [`SubCommand`]s within the help message. Subcommands with a lower
/// value will be displayed first in the help message. This is helpful when one would like to
/// emphasise frequently used subcommands, or prioritize those towards the top of the list.
/// Duplicate values **are** allowed. Subcommands with duplicate display orders will be
/// displayed in alphabetical order.
///
/// **NOTE:** The default is 999 for all subcommands.
///
/// # Examples
///
/// ```rust
/// # use clap::{App, SubCommand};
/// let m = App::new("cust-ord")
/// .subcommand(SubCommand::with_name("alpha") // typically subcommands are grouped
/// // alphabetically by name. Subcommands
/// // without a display_order have a value of
/// // 999 and are displayed alphabetically with
/// // all other 999 subcommands
/// .about("Some help and text"))
/// .subcommand(SubCommand::with_name("beta")
/// .display_order(1) // In order to force this subcommand to appear *first*
/// // all we have to do is give it a value lower than 999.
/// // Any other subcommands with a value of 1 will be displayed
/// // alphabetically with this one...then 2 values, then 3, etc.
/// .about("I should be first!"))
/// .get_matches_from(vec![
/// "cust-ord", "--help"
/// ]);
/// ```
///
/// The above example displays the following help message
///
/// ```text
/// cust-ord
///
/// USAGE:
/// cust-ord [FLAGS] [OPTIONS]
///
/// FLAGS:
/// -h, --help Prints help information
/// -V, --version Prints version information
///
/// SUBCOMMANDS:
/// beta I should be first!
/// alpha Some help and text
/// ```
/// [`SubCommand`]: ./struct.SubCommand.html
pub fn display_order(mut self, ord: usize) -> Self {
self.p.meta.disp_ord = ord;
self
}
/// Prints the full help message to [`io::stdout()`] using a [`BufWriter`] using the same
/// method as if someone ran `-h` to request the help message
///
/// **NOTE:** clap has the ability to distinguish between "short" and "long" help messages
/// depending on if the user ran [`-h` (short)] or [`--help` (long)]
///
/// # Examples
///
/// ```rust
/// # use clap::App;
/// let mut app = App::new("myprog");
/// app.print_help();
/// ```
/// [`io::stdout()`]: https://doc.rust-lang.org/std/io/fn.stdout.html
/// [`BufWriter`]: https://doc.rust-lang.org/std/io/struct.BufWriter.html
/// [`-h` (short)]: ./struct.Arg.html#method.help
/// [`--help` (long)]: ./struct.Arg.html#method.long_help
pub fn print_help(&mut self) -> ClapResult<()> {
        // If there are global arguments or settings, we need to propagate them down to subcommands
        // before parsing, in case we run into a subcommand
self.p.propagate_globals();
self.p.propagate_settings();
self.p.derive_display_order();
self.p.create_help_and_version();
let out = io::stdout();
let mut buf_w = BufWriter::new(out.lock());
self.write_help(&mut buf_w)
}
/// Prints the full help message to [`io::stdout()`] using a [`BufWriter`] using the same
/// method as if someone ran `--help` to request the help message
///
/// **NOTE:** clap has the ability to distinguish between "short" and "long" help messages
/// depending on if the user ran [`-h` (short)] or [`--help` (long)]
///
/// # Examples
///
/// ```rust
/// # use clap::App;
/// let mut app = App::new("myprog");
/// app.print_long_help();
/// ```
/// [`io::stdout()`]: https://doc.rust-lang.org/std/io/fn.stdout.html
/// [`BufWriter`]: https://doc.rust-lang.org/std/io/struct.BufWriter.html
/// [`-h` (short)]: ./struct.Arg.html#method.help
/// [`--help` (long)]: ./struct.Arg.html#method.long_help
pub fn print_long_help(&mut self) -> ClapResult<()> {
        // If there are global arguments or settings, we need to propagate them down to subcommands
        // before parsing, in case we run into a subcommand
self.p.propagate_globals();
self.p.propagate_settings();
self.p.derive_display_order();
self.p.create_help_and_version();
let out = io::stdout();
let mut buf_w = BufWriter::new(out.lock());
self.write_long_help(&mut buf_w)
}
/// Writes the full help message to the user to a [`io::Write`] object in the same method as if
/// the user ran `-h`
///
/// **NOTE:** clap has the ability to distinguish between "short" and "long" help messages
/// depending on if the user ran [`-h` (short)] or [`--help` (long)]
///
/// **NOTE:** There is a known bug where this method does not write propagated global arguments
/// or autogenerated arguments (i.e. the default help/version args). Prefer
/// [`App::write_long_help`] instead if possible!
///
/// # Examples
///
/// ```rust
/// # use clap::App;
/// use std::io;
/// let mut app = App::new("myprog");
/// let mut out = io::stdout();
/// app.write_help(&mut out).expect("failed to write to stdout");
/// ```
/// [`io::Write`]: https://doc.rust-lang.org/std/io/trait.Write.html
/// [`-h` (short)]: ./struct.Arg.html#method.help
/// [`--help` (long)]: ./struct.Arg.html#method.long_help
pub fn write_help<W: Write>(&self, w: &mut W) -> ClapResult<()> {
// PENDING ISSUE: 808
// https://github.com/kbknapp/clap-rs/issues/808
        // If there are global arguments or settings, we need to propagate them down to subcommands
        // before parsing, in case we run into a subcommand
// self.p.propagate_globals();
// self.p.propagate_settings();
// self.p.derive_display_order();
// self.p.create_help_and_version();
Help::write_app_help(w, self, false)
}
/// Writes the full help message to the user to a [`io::Write`] object in the same method as if
/// the user ran `--help`
///
/// **NOTE:** clap has the ability to distinguish between "short" and "long" help messages
/// depending on if the user ran [`-h` (short)] or [`--help` (long)]
///
/// # Examples
///
/// ```rust
/// # use clap::App;
/// use std::io;
/// let mut app = App::new("myprog");
/// let mut out = io::stdout();
/// app.write_long_help(&mut out).expect("failed to write to stdout");
/// ```
/// [`io::Write`]: https://doc.rust-lang.org/std/io/trait.Write.html
/// [`-h` (short)]: ./struct.Arg.html#method.help
/// [`--help` (long)]: ./struct.Arg.html#method.long_help
pub fn write_long_help<W: Write>(&mut self, w: &mut W) -> ClapResult<()> {
self.p.propagate_globals();
self.p.propagate_settings();
self.p.derive_display_order();
self.p.create_help_and_version();
Help::write_app_help(w, self, true)
}
/// Writes the version message to the user to a [`io::Write`] object as if the user ran `-V`.
///
/// **NOTE:** clap has the ability to distinguish between "short" and "long" version messages
/// depending on if the user ran [`-V` (short)] or [`--version` (long)]
///
/// # Examples
///
/// ```rust
/// # use clap::App;
/// use std::io;
/// let mut app = App::new("myprog");
/// let mut out = io::stdout();
/// app.write_version(&mut out).expect("failed to write to stdout");
/// ```
/// [`io::Write`]: https://doc.rust-lang.org/std/io/trait.Write.html
/// [`-V` (short)]: ./struct.App.html#method.version
/// [`--version` (long)]: ./struct.App.html#method.long_version
pub fn write_version<W: Write>(&self, w: &mut W) -> ClapResult<()> {
self.p.write_version(w, false).map_err(From::from)
}
/// Writes the version message to the user to a [`io::Write`] object
///
/// **NOTE:** clap has the ability to distinguish between "short" and "long" version messages
/// depending on if the user ran [`-V` (short)] or [`--version` (long)]
///
/// # Examples
///
/// ```rust
/// # use clap::App;
/// use std::io;
/// let mut app = App::new("myprog");
/// let mut out = io::stdout();
/// app.write_long_version(&mut out).expect("failed to write to stdout");
/// ```
/// [`io::Write`]: https://doc.rust-lang.org/std/io/trait.Write.html
/// [`-V` (short)]: ./struct.App.html#method.version
/// [`--version` (long)]: ./struct.App.html#method.long_version
pub fn write_long_version<W: Write>(&self, w: &mut W) -> ClapResult<()> {
self.p.write_version(w, true).map_err(From::from)
}
/// Generate a completions file for a specified shell at compile time.
///
/// **NOTE:** to generate the file at compile time you must use a `build.rs` "Build Script"
///
/// # Examples
///
/// The following example generates a bash completion script via a `build.rs` script. In this
/// simple example, we'll demo a very small application with only a single subcommand and two
    /// args. Real applications could be many levels deep in subcommands, and have tens or
/// potentially hundreds of arguments.
///
/// First, it helps if we separate out our `App` definition into a separate file. Whether you
/// do this as a function, or bare App definition is a matter of personal preference.
///
/// ```
/// // src/cli.rs
///
/// use clap::{App, Arg, SubCommand};
///
/// pub fn build_cli() -> App<'static, 'static> {
/// App::new("compl")
/// .about("Tests completions")
/// .arg(Arg::with_name("file")
/// .help("some input file"))
/// .subcommand(SubCommand::with_name("test")
/// .about("tests things")
/// .arg(Arg::with_name("case")
/// .long("case")
/// .takes_value(true)
/// .help("the case to test")))
/// }
/// ```
///
/// In our regular code, we can simply call this `build_cli()` function, then call
/// `get_matches()`, or any of the other normal methods directly after. For example:
///
/// ```ignore
/// // src/main.rs
///
/// mod cli;
///
/// fn main() {
/// let m = cli::build_cli().get_matches();
///
/// // normal logic continues...
/// }
/// ```
///
/// Next, we set up our `Cargo.toml` to use a `build.rs` build script.
///
/// ```toml
/// # Cargo.toml
/// build = "build.rs"
///
/// [build-dependencies]
/// clap = "2.23"
/// ```
///
/// Next, we place a `build.rs` in our project root.
///
/// ```ignore
/// extern crate clap;
///
/// use clap::Shell;
///
/// include!("src/cli.rs");
///
/// fn main() {
/// let outdir = match env::var_os("OUT_DIR") {
/// None => return,
/// Some(outdir) => outdir,
/// };
/// let mut app = build_cli();
/// app.gen_completions("myapp", // We need to specify the bin name manually
/// Shell::Bash, // Then say which shell to build completions for
    ///                         outdir);      // Then say where to write the completions to
/// }
/// ```
/// Now, once we compile there will be a `{bin_name}.bash` file in the directory.
/// Assuming we compiled with debug mode, it would be somewhere similar to
/// `<project>/target/debug/build/myapp-<hash>/out/myapp.bash`.
///
/// Fish shell completions will use the file format `{bin_name}.fish`
pub fn gen_completions<T: Into<OsString>, S: Into<String>>(
&mut self,
bin_name: S,
for_shell: Shell,
out_dir: T,
) {
self.p.meta.bin_name = Some(bin_name.into());
self.p.gen_completions(for_shell, out_dir.into());
}
/// Generate a completions file for a specified shell at runtime. Until `cargo install` can
/// install extra files like a completion script, this may be used e.g. in a command that
/// outputs the contents of the completion script, to be redirected into a file by the user.
///
/// # Examples
///
/// Assuming a separate `cli.rs` like the [example above](./struct.App.html#method.gen_completions),
/// we can let users generate a completion script using a command:
///
/// ```ignore
/// // src/main.rs
///
/// mod cli;
/// use std::io;
///
/// fn main() {
/// let matches = cli::build_cli().get_matches();
///
/// if matches.is_present("generate-bash-completions") {
/// cli::build_cli().gen_completions_to("myapp", Shell::Bash, &mut io::stdout());
/// }
///
/// // normal logic continues...
/// }
///
/// ```
///
/// Usage:
///
/// ```shell
/// $ myapp generate-bash-completions > /usr/share/bash-completion/completions/myapp.bash
/// ```
pub fn gen_completions_to<W: Write, S: Into<String>>(
&mut self,
bin_name: S,
for_shell: Shell,
buf: &mut W,
) {
self.p.meta.bin_name = Some(bin_name.into());
self.p.gen_completions_to(for_shell, buf);
}
/// Starts the parsing process, upon a failed parse an error will be displayed to the user and
/// the process will exit with the appropriate error code. By default this method gets all user
/// provided arguments from [`env::args_os`] in order to allow for invalid UTF-8 code points,
/// which are legal on many platforms.
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// let matches = App::new("myprog")
/// // Args and options go here...
/// .get_matches();
/// ```
/// [`env::args_os`]: https://doc.rust-lang.org/std/env/fn.args_os.html
pub fn get_matches(self) -> ArgMatches<'a> { self.get_matches_from(&mut env::args_os()) }
/// Starts the parsing process. This method will return a [`clap::Result`] type instead of exiting
/// the process on failed parse. By default this method gets matches from [`env::args_os`]
///
/// **NOTE:** This method WILL NOT exit when `--help` or `--version` (or short versions) are
/// used. It will return a [`clap::Error`], where the [`kind`] is a
/// [`ErrorKind::HelpDisplayed`] or [`ErrorKind::VersionDisplayed`] respectively. You must call
/// [`Error::exit`] or perform a [`std::process::exit`].
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// let matches = App::new("myprog")
/// // Args and options go here...
/// .get_matches_safe()
/// .unwrap_or_else( |e| e.exit() );
/// ```
/// [`env::args_os`]: https://doc.rust-lang.org/std/env/fn.args_os.html
/// [`ErrorKind::HelpDisplayed`]: ./enum.ErrorKind.html#variant.HelpDisplayed
/// [`ErrorKind::VersionDisplayed`]: ./enum.ErrorKind.html#variant.VersionDisplayed
/// [`Error::exit`]: ./struct.Error.html#method.exit
/// [`std::process::exit`]: https://doc.rust-lang.org/std/process/fn.exit.html
/// [`clap::Result`]: ./type.Result.html
/// [`clap::Error`]: ./struct.Error.html
/// [`kind`]: ./struct.Error.html
pub fn get_matches_safe(self) -> ClapResult<ArgMatches<'a>> {
// Start the parsing
self.get_matches_from_safe(&mut env::args_os())
}
/// Starts the parsing process. Like [`App::get_matches`] this method does not return a [`clap::Result`]
/// and will automatically exit with an error message. This method, however, lets you specify
/// what iterator to use when performing matches, such as a [`Vec`] of your making.
///
/// **NOTE:** The first argument will be parsed as the binary name unless
/// [`AppSettings::NoBinaryName`] is used
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// let arg_vec = vec!["my_prog", "some", "args", "to", "parse"];
///
/// let matches = App::new("myprog")
/// // Args and options go here...
/// .get_matches_from(arg_vec);
/// ```
/// [`App::get_matches`]: ./struct.App.html#method.get_matches
/// [`clap::Result`]: ./type.Result.html
/// [`Vec`]: https://doc.rust-lang.org/std/vec/struct.Vec.html
/// [`AppSettings::NoBinaryName`]: ./enum.AppSettings.html#variant.NoBinaryName
pub fn get_matches_from<I, T>(mut self, itr: I) -> ArgMatches<'a>
where
I: IntoIterator<Item = T>,
T: Into<OsString> + Clone,
{
self.get_matches_from_safe_borrow(itr).unwrap_or_else(|e| {
// Otherwise, write to stderr and exit
if e.use_stderr() {
wlnerr!("{}", e.message);
if self.p.is_set(AppSettings::WaitOnError) {
wlnerr!("\nPress [ENTER] / [RETURN] to continue...");
let mut s = String::new();
let i = io::stdin();
i.lock().read_line(&mut s).unwrap();
}
drop(self);
drop(e);
process::exit(1);
}
drop(self);
e.exit()
})
}
/// Starts the parsing process. A combination of [`App::get_matches_from`], and
/// [`App::get_matches_safe`]
///
/// **NOTE:** This method WILL NOT exit when `--help` or `--version` (or short versions) are
/// used. It will return a [`clap::Error`], where the [`kind`] is a [`ErrorKind::HelpDisplayed`]
/// or [`ErrorKind::VersionDisplayed`] respectively. You must call [`Error::exit`] or
/// perform a [`std::process::exit`] yourself.
///
/// **NOTE:** The first argument will be parsed as the binary name unless
/// [`AppSettings::NoBinaryName`] is used
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// let arg_vec = vec!["my_prog", "some", "args", "to", "parse"];
///
/// let matches = App::new("myprog")
/// // Args and options go here...
/// .get_matches_from_safe(arg_vec)
    ///     .unwrap_or_else( |e| { panic!("An error occurred: {}", e) });
/// ```
/// [`App::get_matches_from`]: ./struct.App.html#method.get_matches_from
/// [`App::get_matches_safe`]: ./struct.App.html#method.get_matches_safe
/// [`ErrorKind::HelpDisplayed`]: ./enum.ErrorKind.html#variant.HelpDisplayed
/// [`ErrorKind::VersionDisplayed`]: ./enum.ErrorKind.html#variant.VersionDisplayed
/// [`Error::exit`]: ./struct.Error.html#method.exit
/// [`std::process::exit`]: https://doc.rust-lang.org/std/process/fn.exit.html
/// [`clap::Error`]: ./struct.Error.html
/// [`Error::exit`]: ./struct.Error.html#method.exit
/// [`kind`]: ./struct.Error.html
/// [`AppSettings::NoBinaryName`]: ./enum.AppSettings.html#variant.NoBinaryName
pub fn get_matches_from_safe<I, T>(mut self, itr: I) -> ClapResult<ArgMatches<'a>>
where
I: IntoIterator<Item = T>,
T: Into<OsString> + Clone,
{
self.get_matches_from_safe_borrow(itr)
}
/// Starts the parsing process without consuming the [`App`] struct `self`. This is normally not
/// the desired functionality, instead prefer [`App::get_matches_from_safe`] which *does*
/// consume `self`.
///
/// **NOTE:** The first argument will be parsed as the binary name unless
/// [`AppSettings::NoBinaryName`] is used
///
/// # Examples
///
/// ```no_run
/// # use clap::{App, Arg};
/// let arg_vec = vec!["my_prog", "some", "args", "to", "parse"];
///
/// let mut app = App::new("myprog");
/// // Args and options go here...
/// let matches = app.get_matches_from_safe_borrow(arg_vec)
    ///     .unwrap_or_else( |e| { panic!("An error occurred: {}", e) });
/// ```
/// [`App`]: ./struct.App.html
/// [`App::get_matches_from_safe`]: ./struct.App.html#method.get_matches_from_safe
/// [`AppSettings::NoBinaryName`]: ./enum.AppSettings.html#variant.NoBinaryName
pub fn get_matches_from_safe_borrow<I, T>(&mut self, itr: I) -> ClapResult<ArgMatches<'a>>
where
I: IntoIterator<Item = T>,
T: Into<OsString> + Clone,
{
        // If there are global arguments or settings, we need to propagate them down to subcommands
        // before parsing, in case we run into a subcommand
if !self.p.is_set(AppSettings::Propagated) {
self.p.propagate_globals();
self.p.propagate_settings();
self.p.derive_display_order();
self.p.set(AppSettings::Propagated);
}
let mut matcher = ArgMatcher::new();
let mut it = itr.into_iter();
let mut all_args: Vec<OsString> = Vec::new();
        // Get the name of the program (argument 1 of env::args()) and determine the actual file
        // that was used to execute the program. This is because a program invoked as
        // `./target/release/my_prog -a` will have two arguments, './target/release/my_prog' and
        // '-a', but we don't want to display the full path when displaying help messages and such.
if !self.p.is_set(AppSettings::NoBinaryName) {
if let Some(name) = it.next() {
let bn_os = name.into();
let p = Path::new(&*bn_os);
if let Some(f) = p.file_name() {
if let Some(s) = f.to_os_string().to_str() {
if self.p.meta.bin_name.is_none() {
self.p.meta.bin_name = Some(s.to_owned());
}
}
}
}
}
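        // Arguments registered ahead of time via `App::argv` are parsed before the
        // user-supplied ones.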
for arg in self.p.extra_argv.iter() {
all_args.push(arg.clone());
}
for arg in it {
let x: OsString = arg.into();
all_args.push(x);
}
// do the real parsing
if let Err(e) = self.p.get_matches_with(&mut matcher, &mut all_args.into_iter().peekable()) {
return Err(e);
}
let global_arg_vec: Vec<&str> = (&self).p.global_args.iter().map(|ga| ga.b.name).collect();
matcher.propagate_globals(&global_arg_vec);
Ok(matcher.into())
}
}
#[cfg(feature = "yaml")]
impl<'a> From<&'a Yaml> for App<'a, 'a> {
fn from(mut yaml: &'a Yaml) -> Self {
use args::SubCommand;
// We WANT this to panic on error...so expect() is good.
let mut is_sc = None;
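        // A top-level App is a YAML mapping with a `name` key; a subcommand is instead a
        // single-key hash whose key is the subcommand name and whose value holds its settings,
        // so unwrap that extra level before reading the fields.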
let mut a = if let Some(name) = yaml["name"].as_str() {
App::new(name)
} else {
let yaml_hash = yaml.as_hash().unwrap();
let sc_key = yaml_hash.keys().nth(0).unwrap();
is_sc = Some(yaml_hash.get(sc_key).unwrap());
App::new(sc_key.as_str().unwrap())
};
yaml = if let Some(sc) = is_sc { sc } else { yaml };
macro_rules! yaml_str {
($a:ident, $y:ident, $i:ident) => {
if let Some(v) = $y[stringify!($i)].as_str() {
$a = $a.$i(v);
} else if $y[stringify!($i)] != Yaml::BadValue {
panic!("Failed to convert YAML value {:?} to a string", $y[stringify!($i)]);
}
};
}
yaml_str!(a, yaml, version);
yaml_str!(a, yaml, author);
yaml_str!(a, yaml, bin_name);
yaml_str!(a, yaml, about);
yaml_str!(a, yaml, before_help);
yaml_str!(a, yaml, after_help);
yaml_str!(a, yaml, template);
yaml_str!(a, yaml, usage);
yaml_str!(a, yaml, help);
yaml_str!(a, yaml, help_short);
yaml_str!(a, yaml, version_short);
yaml_str!(a, yaml, help_message);
yaml_str!(a, yaml, version_message);
yaml_str!(a, yaml, alias);
yaml_str!(a, yaml, visible_alias);
if let Some(v) = yaml["display_order"].as_i64() {
a = a.display_order(v as usize);
} else if yaml["display_order"] != Yaml::BadValue {
panic!(
"Failed to convert YAML value {:?} to a u64",
yaml["display_order"]
);
}
if let Some(v) = yaml["setting"].as_str() {
a = a.setting(v.parse().expect("unknown AppSetting found in YAML file"));
} else if yaml["setting"] != Yaml::BadValue {
panic!(
"Failed to convert YAML value {:?} to an AppSetting",
yaml["setting"]
);
}
if let Some(v) = yaml["settings"].as_vec() {
for ys in v {
if let Some(s) = ys.as_str() {
a = a.setting(s.parse().expect("unknown AppSetting found in YAML file"));
}
}
} else if let Some(v) = yaml["settings"].as_str() {
a = a.setting(v.parse().expect("unknown AppSetting found in YAML file"));
} else if yaml["settings"] != Yaml::BadValue {
panic!(
"Failed to convert YAML value {:?} to a string",
yaml["settings"]
);
}
if let Some(v) = yaml["global_setting"].as_str() {
a = a.setting(v.parse().expect("unknown AppSetting found in YAML file"));
} else if yaml["global_setting"] != Yaml::BadValue {
panic!(
"Failed to convert YAML value {:?} to an AppSetting",
yaml["setting"]
);
}
if let Some(v) = yaml["global_settings"].as_vec() {
for ys in v {
if let Some(s) = ys.as_str() {
a = a.global_setting(s.parse().expect("unknown AppSetting found in YAML file"));
}
}
} else if let Some(v) = yaml["global_settings"].as_str() {
a = a.global_setting(v.parse().expect("unknown AppSetting found in YAML file"));
} else if yaml["global_settings"] != Yaml::BadValue {
panic!(
"Failed to convert YAML value {:?} to a string",
yaml["global_settings"]
);
}
macro_rules! vec_or_str {
($a:ident, $y:ident, $as_vec:ident, $as_single:ident) => {{
let maybe_vec = $y[stringify!($as_vec)].as_vec();
if let Some(vec) = maybe_vec {
for ys in vec {
if let Some(s) = ys.as_str() {
$a = $a.$as_single(s);
} else {
panic!("Failed to convert YAML value {:?} to a string", ys);
}
}
} else {
if let Some(s) = $y[stringify!($as_vec)].as_str() {
$a = $a.$as_single(s);
} else if $y[stringify!($as_vec)] != Yaml::BadValue {
panic!("Failed to convert YAML value {:?} to either a vec or string", $y[stringify!($as_vec)]);
}
}
$a
}
};
}
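        // The macro accepts either a YAML list or a single string. For instance, with
        // `vec_or_str!(a, yaml, aliases, alias)` below, a (hypothetical) YAML file may
        // provide either `aliases: [do, run]` or `aliases: do`.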
a = vec_or_str!(a, yaml, aliases, alias);
a = vec_or_str!(a, yaml, visible_aliases, visible_alias);
if let Some(v) = yaml["args"].as_vec() {
for arg_yaml in v {
a = a.arg(Arg::from_yaml(arg_yaml.as_hash().unwrap()));
}
}
if let Some(v) = yaml["subcommands"].as_vec() {
for sc_yaml in v {
a = a.subcommand(SubCommand::from_yaml(sc_yaml));
}
}
if let Some(v) = yaml["groups"].as_vec() {
for ag_yaml in v {
a = a.group(ArgGroup::from(ag_yaml.as_hash().unwrap()));
}
}
a
}
}
impl<'a, 'b> Clone for App<'a, 'b> {
fn clone(&self) -> Self { App { p: self.p.clone() } }
}
impl<'n, 'e> AnyArg<'n, 'e> for App<'n, 'e> {
fn name(&self) -> &'n str {
unreachable!("App struct does not support AnyArg::name, this is a bug!")
}
fn overrides(&self) -> Option<&[&'e str]> { None }
fn requires(&self) -> Option<&[(Option<&'e str>, &'n str)]> { None }
fn blacklist(&self) -> Option<&[&'e str]> { None }
fn required_unless(&self) -> Option<&[&'e str]> { None }
fn val_names(&self) -> Option<&VecMap<&'e str>> { None }
fn is_set(&self, _: ArgSettings) -> bool { false }
fn val_terminator(&self) -> Option<&'e str> { None }
fn set(&mut self, _: ArgSettings) {
unreachable!("App struct does not support AnyArg::set, this is a bug!")
}
fn has_switch(&self) -> bool { false }
fn max_vals(&self) -> Option<u64> { None }
fn num_vals(&self) -> Option<u64> { None }
fn possible_vals(&self) -> Option<&[&'e str]> { None }
fn validator(&self) -> Option<&Rc<Fn(String) -> StdResult<(), String>>> { None }
fn validator_os(&self) -> Option<&Rc<Fn(&OsStr) -> StdResult<(), OsString>>> { None }
fn min_vals(&self) -> Option<u64> { None }
fn short(&self) -> Option<char> { None }
fn long(&self) -> Option<&'e str> { None }
fn val_delim(&self) -> Option<char> { None }
fn takes_value(&self) -> bool { true }
fn help(&self) -> Option<&'e str> { self.p.meta.about }
fn long_help(&self) -> Option<&'e str> { self.p.meta.long_about }
fn default_val(&self) -> Option<&'e OsStr> { None }
fn default_vals_ifs(&self) -> Option<map::Values<(&'n str, Option<&'e OsStr>, &'e OsStr)>> {
None
}
fn env<'s>(&'s self) -> Option<(&'n OsStr, Option<&'s OsString>)> { None }
fn longest_filter(&self) -> bool { true }
fn aliases(&self) -> Option<Vec<&'e str>> {
if let Some(ref aliases) = self.p.meta.aliases {
let vis_aliases: Vec<_> = aliases
.iter()
.filter_map(|&(n, v)| if v { Some(n) } else { None })
.collect();
if vis_aliases.is_empty() {
None
} else {
Some(vis_aliases)
}
} else {
None
}
}
}
impl<'n, 'e> fmt::Display for App<'n, 'e> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.p.meta.name) }
}
| 38.296555 | 123 | 0.546047 |
187c830a546d4d96edbc95de5f112c83e63bbdc9 | 3,530 | #[derive(Debug,PartialEq,Eq,PartialOrd,Ord,Clone,Copy)]
#[repr(C)]
pub enum Encoding {
ISO_8859_1 = 0, // ASCII
ISO_8859_2 = 1, // Latin2
ISO_8859_3 = 2, //
ISO_8859_4 = 3, // Latin4
ISO_8859_5 = 4, // ISO-8859-5
ISO_8859_6 = 5, // Arabic
ISO_8859_7 = 6, // Greek
ISO_8859_8 = 7, // Hebrew
ISO_8859_9 = 8, //
ISO_8859_10 = 9, //
JAPANESE_EUC_JP = 10, // EUC_JP
JAPANESE_SHIFT_JIS = 11, // SJS
JAPANESE_JIS = 12, // JIS
CHINESE_BIG5 = 13, // BIG5
CHINESE_GB = 14, // GB
CHINESE_EUC_CN = 15, // Misnamed. Should be EUC_TW. Was Basis Tech
KOREAN_EUC_KR = 16, // KSC
UNICODE_UNUSED = 17, // Unicode
CHINESE_EUC_DEC = 18, // Misnamed. Should be EUC_TW. Was
CHINESE_CNS = 19, // Misnamed. Should be EUC_TW. Was
CHINESE_BIG5_CP950 = 20, // BIG5_CP950
JAPANESE_CP932 = 21, // CP932
UTF8 = 22,
UNKNOWN_ENCODING = 23,
ASCII_7BIT = 24, // ISO_8859_1 with all characters <= 127.
RUSSIAN_KOI8_R = 25, // KOI8R
RUSSIAN_CP1251 = 26, // CP1251
MSFT_CP1252 = 27, // 27: CP1252 aka MSFT euro ascii
RUSSIAN_KOI8_RU = 28, // CP21866 aka KOI8-U, used for Ukrainian.
MSFT_CP1250 = 29, // CP1250 aka MSFT eastern european
ISO_8859_15 = 30, // aka ISO_8859_0 aka ISO_8859_1 euroized
MSFT_CP1254 = 31, // used for Turkish
MSFT_CP1257 = 32, // used in Baltic countries
ISO_8859_11 = 33, // aka TIS-620, used for Thai
MSFT_CP874 = 34, // used for Thai
MSFT_CP1256 = 35, // used for Arabic
MSFT_CP1255 = 36, // Logical Hebrew Microsoft
ISO_8859_8_I = 37, // Iso Hebrew Logical
HEBREW_VISUAL = 38, // Iso Hebrew Visual
CZECH_CP852 = 39,
CZECH_CSN_369103 = 40, // aka ISO_IR_139 aka KOI8_CS
MSFT_CP1253 = 41, // used for Greek
RUSSIAN_CP866 = 42,
ISO_8859_13 = 43,
ISO_2022_KR = 44,
GBK = 45,
GB18030 = 46,
BIG5_HKSCS = 47,
ISO_2022_CN = 48,
TSCII = 49,
TAMIL_MONO = 50,
TAMIL_BI = 51,
JAGRAN = 52,
MACINTOSH_ROMAN = 53,
UTF7 = 54,
BHASKAR = 55, // Indic encoding - Devanagari
HTCHANAKYA = 56, // 56 Indic encoding - Devanagari
UTF16BE = 57, // big-endian UTF-16
UTF16LE = 58, // little-endian UTF-16
UTF32BE = 59, // big-endian UTF-32
UTF32LE = 60, // little-endian UTF-32
BINARYENC = 61,
HZ_GB_2312 = 62,
UTF8UTF8 = 63,
TAM_ELANGO = 64, // Elango - Tamil
TAM_LTTMBARANI = 65, // Barani - Tamil
TAM_SHREE = 66, // Shree - Tamil
TAM_TBOOMIS = 67, // TBoomis - Tamil
TAM_TMNEWS = 68, // TMNews - Tamil
TAM_WEBTAMIL = 69, // Webtamil - Tamil
KDDI_SHIFT_JIS = 70,
DOCOMO_SHIFT_JIS = 71,
SOFTBANK_SHIFT_JIS = 72,
KDDI_ISO_2022_JP = 73,
SOFTBANK_ISO_2022_JP = 74,
}
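// Because the enum is `#[repr(C)]` with explicit discriminants, a variant can be
// converted to its numeric value with a simple cast, e.g. `Encoding::UTF8 as u32`
// yields `22`.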
pub static NUM_ENCODINGS: usize = 75;
| 43.04878 | 77 | 0.493484 |
7939cbba214f9f27e9c031b1f4cc5bdfe7b07391 | 744 | //! Prediction agents module.
use crate::Shared;
pub trait ValuePredictor<S> {
/// Compute the estimated value of V(s).
fn predict_v(&self, s: &S) -> f64;
}
impl<S, T: ValuePredictor<S>> ValuePredictor<S> for Shared<T> {
fn predict_v(&self, s: &S) -> f64 { self.borrow().predict_v(s) }
}
pub trait ActionValuePredictor<S, A> {
/// Compute the estimated value of Q(s, a).
fn predict_q(&self, s: &S, a: &A) -> f64;
}
impl<S, A, T: ActionValuePredictor<S, A>> ActionValuePredictor<S, A> for Shared<T> {
fn predict_q(&self, s: &S, a: &A) -> f64 { self.borrow().predict_q(s, a) }
}
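// A minimal sketch (not part of this crate) of a table-backed implementor,
// assuming states are plain indices:
//
//     struct TableV { values: Vec<f64> }
//
//     impl ValuePredictor<usize> for TableV {
//         fn predict_v(&self, s: &usize) -> f64 { self.values[*s] }
//     }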
pub mod gtd;
pub mod lstd;
pub mod mc;
pub mod td;
// TODO:
// Implement the algorithms discussed in https://arxiv.org/pdf/1304.3999.pdf
| 25.655172 | 84 | 0.641129 |
8980419dee569e01211cd3662be11a914856a8c3 | 3,748 | // Copyright 2016 Tuomo Hartikainen <[email protected]>.
// Licensed under the 2-clause BSD license, see LICENSE for details.
use std::fs;
use std::io;
use std::rc::Rc;
use std::path::{Path, PathBuf};
use status::Status;
use todo_item::TodoItem;
pub fn get_dateless_items(items: &Vec<Rc<TodoItem>>)
-> Vec<Rc<TodoItem>> {
let mut dateless: Vec<Rc<TodoItem>> = Vec::new();
for item in items {
if item.date.is_none() {
dateless.push(item.clone());
}
}
dateless
}
pub fn get_files_in_dir(dir: &Path) -> io::Result<Vec<PathBuf>> {
let mut files = Vec::new();
// get file list
if try!(fs::metadata(dir)).is_dir() {
for entry in fs::read_dir(dir).unwrap() {
match entry {
Err(err) => println!("Error: {}", err),
Ok(dirent) => match dirent.file_type() {
Err(err) => println!("could not find file type for \
file '{}'",
err),
Ok(ft) => if ft.is_file() { files.push(dirent.path()) },
},
}
}
}
Ok(files)
}
pub fn get_item_by_id(items: &Vec<Rc<TodoItem>>, i: i32)
-> Option<Rc<TodoItem>> {
for item in items {
if item.id == i {
return Some(item.clone());
}
};
None
}
pub fn get_items_on_date(items: &Vec<Rc<TodoItem>>, date_str: &str)
-> Vec<Rc<TodoItem>> {
let mut today: Vec<Rc<TodoItem>> = Vec::new();
for item in items {
match item.get_date_str() {
Some(d) => {
if d.eq(&date_str) {
today.push(item.clone());
}
},
None => {},
};
}
today
}
pub fn get_items_after(items: &Vec<Rc<TodoItem>>, date_str: &str)
-> Vec<Rc<TodoItem>> {
let mut list: Vec<Rc<TodoItem>> = Vec::new();
for item in items {
if let Some(i_date) = item.get_date_str() {
if &i_date[..] > date_str {
list.push(item.clone());
}
}
}
list
}
pub fn get_items_before(items: &Vec<Rc<TodoItem>>, date_str: &str)
-> Vec<Rc<TodoItem>> {
let mut list: Vec<Rc<TodoItem>> = Vec::new();
for item in items {
if let Some(i_date) = item.get_date_str() {
if &i_date[..] < date_str {
list.push(item.clone());
}
}
}
list
}
pub fn get_todo_items(path: &Path) -> io::Result<Vec<Rc<TodoItem>>> {
let mut items: Vec<Rc<TodoItem>> = Vec::new();
let files = try!(get_files_in_dir(path));
for (id, file) in (1..).zip(files.iter()) {
match TodoItem::new_from_file(&file, id) {
Ok(i) => items.push(Rc::new(i)),
Err(err)=> print_err!("Could not load todo file '{:?}': {}",
file, err),
};
};
// Sort items here, so filtered items will be "automatically" in order too
items.sort();
Ok(items)
}
// TODO: should items with `None` status really end up in the undone list?
pub fn get_undone_items(items: &Vec<Rc<TodoItem>>) -> Vec<Rc<TodoItem>> {
let mut undone: Vec<Rc<TodoItem>> = Vec::new();
for item in items {
match item.status {
Some(ref s) => {
if *s == Status::Todo { undone.push(item.clone()); }
},
None => undone.push(item.clone()),
};
};
undone
}
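/// Removes the item with id `i` from `items` and returns it, or `None` if no
/// item with that id exists.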
pub fn remove_item_by_id(items: &mut Vec<Rc<TodoItem>>, i: i32)
-> Option<Rc<TodoItem>> {
items.iter().position(|ref p| p.id == i).map(|e| items.remove(e))
}
| 27.357664 | 78 | 0.489061 |
d599b3cad241510051ddc8026075b801277fc689 | 3,751 | //
// Sysinfo
//
// Copyright (c) 2017 Guillaume Gomez
//
use crate::Pid;
#[cfg(not(any(target_os = "windows", target_os = "unknown", target_arch = "wasm32")))]
use std::ffi::OsStr;
#[cfg(not(any(target_os = "windows", target_os = "unknown", target_arch = "wasm32")))]
use std::os::unix::ffi::OsStrExt;
#[cfg(not(any(target_os = "windows", target_os = "unknown", target_arch = "wasm32")))]
use std::path::Path;
#[allow(clippy::useless_conversion)]
#[cfg(not(any(
target_os = "windows",
target_os = "unknown",
target_arch = "wasm32",
target_os = "macos",
target_os = "ios"
)))]
pub fn realpath(original: &Path) -> std::path::PathBuf {
use libc::{c_char, lstat, stat, S_IFLNK, S_IFMT};
use std::fs;
use std::mem::MaybeUninit;
use std::path::PathBuf;
fn and(x: u32, y: u32) -> u32 {
x & y
}
// let ori = Path::new(original.to_str().unwrap());
// Right now lstat on windows doesn't work quite well
// if cfg!(windows) {
// return PathBuf::from(ori);
// }
let result = PathBuf::from(original);
let mut result_s = result.to_str().unwrap_or("").as_bytes().to_vec();
result_s.push(0);
let mut buf = MaybeUninit::<stat>::uninit();
let res = unsafe { lstat(result_s.as_ptr() as *const c_char, buf.as_mut_ptr()) };
let buf = unsafe { buf.assume_init() };
if res < 0 || and(buf.st_mode.into(), S_IFMT.into()) != S_IFLNK.into() {
PathBuf::new()
} else {
match fs::read_link(&result) {
Ok(f) => f,
Err(_) => PathBuf::new(),
}
}
}
/* convert a path to a NUL-terminated Vec<u8> suitable for use with C functions */
#[cfg(not(any(target_os = "windows", target_os = "unknown", target_arch = "wasm32")))]
pub fn to_cpath(path: &Path) -> Vec<u8> {
let path_os: &OsStr = path.as_ref();
let mut cpath = path_os.as_bytes().to_vec();
cpath.push(0);
cpath
}
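// For example (illustrative only), `to_cpath(Path::new("/proc"))` yields the bytes
// `b"/proc\0"`, which can then be handed to C APIs that expect a path.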
/// Returns the pid for the current process.
///
/// `Err` is returned in case the platform isn't supported.
///
/// ```no_run
/// use sysinfo::get_current_pid;
///
/// match get_current_pid() {
/// Ok(pid) => {
/// println!("current pid: {}", pid);
/// }
/// Err(e) => {
/// eprintln!("failed to get current pid: {}", e);
/// }
/// }
/// ```
#[allow(clippy::unnecessary_wraps)]
pub fn get_current_pid() -> Result<Pid, &'static str> {
cfg_if::cfg_if! {
if #[cfg(not(any(target_os = "windows", target_os = "unknown", target_arch = "wasm32")))] {
fn inner() -> Result<Pid, &'static str> {
unsafe { Ok(::libc::getpid()) }
}
} else if #[cfg(target_os = "windows")] {
fn inner() -> Result<Pid, &'static str> {
use winapi::um::processthreadsapi::GetCurrentProcessId;
unsafe { Ok(GetCurrentProcessId() as Pid) }
}
} else if #[cfg(target_os = "unknown")] {
fn inner() -> Result<Pid, &'static str> {
Err("Unavailable on this platform")
}
} else {
fn inner() -> Result<Pid, &'static str> {
Err("Unknown platform")
}
}
}
inner()
}
/// Converts the value into a parallel iterator (if the multithread feature is enabled)
/// Uses the rayon::iter::IntoParallelIterator trait
#[cfg(feature = "multithread")]
pub fn into_iter<T>(val: T) -> T::Iter
where
T: rayon::iter::IntoParallelIterator,
{
val.into_par_iter()
}
/// Converts the value into a sequential iterator (if the multithread feature is disabled)
/// Uses the std::iter::IntoIterator trait
#[cfg(not(feature = "multithread"))]
pub fn into_iter<T>(val: T) -> T::IntoIter
where
T: IntoIterator,
{
val.into_iter()
}
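// Illustrative use (hypothetical caller): the same call site works whether or not
// the `multithread` feature is enabled, since both `Iterator` and rayon's
// `ParallelIterator` provide `map`/`sum`:
//
//     let total: u32 = into_iter(vec![1u32, 2, 3]).map(|x| x * 2).sum();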
| 30.25 | 99 | 0.57638 |
b9d8a4ec68fadd2109370da0eb121034e078f3f8 | 61,739 | //! This file builds up the `ScopeTree`, which describes
//! the parent links in the region hierarchy.
//!
//! For more information about how MIR-based region-checking works,
//! see the [rustc guide].
//!
//! [rustc guide]: https://rust-lang.github.io/rustc-guide/mir/borrowck.html
use crate::hir;
use crate::hir::Node;
use crate::hir::def_id::DefId;
use crate::hir::intravisit::{self, Visitor, NestedVisitorMap};
use crate::hir::{Block, Arm, Pat, PatKind, Stmt, Expr, Local};
use crate::ich::{StableHashingContext, NodeIdHashingMode};
use crate::util::nodemap::{FxHashMap, FxHashSet};
use crate::ty::{self, DefIdTree, TyCtxt};
use crate::ty::query::Providers;
use rustc_data_structures::indexed_vec::Idx;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};
use rustc_macros::HashStable;
use syntax::source_map;
use syntax_pos::{Span, DUMMY_SP};
use std::fmt;
use std::mem;
/// Represents a statically-describable scope that can be used to
/// bound the lifetime/region for values.
///
/// `Node(node_id)`: Any AST node that has any scope at all has the
/// `Node(node_id)` scope. Other variants represent special cases not
/// immediately derivable from the abstract syntax tree structure.
///
/// `DestructionScope(node_id)` represents the scope of destructors
/// implicitly-attached to `node_id` that run immediately after the
/// expression for `node_id` itself. Not every AST node carries a
/// `DestructionScope`, but those that are `terminating_scopes` do;
/// see discussion with `ScopeTree`.
///
/// `Remainder { block, statement_index }` represents
/// the scope of user code running immediately after the initializer
/// expression for the indexed statement, until the end of the block.
///
/// So: the following code can be broken down into the scopes beneath:
///
/// ```text
/// let a = f().g( 'b: { let x = d(); let y = d(); x.h(y) } ) ;
///
/// +-+ (D12.)
/// +-+ (D11.)
/// +---------+ (R10.)
/// +-+ (D9.)
/// +----------+ (M8.)
/// +----------------------+ (R7.)
/// +-+ (D6.)
/// +----------+ (M5.)
/// +-----------------------------------+ (M4.)
/// +--------------------------------------------------+ (M3.)
/// +--+ (M2.)
/// +-----------------------------------------------------------+ (M1.)
///
/// (M1.): Node scope of the whole `let a = ...;` statement.
/// (M2.): Node scope of the `f()` expression.
/// (M3.): Node scope of the `f().g(..)` expression.
/// (M4.): Node scope of the block labeled `'b:`.
/// (M5.): Node scope of the `let x = d();` statement
/// (D6.): DestructionScope for temporaries created during M5.
/// (R7.): Remainder scope for block `'b:`, stmt 0 (let x = ...).
/// (M8.): Node scope of the `let y = d();` statement.
/// (D9.): DestructionScope for temporaries created during M8.
/// (R10.): Remainder scope for block `'b:`, stmt 1 (let y = ...).
/// (D11.): DestructionScope for temporaries and bindings from block `'b:`.
/// (D12.): DestructionScope for temporaries created during M1 (e.g., f()).
/// ```
///
/// Note that while the above picture shows the destruction scopes
/// as following their corresponding node scopes, in the internal
/// data structures of the compiler the destruction scopes are
/// represented as enclosing parents. This is sound because we use the
/// enclosing parent relationship just to ensure that referenced
/// values live long enough; phrased another way, the starting point
/// of each range is not really the important thing in the above
/// picture, but rather the ending point.
//
// FIXME(pnkfelix): this currently derives `PartialOrd` and `Ord` to
// placate the same deriving in `ty::FreeRegion`, but we may want to
// actually attach a more meaningful ordering to scopes than the one
// generated via deriving here.
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Copy,
RustcEncodable, RustcDecodable, HashStable)]
pub struct Scope {
pub id: hir::ItemLocalId,
pub data: ScopeData,
}
impl fmt::Debug for Scope {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.data {
ScopeData::Node => write!(fmt, "Node({:?})", self.id),
ScopeData::CallSite => write!(fmt, "CallSite({:?})", self.id),
ScopeData::Arguments => write!(fmt, "Arguments({:?})", self.id),
ScopeData::Destruction => write!(fmt, "Destruction({:?})", self.id),
ScopeData::Remainder(fsi) => write!(
fmt,
"Remainder {{ block: {:?}, first_statement_index: {}}}",
self.id,
fsi.as_u32(),
),
}
}
}
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Debug, Copy,
RustcEncodable, RustcDecodable, HashStable)]
pub enum ScopeData {
Node,
/// Scope of the call-site for a function or closure
/// (outlives the arguments as well as the body).
CallSite,
/// Scope of arguments passed to a function or closure
/// (they outlive its body).
Arguments,
/// Scope of destructors for temporaries of node-id.
Destruction,
/// Scope following a `let id = expr;` binding in a block.
Remainder(FirstStatementIndex)
}
newtype_index! {
/// Represents a subscope of `block` for a binding that is introduced
/// by `block.stmts[first_statement_index]`. Such subscopes represent
/// a suffix of the block. Note that each subscope does not include
/// the initializer expression, if any, for the statement indexed by
/// `first_statement_index`.
///
/// For example, given `{ let (a, b) = EXPR_1; let c = EXPR_2; ... }`:
///
/// * The subscope with `first_statement_index == 0` is scope of both
/// `a` and `b`; it does not include EXPR_1, but does include
/// everything after that first `let`. (If you want a scope that
/// includes EXPR_1 as well, then do not use `Scope::Remainder`,
/// but instead another `Scope` that encompasses the whole block,
/// e.g., `Scope::Node`.
///
/// * The subscope with `first_statement_index == 1` is scope of `c`,
/// and thus does not include EXPR_2, but covers the `...`.
pub struct FirstStatementIndex {
derive [HashStable]
}
}
// compilation error if size of `ScopeData` is not the same as a `u32`
static_assert_size!(ScopeData, 4);
impl Scope {
/// Returns a item-local ID associated with this scope.
///
/// N.B., likely to be replaced as API is refined; e.g., pnkfelix
/// anticipates `fn entry_node_id` and `fn each_exit_node_id`.
pub fn item_local_id(&self) -> hir::ItemLocalId {
self.id
}
pub fn hir_id(&self, scope_tree: &ScopeTree) -> hir::HirId {
match scope_tree.root_body {
Some(hir_id) => {
hir::HirId {
owner: hir_id.owner,
local_id: self.item_local_id()
}
}
None => hir::DUMMY_HIR_ID
}
}
/// Returns the span of this `Scope`. Note that in general the
/// returned span may not correspond to the span of any `NodeId` in
/// the AST.
pub fn span(&self, tcx: TyCtxt<'_>, scope_tree: &ScopeTree) -> Span {
let hir_id = self.hir_id(scope_tree);
if hir_id == hir::DUMMY_HIR_ID {
return DUMMY_SP;
}
let span = tcx.hir().span(hir_id);
if let ScopeData::Remainder(first_statement_index) = self.data {
if let Node::Block(ref blk) = tcx.hir().get(hir_id) {
// Want span for scope starting after the
// indexed statement and ending at end of
// `blk`; reuse span of `blk` and shift `lo`
// forward to end of indexed statement.
//
                // (This is the special case alluded to in the
// doc-comment for this method)
let stmt_span = blk.stmts[first_statement_index.index()].span;
// To avoid issues with macro-generated spans, the span
// of the statement must be nested in that of the block.
if span.lo() <= stmt_span.lo() && stmt_span.lo() <= span.hi() {
return Span::new(stmt_span.lo(), span.hi(), span.ctxt());
}
}
}
span
}
}
pub type ScopeDepth = u32;
/// The region scope tree encodes information about region relationships.
#[derive(Default, Debug)]
pub struct ScopeTree {
/// If not empty, this body is the root of this region hierarchy.
root_body: Option<hir::HirId>,
/// The parent of the root body owner, if the latter is an
/// an associated const or method, as impls/traits can also
/// have lifetime parameters free in this body.
root_parent: Option<hir::HirId>,
/// Maps from a scope ID to the enclosing scope id;
/// this is usually corresponding to the lexical nesting, though
/// in the case of closures the parent scope is the innermost
/// conditional expression or repeating block. (Note that the
/// enclosing scope ID for the block associated with a closure is
/// the closure itself.)
parent_map: FxHashMap<Scope, (Scope, ScopeDepth)>,
/// Maps from a variable or binding ID to the block in which that
/// variable is declared.
var_map: FxHashMap<hir::ItemLocalId, Scope>,
/// Maps from a `NodeId` to the associated destruction scope (if any).
destruction_scopes: FxHashMap<hir::ItemLocalId, Scope>,
/// `rvalue_scopes` includes entries for those expressions whose
/// cleanup scope is larger than the default. The map goes from the
/// expression ID to the cleanup scope id. For rvalues not present in
/// this table, the appropriate cleanup scope is the innermost
/// enclosing statement, conditional expression, or repeating
/// block (see `terminating_scopes`).
/// In constants, None is used to indicate that certain expressions
/// escape into 'static and should have no local cleanup scope.
rvalue_scopes: FxHashMap<hir::ItemLocalId, Option<Scope>>,
/// Encodes the hierarchy of fn bodies. Every fn body (including
/// closures) forms its own distinct region hierarchy, rooted in
/// the block that is the fn body. This map points from the ID of
/// that root block to the ID of the root block for the enclosing
/// fn, if any. Thus the map structures the fn bodies into a
/// hierarchy based on their lexical mapping. This is used to
/// handle the relationships between regions in a fn and in a
/// closure defined by that fn. See the "Modeling closures"
/// section of the README in infer::region_constraints for
/// more details.
closure_tree: FxHashMap<hir::ItemLocalId, hir::ItemLocalId>,
/// If there are any `yield` nested within a scope, this map
/// stores the `Span` of the last one and its index in the
/// postorder of the Visitor traversal on the HIR.
///
/// HIR Visitor postorder indexes might seem like a peculiar
/// thing to care about. but it turns out that HIR bindings
/// and the temporary results of HIR expressions are never
/// storage-live at the end of HIR nodes with postorder indexes
/// lower than theirs, and therefore don't need to be suspended
/// at yield-points at these indexes.
///
/// For an example, suppose we have some code such as:
/// ```rust,ignore (example)
/// foo(f(), yield y, bar(g()))
/// ```
///
/// With the HIR tree (calls numbered for expository purposes)
/// ```
/// Call#0(foo, [Call#1(f), Yield(y), Call#2(bar, Call#3(g))])
/// ```
///
/// Obviously, the result of `f()` was created before the yield
/// (and therefore needs to be kept valid over the yield) while
/// the result of `g()` occurs after the yield (and therefore
/// doesn't). If we want to infer that, we can look at the
/// postorder traversal:
/// ```plain,ignore
/// `foo` `f` Call#1 `y` Yield `bar` `g` Call#3 Call#2 Call#0
/// ```
///
/// In which we can easily see that `Call#1` occurs before the yield,
/// and `Call#3` after it.
///
/// To see that this method works, consider:
///
/// Let `D` be our binding/temporary and `U` be our other HIR node, with
/// `HIR-postorder(U) < HIR-postorder(D)` (in our example, U would be
/// the yield and D would be one of the calls). Let's show that
/// `D` is storage-dead at `U`.
///
/// Remember that storage-live/storage-dead refers to the state of
/// the *storage*, and does not consider moves/drop flags.
///
/// Then:
/// 1. From the ordering guarantee of HIR visitors (see
/// `rustc::hir::intravisit`), `D` does not dominate `U`.
/// 2. Therefore, `D` is *potentially* storage-dead at `U` (because
/// we might visit `U` without ever getting to `D`).
/// 3. However, we guarantee that at each HIR point, each
/// binding/temporary is always either always storage-live
/// or always storage-dead. This is what is being guaranteed
/// by `terminating_scopes` including all blocks where the
/// count of executions is not guaranteed.
/// 4. By `2.` and `3.`, `D` is *statically* storage-dead at `U`,
/// QED.
///
    /// This property ought to not rely on (3) in an essential way -- it
/// is probably still correct even if we have "unrestricted" terminating
/// scopes. However, why use the complicated proof when a simple one
/// works?
///
/// A subtle thing: `box` expressions, such as `box (&x, yield 2, &y)`. It
/// might seem that a `box` expression creates a `Box<T>` temporary
/// when it *starts* executing, at `HIR-preorder(BOX-EXPR)`. That might
/// be true in the MIR desugaring, but it is not important in the semantics.
///
/// The reason is that semantically, until the `box` expression returns,
/// the values are still owned by their containing expressions. So
/// we'll see that `&x`.
yield_in_scope: FxHashMap<Scope, YieldData>,
/// The number of visit_expr and visit_pat calls done in the body.
/// Used to sanity check visit_expr/visit_pat call count when
/// calculating generator interiors.
body_expr_count: FxHashMap<hir::BodyId, usize>,
}
#[derive(Debug, Copy, Clone, RustcEncodable, RustcDecodable, HashStable)]
pub struct YieldData {
/// The `Span` of the yield.
pub span: Span,
/// The number of expressions and patterns appearing before the `yield` in the body plus one.
pub expr_and_pat_count: usize,
pub source: hir::YieldSource,
}
#[derive(Debug, Copy, Clone)]
pub struct Context {
/// The root of the current region tree. This is typically the id
/// of the innermost fn body. Each fn forms its own disjoint tree
/// in the region hierarchy. These fn bodies are themselves
/// arranged into a tree. See the "Modeling closures" section of
/// the README in `infer::region_constraints` for more
/// details.
root_id: Option<hir::ItemLocalId>,
/// The scope that contains any new variables declared, plus its depth in
/// the scope tree.
var_parent: Option<(Scope, ScopeDepth)>,
/// Region parent of expressions, etc., plus its depth in the scope tree.
parent: Option<(Scope, ScopeDepth)>,
}
struct RegionResolutionVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
// The number of expressions and patterns visited in the current body.
expr_and_pat_count: usize,
// When this is `true`, we record the `Scopes` we encounter
// when processing a Yield expression. This allows us to fix
// up their indices.
pessimistic_yield: bool,
// Stores scopes when `pessimistic_yield` is `true`.
fixup_scopes: Vec<Scope>,
// The generated scope tree.
scope_tree: ScopeTree,
cx: Context,
/// `terminating_scopes` is a set containing the ids of each
/// statement, or conditional/repeating expression. These scopes
/// are calling "terminating scopes" because, when attempting to
/// find the scope of a temporary, by default we search up the
/// enclosing scopes until we encounter the terminating scope. A
/// conditional/repeating expression is one which is not
/// guaranteed to execute exactly once upon entering the parent
/// scope. This could be because the expression only executes
/// conditionally, such as the expression `b` in `a && b`, or
/// because the expression may execute many times, such as a loop
/// body. The reason that we distinguish such expressions is that,
/// upon exiting the parent scope, we cannot statically know how
/// many times the expression executed, and thus if the expression
/// creates temporaries we cannot know statically how many such
/// temporaries we would have to cleanup. Therefore, we ensure that
/// the temporaries never outlast the conditional/repeating
/// expression, preventing the need for dynamic checks and/or
/// arbitrary amounts of stack space. Terminating scopes end
/// up being contained in a DestructionScope that contains the
/// destructor's execution.
terminating_scopes: FxHashSet<hir::ItemLocalId>,
}
struct ExprLocatorVisitor {
hir_id: hir::HirId,
result: Option<usize>,
expr_and_pat_count: usize,
}
// This visitor has to have the same `visit_expr` calls as `RegionResolutionVisitor`
// since `expr_count` is compared against the results there.
impl<'tcx> Visitor<'tcx> for ExprLocatorVisitor {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::None
}
fn visit_pat(&mut self, pat: &'tcx Pat) {
intravisit::walk_pat(self, pat);
self.expr_and_pat_count += 1;
if pat.hir_id == self.hir_id {
self.result = Some(self.expr_and_pat_count);
}
}
fn visit_expr(&mut self, expr: &'tcx Expr) {
debug!("ExprLocatorVisitor - pre-increment {} expr = {:?}",
self.expr_and_pat_count,
expr);
intravisit::walk_expr(self, expr);
self.expr_and_pat_count += 1;
debug!("ExprLocatorVisitor - post-increment {} expr = {:?}",
self.expr_and_pat_count,
expr);
if expr.hir_id == self.hir_id {
self.result = Some(self.expr_and_pat_count);
}
}
}
impl<'tcx> ScopeTree {
pub fn record_scope_parent(&mut self, child: Scope, parent: Option<(Scope, ScopeDepth)>) {
debug!("{:?}.parent = {:?}", child, parent);
if let Some(p) = parent {
let prev = self.parent_map.insert(child, p);
assert!(prev.is_none());
}
// Record the destruction scopes for later so we can query them.
if let ScopeData::Destruction = child.data {
self.destruction_scopes.insert(child.item_local_id(), child);
}
}
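    /// Invokes `e` with `(child, parent)` for every parent link recorded in the tree.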
pub fn each_encl_scope<E>(&self, mut e: E) where E: FnMut(Scope, Scope) {
for (&child, &parent) in &self.parent_map {
e(child, parent.0)
}
}
pub fn each_var_scope<E>(&self, mut e: E) where E: FnMut(&hir::ItemLocalId, Scope) {
for (child, &parent) in self.var_map.iter() {
e(child, parent)
}
}
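    /// Returns the destruction scope recorded for the node `n`, if any.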
pub fn opt_destruction_scope(&self, n: hir::ItemLocalId) -> Option<Scope> {
self.destruction_scopes.get(&n).cloned()
}
/// Records that `sub_closure` is defined within `sup_closure`. These IDs
/// should be the ID of the block that is the fn body, which is
/// also the root of the region hierarchy for that fn.
fn record_closure_parent(&mut self,
sub_closure: hir::ItemLocalId,
sup_closure: hir::ItemLocalId) {
debug!("record_closure_parent(sub_closure={:?}, sup_closure={:?})",
sub_closure, sup_closure);
assert!(sub_closure != sup_closure);
let previous = self.closure_tree.insert(sub_closure, sup_closure);
assert!(previous.is_none());
}
fn record_var_scope(&mut self, var: hir::ItemLocalId, lifetime: Scope) {
debug!("record_var_scope(sub={:?}, sup={:?})", var, lifetime);
assert!(var != lifetime.item_local_id());
self.var_map.insert(var, lifetime);
}
fn record_rvalue_scope(&mut self, var: hir::ItemLocalId, lifetime: Option<Scope>) {
debug!("record_rvalue_scope(sub={:?}, sup={:?})", var, lifetime);
if let Some(lifetime) = lifetime {
assert!(var != lifetime.item_local_id());
}
self.rvalue_scopes.insert(var, lifetime);
}
/// Returns the narrowest scope that encloses `id`, if any.
pub fn opt_encl_scope(&self, id: Scope) -> Option<Scope> {
self.parent_map.get(&id).cloned().map(|(p, _)| p)
}
/// Returns the narrowest scope that encloses `id`, if any.
#[allow(dead_code)] // used in cfg
pub fn encl_scope(&self, id: Scope) -> Scope {
self.opt_encl_scope(id).unwrap()
}
/// Returns the lifetime of the local variable `var_id`
pub fn var_scope(&self, var_id: hir::ItemLocalId) -> Scope {
self.var_map.get(&var_id).cloned().unwrap_or_else(||
bug!("no enclosing scope for id {:?}", var_id))
}
/// Returns the scope when the temp created by `expr_id` will be cleaned up.
pub fn temporary_scope(&self, expr_id: hir::ItemLocalId) -> Option<Scope> {
// Check for a designated rvalue scope.
if let Some(&s) = self.rvalue_scopes.get(&expr_id) {
debug!("temporary_scope({:?}) = {:?} [custom]", expr_id, s);
return s;
}
// Otherwise, locate the innermost terminating scope
// if there's one. Static items, for instance, won't
// have an enclosing scope, hence no scope will be
// returned.
let mut id = Scope { id: expr_id, data: ScopeData::Node };
while let Some(&(p, _)) = self.parent_map.get(&id) {
match p.data {
ScopeData::Destruction => {
debug!("temporary_scope({:?}) = {:?} [enclosing]",
expr_id, id);
return Some(id);
}
_ => id = p
}
}
debug!("temporary_scope({:?}) = None", expr_id);
return None;
}
/// Returns the lifetime of the variable `id`.
pub fn var_region(&self, id: hir::ItemLocalId) -> ty::RegionKind {
let scope = ty::ReScope(self.var_scope(id));
debug!("var_region({:?}) = {:?}", id, scope);
scope
}
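    /// Returns `true` if either scope is a subscope of (or equal to) the other.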
pub fn scopes_intersect(&self, scope1: Scope, scope2: Scope) -> bool {
self.is_subscope_of(scope1, scope2) ||
self.is_subscope_of(scope2, scope1)
}
/// Returns `true` if `subscope` is equal to or is lexically nested inside `superscope`, and
/// `false` otherwise.
pub fn is_subscope_of(&self,
subscope: Scope,
superscope: Scope)
-> bool {
let mut s = subscope;
debug!("is_subscope_of({:?}, {:?})", subscope, superscope);
while superscope != s {
match self.opt_encl_scope(s) {
None => {
debug!("is_subscope_of({:?}, {:?}, s={:?})=false",
subscope, superscope, s);
return false;
}
Some(scope) => s = scope
}
}
debug!("is_subscope_of({:?}, {:?})=true", subscope, superscope);
return true;
}
/// Returns the ID of the innermost containing body.
pub fn containing_body(&self, mut scope: Scope) -> Option<hir::ItemLocalId> {
loop {
if let ScopeData::CallSite = scope.data {
return Some(scope.item_local_id());
}
scope = self.opt_encl_scope(scope)?;
}
}
/// Finds the nearest common ancestor of two scopes. That is, finds the
/// smallest scope which is greater than or equal to both `scope_a` and
/// `scope_b`.
pub fn nearest_common_ancestor(&self, scope_a: Scope, scope_b: Scope) -> Scope {
if scope_a == scope_b { return scope_a; }
let mut a = scope_a;
let mut b = scope_b;
// Get the depth of each scope's parent. If either scope has no parent,
// it must be the root, which means we can stop immediately because the
// root must be the nearest common ancestor. (In practice, this is
// moderately common.)
let (parent_a, parent_a_depth) = match self.parent_map.get(&a) {
Some(pd) => *pd,
None => return a,
};
let (parent_b, parent_b_depth) = match self.parent_map.get(&b) {
Some(pd) => *pd,
None => return b,
};
if parent_a_depth > parent_b_depth {
// `a` is lower than `b`. Move `a` up until it's at the same depth
// as `b`. The first move up is trivial because we already found
// `parent_a` above; the loop does the remaining N-1 moves.
a = parent_a;
for _ in 0..(parent_a_depth - parent_b_depth - 1) {
a = self.parent_map.get(&a).unwrap().0;
}
} else if parent_b_depth > parent_a_depth {
// `b` is lower than `a`.
b = parent_b;
for _ in 0..(parent_b_depth - parent_a_depth - 1) {
b = self.parent_map.get(&b).unwrap().0;
}
} else {
// Both scopes are at the same depth, and we know they're not equal
// because that case was tested for at the top of this function. So
// we can trivially move them both up one level now.
assert!(parent_a_depth != 0);
a = parent_a;
b = parent_b;
}
// Now both scopes are at the same level. We move upwards in lockstep
// until they match. In practice, this loop is almost always executed
// zero times because `a` is almost always a direct ancestor of `b` or
// vice versa.
while a != b {
a = self.parent_map.get(&a).unwrap().0;
b = self.parent_map.get(&b).unwrap().0;
};
a
}
/// Assuming that the provided region was defined within this `ScopeTree`,
/// returns the outermost `Scope` that the region outlives.
pub fn early_free_scope(&self, tcx: TyCtxt<'tcx>, br: &ty::EarlyBoundRegion) -> Scope {
let param_owner = tcx.parent(br.def_id).unwrap();
let param_owner_id = tcx.hir().as_local_hir_id(param_owner).unwrap();
let scope = tcx.hir().maybe_body_owned_by(param_owner_id).map(|body_id| {
tcx.hir().body(body_id).value.hir_id.local_id
}).unwrap_or_else(|| {
// The lifetime was defined on node that doesn't own a body,
// which in practice can only mean a trait or an impl, that
// is the parent of a method, and that is enforced below.
if Some(param_owner_id) != self.root_parent {
tcx.sess.delay_span_bug(
DUMMY_SP,
&format!("free_scope: {:?} not recognized by the \
region scope tree for {:?} / {:?}",
param_owner,
self.root_parent.map(|id| tcx.hir().local_def_id(id)),
self.root_body.map(|hir_id| DefId::local(hir_id.owner))));
}
// The trait/impl lifetime is in scope for the method's body.
self.root_body.unwrap().local_id
});
Scope { id: scope, data: ScopeData::CallSite }
}
/// Assuming that the provided region was defined within this `ScopeTree`,
/// returns the outermost `Scope` that the region outlives.
pub fn free_scope(&self, tcx: TyCtxt<'tcx>, fr: &ty::FreeRegion) -> Scope {
let param_owner = match fr.bound_region {
ty::BoundRegion::BrNamed(def_id, _) => {
tcx.parent(def_id).unwrap()
}
_ => fr.scope
};
// Ensure that the named late-bound lifetimes were defined
// on the same function that they ended up being freed in.
assert_eq!(param_owner, fr.scope);
let param_owner_id = tcx.hir().as_local_hir_id(param_owner).unwrap();
let body_id = tcx.hir().body_owned_by(param_owner_id);
Scope { id: tcx.hir().body(body_id).value.hir_id.local_id, data: ScopeData::CallSite }
}
/// Checks whether the given scope contains a `yield`. If so,
/// returns `Some((span, expr_count))` with the span of a yield we found and
/// the number of expressions and patterns appearing before the `yield` in the body + 1.
    /// If there are multiple yields in a scope, the one with the highest number is returned.
pub fn yield_in_scope(&self, scope: Scope) -> Option<YieldData> {
self.yield_in_scope.get(&scope).cloned()
}
/// Checks whether the given scope contains a `yield` and if that yield could execute
/// after `expr`. If so, it returns the span of that `yield`.
/// `scope` must be inside the body.
pub fn yield_in_scope_for_expr(&self,
scope: Scope,
expr_hir_id: hir::HirId,
body: &'tcx hir::Body) -> Option<Span> {
self.yield_in_scope(scope).and_then(|YieldData { span, expr_and_pat_count, .. }| {
let mut visitor = ExprLocatorVisitor {
hir_id: expr_hir_id,
result: None,
expr_and_pat_count: 0,
};
visitor.visit_body(body);
if expr_and_pat_count >= visitor.result.unwrap() {
Some(span)
} else {
None
}
})
}
/// Gives the number of expressions visited in a body.
/// Used to sanity check visit_expr call count when
/// calculating generator interiors.
pub fn body_expr_count(&self, body_id: hir::BodyId) -> Option<usize> {
self.body_expr_count.get(&body_id).map(|r| *r)
}
}
/// Records the lifetime of a local variable as `cx.var_parent`
fn record_var_lifetime(
visitor: &mut RegionResolutionVisitor<'_>,
var_id: hir::ItemLocalId,
_sp: Span,
) {
match visitor.cx.var_parent {
None => {
// this can happen in extern fn declarations like
//
// extern fn isalnum(c: c_int) -> c_int
}
Some((parent_scope, _)) =>
visitor.scope_tree.record_var_scope(var_id, parent_scope),
}
}
fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx hir::Block) {
debug!("resolve_block(blk.hir_id={:?})", blk.hir_id);
let prev_cx = visitor.cx;
// We treat the tail expression in the block (if any) somewhat
// differently from the statements. The issue has to do with
// temporary lifetimes. Consider the following:
//
// quux({
// let inner = ... (&bar()) ...;
//
// (... (&foo()) ...) // (the tail expression)
// }, other_argument());
//
// Each of the statements within the block is a terminating
// scope, and thus a temporary (e.g., the result of calling
// `bar()` in the initializer expression for `let inner = ...;`)
// will be cleaned up immediately after its corresponding
// statement (i.e., `let inner = ...;`) executes.
//
// On the other hand, temporaries associated with evaluating the
// tail expression for the block are assigned lifetimes so that
// they will be cleaned up as part of the terminating scope
// *surrounding* the block expression. Here, the terminating
// scope for the block expression is the `quux(..)` call; so
// those temporaries will only be cleaned up *after* both
// `other_argument()` has run and also the call to `quux(..)`
// itself has returned.
visitor.enter_node_scope_with_dtor(blk.hir_id.local_id);
visitor.cx.var_parent = visitor.cx.parent;
{
// This block should be kept approximately in sync with
// `intravisit::walk_block`. (We manually walk the block, rather
// than call `walk_block`, in order to maintain precise
// index information.)
for (i, statement) in blk.stmts.iter().enumerate() {
match statement.node {
hir::StmtKind::Local(..) |
hir::StmtKind::Item(..) => {
// Each declaration introduces a subscope for bindings
// introduced by the declaration; this subscope covers a
// suffix of the block. Each subscope in a block has the
// previous subscope in the block as a parent, except for
// the first such subscope, which has the block itself as a
// parent.
visitor.enter_scope(
Scope {
id: blk.hir_id.local_id,
data: ScopeData::Remainder(FirstStatementIndex::new(i))
}
);
visitor.cx.var_parent = visitor.cx.parent;
}
hir::StmtKind::Expr(..) |
hir::StmtKind::Semi(..) => {}
}
visitor.visit_stmt(statement)
}
walk_list!(visitor, visit_expr, &blk.expr);
}
visitor.cx = prev_cx;
}
fn resolve_arm<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, arm: &'tcx hir::Arm) {
let prev_cx = visitor.cx;
visitor.enter_scope(
Scope {
id: arm.hir_id.local_id,
data: ScopeData::Node,
}
);
visitor.cx.var_parent = visitor.cx.parent;
visitor.terminating_scopes.insert(arm.body.hir_id.local_id);
if let Some(hir::Guard::If(ref expr)) = arm.guard {
visitor.terminating_scopes.insert(expr.hir_id.local_id);
}
intravisit::walk_arm(visitor, arm);
visitor.cx = prev_cx;
}
fn resolve_pat<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, pat: &'tcx hir::Pat) {
visitor.record_child_scope(Scope { id: pat.hir_id.local_id, data: ScopeData::Node });
// If this is a binding then record the lifetime of that binding.
if let PatKind::Binding(..) = pat.node {
record_var_lifetime(visitor, pat.hir_id.local_id, pat.span);
}
debug!("resolve_pat - pre-increment {} pat = {:?}", visitor.expr_and_pat_count, pat);
intravisit::walk_pat(visitor, pat);
visitor.expr_and_pat_count += 1;
debug!("resolve_pat - post-increment {} pat = {:?}", visitor.expr_and_pat_count, pat);
}
fn resolve_stmt<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, stmt: &'tcx hir::Stmt) {
let stmt_id = stmt.hir_id.local_id;
debug!("resolve_stmt(stmt.id={:?})", stmt_id);
// Every statement will clean up the temporaries created during
// execution of that statement. Therefore each statement has an
// associated destruction scope that represents the scope of the
// statement plus its destructors, and thus the scope for which
// regions referenced by the destructors need to survive.
visitor.terminating_scopes.insert(stmt_id);
let prev_parent = visitor.cx.parent;
visitor.enter_node_scope_with_dtor(stmt_id);
intravisit::walk_stmt(visitor, stmt);
visitor.cx.parent = prev_parent;
}
fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx hir::Expr) {
debug!("resolve_expr - pre-increment {} expr = {:?}", visitor.expr_and_pat_count, expr);
let prev_cx = visitor.cx;
visitor.enter_node_scope_with_dtor(expr.hir_id.local_id);
{
let terminating_scopes = &mut visitor.terminating_scopes;
let mut terminating = |id: hir::ItemLocalId| {
terminating_scopes.insert(id);
};
match expr.node {
// Conditional or repeating scopes are always terminating
// scopes, meaning that temporaries cannot outlive them.
// This ensures fixed size stacks.
hir::ExprKind::Binary(
source_map::Spanned { node: hir::BinOpKind::And, .. }, _, ref r) |
hir::ExprKind::Binary(
source_map::Spanned { node: hir::BinOpKind::Or, .. }, _, ref r) => {
// For shortcircuiting operators, mark the RHS as a terminating
// scope since it only executes conditionally.
terminating(r.hir_id.local_id);
}
hir::ExprKind::Loop(ref body, _, _) => {
terminating(body.hir_id.local_id);
}
hir::ExprKind::DropTemps(ref expr) => {
// `DropTemps(expr)` does not denote a conditional scope.
// Rather, we want to achieve the same behavior as `{ let _t = expr; _t }`.
terminating(expr.hir_id.local_id);
}
hir::ExprKind::AssignOp(..) | hir::ExprKind::Index(..) |
hir::ExprKind::Unary(..) | hir::ExprKind::Call(..) | hir::ExprKind::MethodCall(..) => {
// FIXME(https://github.com/rust-lang/rfcs/issues/811) Nested method calls
//
// The lifetimes for a call or method call look as follows:
//
// call.id
// - arg0.id
// - ...
// - argN.id
// - call.callee_id
//
// The idea is that call.callee_id represents *the time when
// the invoked function is actually running* and call.id
// represents *the time to prepare the arguments and make the
// call*. See the section "Borrows in Calls" borrowck/README.md
// for an extended explanation of why this distinction is
// important.
//
// record_superlifetime(new_cx, expr.callee_id);
}
_ => {}
}
}
let prev_pessimistic = visitor.pessimistic_yield;
// Ordinarily, we can rely on the visit order of HIR intravisit
// to correspond to the actual execution order of statements.
    // However, there's a weird corner case with compound assignment
// operators (e.g. `a += b`). The evaluation order depends on whether
// or not the operator is overloaded (e.g. whether or not a trait
// like AddAssign is implemented).
// For primitive types (which, despite having a trait impl, don't actually
    // end up calling it), the evaluation order is right-to-left. For example,
// the following code snippet:
//
// let y = &mut 0;
// *{println!("LHS!"); y} += {println!("RHS!"); 1};
//
// will print:
//
// RHS!
// LHS!
//
// However, if the operator is used on a non-primitive type,
// the evaluation order will be left-to-right, since the operator
// actually get desugared to a method call. For example, this
// nearly identical code snippet:
//
// let y = &mut String::new();
// *{println!("LHS String"); y} += {println!("RHS String"); "hi"};
//
// will print:
// LHS String
// RHS String
//
// To determine the actual execution order, we need to perform
// trait resolution. Unfortunately, we need to be able to compute
// yield_in_scope before type checking is even done, as it gets
// used by AST borrowcheck.
//
// Fortunately, we don't need to know the actual execution order.
// It suffices to know the 'worst case' order with respect to yields.
// Specifically, we need to know the highest 'expr_and_pat_count'
// that we could assign to the yield expression. To do this,
// we pick the greater of the two values from the left-hand
// and right-hand expressions. This makes us overly conservative
// about what types could possibly live across yield points,
// but we will never fail to detect that a type does actually
// live across a yield point. The latter part is critical -
// we're already overly conservative about what types will live
// across yield points, as the generated MIR will determine
// when things are actually live. However, for typecheck to work
// properly, we can't miss any types.
match expr.node {
// Manually recurse over closures, because they are the only
// case of nested bodies that share the parent environment.
hir::ExprKind::Closure(.., body, _, _) => {
let body = visitor.tcx.hir().body(body);
visitor.visit_body(body);
},
hir::ExprKind::AssignOp(_, ref left_expr, ref right_expr) => {
debug!("resolve_expr - enabling pessimistic_yield, was previously {}",
prev_pessimistic);
let start_point = visitor.fixup_scopes.len();
visitor.pessimistic_yield = true;
// If the actual execution order turns out to be right-to-left,
// then we're fine. However, if the actual execution order is left-to-right,
// then we'll assign too low a count to any `yield` expressions
// we encounter in 'right_expression' - they should really occur after all of the
// expressions in 'left_expression'.
visitor.visit_expr(&right_expr);
visitor.pessimistic_yield = prev_pessimistic;
debug!("resolve_expr - restoring pessimistic_yield to {}", prev_pessimistic);
visitor.visit_expr(&left_expr);
debug!("resolve_expr - fixing up counts to {}", visitor.expr_and_pat_count);
// Remove and process any scopes pushed by the visitor
let target_scopes = visitor.fixup_scopes.drain(start_point..);
for scope in target_scopes {
let mut yield_data = visitor.scope_tree.yield_in_scope.get_mut(&scope).unwrap();
let count = yield_data.expr_and_pat_count;
let span = yield_data.span;
// expr_and_pat_count never decreases. Since we recorded counts in yield_in_scope
// before walking the left-hand side, it should be impossible for the recorded
// count to be greater than the left-hand side count.
if count > visitor.expr_and_pat_count {
bug!("Encountered greater count {} at span {:?} - expected no greater than {}",
count, span, visitor.expr_and_pat_count);
}
let new_count = visitor.expr_and_pat_count;
debug!("resolve_expr - increasing count for scope {:?} from {} to {} at span {:?}",
scope, count, new_count, span);
yield_data.expr_and_pat_count = new_count;
}
}
_ => intravisit::walk_expr(visitor, expr)
}
visitor.expr_and_pat_count += 1;
debug!("resolve_expr post-increment {}, expr = {:?}", visitor.expr_and_pat_count, expr);
if let hir::ExprKind::Yield(_, source) = &expr.node {
// Mark this expr's scope and all parent scopes as containing `yield`.
let mut scope = Scope { id: expr.hir_id.local_id, data: ScopeData::Node };
loop {
let data = YieldData {
span: expr.span,
expr_and_pat_count: visitor.expr_and_pat_count,
source: *source,
};
visitor.scope_tree.yield_in_scope.insert(scope, data);
if visitor.pessimistic_yield {
debug!("resolve_expr in pessimistic_yield - marking scope {:?} for fixup", scope);
visitor.fixup_scopes.push(scope);
}
// Keep traversing up while we can.
match visitor.scope_tree.parent_map.get(&scope) {
// Don't cross from closure bodies to their parent.
Some(&(superscope, _)) => match superscope.data {
ScopeData::CallSite => break,
_ => scope = superscope
},
None => break
}
}
}
visitor.cx = prev_cx;
}
fn resolve_local<'tcx>(
visitor: &mut RegionResolutionVisitor<'tcx>,
pat: Option<&'tcx hir::Pat>,
init: Option<&'tcx hir::Expr>,
) {
debug!("resolve_local(pat={:?}, init={:?})", pat, init);
let blk_scope = visitor.cx.var_parent.map(|(p, _)| p);
// As an exception to the normal rules governing temporary
// lifetimes, initializers in a let have a temporary lifetime
// of the enclosing block. This means that e.g., a program
// like the following is legal:
//
// let ref x = HashMap::new();
//
// Because the hash map will be freed in the enclosing block.
//
// We express the rules more formally based on 3 grammars (defined
// fully in the helpers below that implement them):
//
// 1. `E&`, which matches expressions like `&<rvalue>` that
// own a pointer into the stack.
//
// 2. `P&`, which matches patterns like `ref x` or `(ref x, ref
// y)` that produce ref bindings into the value they are
// matched against or something (at least partially) owned by
// the value they are matched against. (By partially owned,
// I mean that creating a binding into a ref-counted or managed value
// would still count.)
//
// 3. `ET`, which matches both rvalues like `foo()` as well as places
// based on rvalues like `foo().x[2].y`.
//
// A subexpression `<rvalue>` that appears in a let initializer
// `let pat [: ty] = expr` has an extended temporary lifetime if
// any of the following conditions are met:
//
// A. `pat` matches `P&` and `expr` matches `ET`
// (covers cases where `pat` creates ref bindings into an rvalue
// produced by `expr`)
// B. `ty` is a borrowed pointer and `expr` matches `ET`
// (covers cases where coercion creates a borrow)
// C. `expr` matches `E&`
// (covers cases `expr` borrows an rvalue that is then assigned
// to memory (at least partially) owned by the binding)
//
// Here are some examples hopefully giving an intuition where each
// rule comes into play and why:
//
    //     Rule A. `let (ref x, ref y) = (foo().x, 44)`. The rvalue `(foo().x, 44)`
    //     would have an extended lifetime, but not `foo()`.
//
    //     Rule B. `let x = &foo().x`. The rvalue `foo()` would have an extended
    //     lifetime.
//
// In some cases, multiple rules may apply (though not to the same
// rvalue). For example:
//
// let ref x = [&a(), &b()];
//
// Here, the expression `[...]` has an extended lifetime due to rule
// A, but the inner rvalues `a()` and `b()` have an extended lifetime
// due to rule C.
if let Some(expr) = init {
record_rvalue_scope_if_borrow_expr(visitor, &expr, blk_scope);
if let Some(pat) = pat {
if is_binding_pat(pat) {
record_rvalue_scope(visitor, &expr, blk_scope);
}
}
}
// Make sure we visit the initializer first, so expr_and_pat_count remains correct
if let Some(expr) = init {
visitor.visit_expr(expr);
}
if let Some(pat) = pat {
visitor.visit_pat(pat);
}
/// Returns `true` if `pat` match the `P&` non-terminal.
///
/// P& = ref X
/// | StructName { ..., P&, ... }
/// | VariantName(..., P&, ...)
/// | [ ..., P&, ... ]
/// | ( ..., P&, ... )
/// | box P&
fn is_binding_pat(pat: &hir::Pat) -> bool {
// Note that the code below looks for *explicit* refs only, that is, it won't
// know about *implicit* refs as introduced in #42640.
//
// This is not a problem. For example, consider
//
// let (ref x, ref y) = (Foo { .. }, Bar { .. });
//
// Due to the explicit refs on the left hand side, the below code would signal
// that the temporary value on the right hand side should live until the end of
// the enclosing block (as opposed to being dropped after the let is complete).
//
// To create an implicit ref, however, you must have a borrowed value on the RHS
// already, as in this example (which won't compile before #42640):
//
// let Foo { x, .. } = &Foo { x: ..., ... };
//
// in place of
//
// let Foo { ref x, .. } = Foo { ... };
//
// In the former case (the implicit ref version), the temporary is created by the
// & expression, and its lifetime would be extended to the end of the block (due
// to a different rule, not the below code).
match pat.node {
PatKind::Binding(hir::BindingAnnotation::Ref, ..) |
PatKind::Binding(hir::BindingAnnotation::RefMut, ..) => true,
PatKind::Struct(_, ref field_pats, _) => {
field_pats.iter().any(|fp| is_binding_pat(&fp.pat))
}
PatKind::Slice(ref pats1, ref pats2, ref pats3) => {
pats1.iter().any(|p| is_binding_pat(&p)) ||
pats2.iter().any(|p| is_binding_pat(&p)) ||
pats3.iter().any(|p| is_binding_pat(&p))
}
PatKind::TupleStruct(_, ref subpats, _) |
PatKind::Tuple(ref subpats, _) => {
subpats.iter().any(|p| is_binding_pat(&p))
}
PatKind::Box(ref subpat) => {
is_binding_pat(&subpat)
}
_ => false,
}
}
/// If `expr` matches the `E&` grammar, then records an extended rvalue scope as appropriate:
///
/// E& = & ET
/// | StructName { ..., f: E&, ... }
/// | [ ..., E&, ... ]
/// | ( ..., E&, ... )
/// | {...; E&}
/// | box E&
/// | E& as ...
/// | ( E& )
fn record_rvalue_scope_if_borrow_expr<'tcx>(
visitor: &mut RegionResolutionVisitor<'tcx>,
expr: &hir::Expr,
blk_id: Option<Scope>,
) {
match expr.node {
hir::ExprKind::AddrOf(_, ref subexpr) => {
record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id);
record_rvalue_scope(visitor, &subexpr, blk_id);
}
hir::ExprKind::Struct(_, ref fields, _) => {
for field in fields {
record_rvalue_scope_if_borrow_expr(
visitor, &field.expr, blk_id);
}
}
hir::ExprKind::Array(ref subexprs) |
hir::ExprKind::Tup(ref subexprs) => {
for subexpr in subexprs {
record_rvalue_scope_if_borrow_expr(
visitor, &subexpr, blk_id);
}
}
hir::ExprKind::Cast(ref subexpr, _) => {
record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id)
}
hir::ExprKind::Block(ref block, _) => {
if let Some(ref subexpr) = block.expr {
record_rvalue_scope_if_borrow_expr(
visitor, &subexpr, blk_id);
}
}
_ => {}
}
}
/// Applied to an expression `expr` if `expr` -- or something owned or partially owned by
/// `expr` -- is going to be indirectly referenced by a variable in a let statement. In that
    /// case, the "temporary lifetime" of `expr` is extended to be the block enclosing the `let`
/// statement.
///
/// More formally, if `expr` matches the grammar `ET`, record the rvalue scope of the matching
/// `<rvalue>` as `blk_id`:
///
/// ET = *ET
/// | ET[...]
/// | ET.f
/// | (ET)
/// | <rvalue>
///
/// Note: ET is intended to match "rvalues or places based on rvalues".
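    ///
    /// For example (added illustration): in `let x = &foo().bar[2];` the place
    /// expression `foo().bar[2]` matches `ET` (an `ET[...]` of an `ET.f` of an
    /// `<rvalue>`), so the temporary produced by `foo()` is recorded with the
    /// scope of the block enclosing the `let`.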
fn record_rvalue_scope<'tcx>(
visitor: &mut RegionResolutionVisitor<'tcx>,
expr: &hir::Expr,
blk_scope: Option<Scope>,
) {
let mut expr = expr;
loop {
            // Note: give all the expressions matching `ET` the extended
            // temporary lifetime, not just the innermost rvalue,
// because in codegen if we must compile e.g., `*rvalue()`
// into a temporary, we request the temporary scope of the
// outer expression.
visitor.scope_tree.record_rvalue_scope(expr.hir_id.local_id, blk_scope);
match expr.node {
hir::ExprKind::AddrOf(_, ref subexpr) |
hir::ExprKind::Unary(hir::UnDeref, ref subexpr) |
hir::ExprKind::Field(ref subexpr, _) |
hir::ExprKind::Index(ref subexpr, _) => {
expr = &subexpr;
}
_ => {
return;
}
}
}
}
}
impl<'tcx> RegionResolutionVisitor<'tcx> {
/// Records the current parent (if any) as the parent of `child_scope`.
/// Returns the depth of `child_scope`.
fn record_child_scope(&mut self, child_scope: Scope) -> ScopeDepth {
let parent = self.cx.parent;
self.scope_tree.record_scope_parent(child_scope, parent);
// If `child_scope` has no parent, it must be the root node, and so has
// a depth of 1. Otherwise, its depth is one more than its parent's.
parent.map_or(1, |(_p, d)| d + 1)
}
/// Records the current parent (if any) as the parent of `child_scope`,
/// and sets `child_scope` as the new current parent.
fn enter_scope(&mut self, child_scope: Scope) {
let child_depth = self.record_child_scope(child_scope);
self.cx.parent = Some((child_scope, child_depth));
}
fn enter_node_scope_with_dtor(&mut self, id: hir::ItemLocalId) {
// If node was previously marked as a terminating scope during the
// recursive visit of its parent node in the AST, then we need to
// account for the destruction scope representing the scope of
// the destructors that run immediately after it completes.
if self.terminating_scopes.contains(&id) {
self.enter_scope(Scope { id, data: ScopeData::Destruction });
}
self.enter_scope(Scope { id, data: ScopeData::Node });
}
}
impl<'tcx> Visitor<'tcx> for RegionResolutionVisitor<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::None
}
fn visit_block(&mut self, b: &'tcx Block) {
resolve_block(self, b);
}
fn visit_body(&mut self, body: &'tcx hir::Body) {
let body_id = body.id();
let owner_id = self.tcx.hir().body_owner(body_id);
debug!("visit_body(id={:?}, span={:?}, body.id={:?}, cx.parent={:?})",
owner_id,
self.tcx.sess.source_map().span_to_string(body.value.span),
body_id,
self.cx.parent);
let outer_ec = mem::replace(&mut self.expr_and_pat_count, 0);
let outer_cx = self.cx;
let outer_ts = mem::take(&mut self.terminating_scopes);
self.terminating_scopes.insert(body.value.hir_id.local_id);
if let Some(root_id) = self.cx.root_id {
self.scope_tree.record_closure_parent(body.value.hir_id.local_id, root_id);
}
self.cx.root_id = Some(body.value.hir_id.local_id);
self.enter_scope(Scope { id: body.value.hir_id.local_id, data: ScopeData::CallSite });
self.enter_scope(Scope { id: body.value.hir_id.local_id, data: ScopeData::Arguments });
// The arguments and `self` are parented to the fn.
self.cx.var_parent = self.cx.parent.take();
for param in &body.params {
self.visit_pat(¶m.pat);
}
        // The body of every fn is a root scope.
self.cx.parent = self.cx.var_parent;
if self.tcx.hir().body_owner_kind(owner_id).is_fn_or_closure() {
self.visit_expr(&body.value)
} else {
// Only functions have an outer terminating (drop) scope, while
// temporaries in constant initializers may be 'static, but only
// according to rvalue lifetime semantics, using the same
// syntactical rules used for let initializers.
//
// e.g., in `let x = &f();`, the temporary holding the result from
// the `f()` call lives for the entirety of the surrounding block.
//
// Similarly, `const X: ... = &f();` would have the result of `f()`
// live for `'static`, implying (if Drop restrictions on constants
// ever get lifted) that the value *could* have a destructor, but
// it'd get leaked instead of the destructor running during the
// evaluation of `X` (if at all allowed by CTFE).
//
// However, `const Y: ... = g(&f());`, like `let y = g(&f());`,
// would *not* let the `f()` temporary escape into an outer scope
// (i.e., `'static`), which means that after `g` returns, it drops,
// and all the associated destruction scope rules apply.
self.cx.var_parent = None;
resolve_local(self, None, Some(&body.value));
}
if body.generator_kind.is_some() {
self.scope_tree.body_expr_count.insert(body_id, self.expr_and_pat_count);
}
// Restore context we had at the start.
self.expr_and_pat_count = outer_ec;
self.cx = outer_cx;
self.terminating_scopes = outer_ts;
}
fn visit_arm(&mut self, a: &'tcx Arm) {
resolve_arm(self, a);
}
fn visit_pat(&mut self, p: &'tcx Pat) {
resolve_pat(self, p);
}
fn visit_stmt(&mut self, s: &'tcx Stmt) {
resolve_stmt(self, s);
}
fn visit_expr(&mut self, ex: &'tcx Expr) {
resolve_expr(self, ex);
}
fn visit_local(&mut self, l: &'tcx Local) {
resolve_local(self, Some(&l.pat), l.init.as_ref().map(|e| &**e));
}
}
fn region_scope_tree(tcx: TyCtxt<'_>, def_id: DefId) -> &ScopeTree {
let closure_base_def_id = tcx.closure_base_def_id(def_id);
if closure_base_def_id != def_id {
return tcx.region_scope_tree(closure_base_def_id);
}
let id = tcx.hir().as_local_hir_id(def_id).unwrap();
let scope_tree = if let Some(body_id) = tcx.hir().maybe_body_owned_by(id) {
let mut visitor = RegionResolutionVisitor {
tcx,
scope_tree: ScopeTree::default(),
expr_and_pat_count: 0,
cx: Context {
root_id: None,
parent: None,
var_parent: None,
},
terminating_scopes: Default::default(),
pessimistic_yield: false,
fixup_scopes: vec![],
};
let body = tcx.hir().body(body_id);
visitor.scope_tree.root_body = Some(body.value.hir_id);
// If the item is an associated const or a method,
// record its impl/trait parent, as it can also have
// lifetime parameters free in this body.
match tcx.hir().get(id) {
Node::ImplItem(_) |
Node::TraitItem(_) => {
visitor.scope_tree.root_parent = Some(tcx.hir().get_parent_item(id));
}
_ => {}
}
visitor.visit_body(body);
visitor.scope_tree
} else {
ScopeTree::default()
};
tcx.arena.alloc(scope_tree)
}
pub fn provide(providers: &mut Providers<'_>) {
*providers = Providers {
region_scope_tree,
..*providers
};
}
impl<'a> HashStable<StableHashingContext<'a>> for ScopeTree {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
let ScopeTree {
root_body,
root_parent,
ref body_expr_count,
ref parent_map,
ref var_map,
ref destruction_scopes,
ref rvalue_scopes,
ref closure_tree,
ref yield_in_scope,
} = *self;
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
root_body.hash_stable(hcx, hasher);
root_parent.hash_stable(hcx, hasher);
});
body_expr_count.hash_stable(hcx, hasher);
parent_map.hash_stable(hcx, hasher);
var_map.hash_stable(hcx, hasher);
destruction_scopes.hash_stable(hcx, hasher);
rvalue_scopes.hash_stable(hcx, hasher);
closure_tree.hash_stable(hcx, hasher);
yield_in_scope.hash_stable(hcx, hasher);
}
}
| 40.537754 | 99 | 0.582582 |
bb6311ea6888a08d95a7fdd72de0e68968ce8994 | 1,331 | #[test]
fn absolute_layout_no_size() {
let mut stretch = stretch::Stretch::new();
let node0 = stretch
.new_node(
stretch::style::Style { position_type: stretch::style::PositionType::Absolute, ..Default::default() },
vec![],
)
.unwrap();
let node = stretch
.new_node(
stretch::style::Style {
size: stretch::geometry::Size {
width: stretch::style::Dimension::Points(100f32),
height: stretch::style::Dimension::Points(100f32),
..Default::default()
},
..Default::default()
},
vec![node0],
)
.unwrap();
stretch.compute_layout(node, stretch::geometry::Size::undefined()).unwrap();
assert_eq!(stretch.layout(node).unwrap().size.width, 100f32);
assert_eq!(stretch.layout(node).unwrap().size.height, 100f32);
assert_eq!(stretch.layout(node).unwrap().location.x, 0f32);
assert_eq!(stretch.layout(node).unwrap().location.y, 0f32);
assert_eq!(stretch.layout(node0).unwrap().size.width, 0f32);
assert_eq!(stretch.layout(node0).unwrap().size.height, 0f32);
assert_eq!(stretch.layout(node0).unwrap().location.x, 0f32);
assert_eq!(stretch.layout(node0).unwrap().location.y, 0f32);
}
| 40.333333 | 114 | 0.583772 |
64c979463c731667c279112d4f2476f6c393405d | 1,545 | use crate::battle::{builder::BattleBuilder, model::CharaConfig, rpg_core::PlayMode};
use chrono::NaiveDateTime;
use sea_orm::{entity::prelude::*, DeriveEntityModel};
use uuid::Uuid;
#[derive(Debug, Clone, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "playdata")]
pub struct Model {
#[sea_orm(primary_key)]
pub battle_uuid: Uuid,
pub player: serde_json::Value,
pub enemy: serde_json::Value,
pub elapesd_turns: u32,
pub start_time: NaiveDateTime,
pub play_mode: String,
}
#[derive(Clone, Copy, Debug, EnumIter)]
pub enum Relation {
PlayData,
}
impl RelationTrait for Relation {
fn def(&self) -> RelationDef {
match self {
&Self::PlayData => Entity::has_many(Entity).into(),
}
}
}
impl Related<Entity> for Entity {
fn to() -> RelationDef {
Relation::PlayData.def()
}
}
impl ActiveModelBehavior for ActiveModel {}
impl From<Model> for BattleBuilder {
fn from(from: Model) -> BattleBuilder {
BattleBuilder::new(
PlayMode::try_from_value(&from.play_mode).unwrap(),
// base type is CharaConfig
Some(
CharaConfig::chara_new_noasync(&from.player["charabase"]["name"].as_str().unwrap())
.unwrap(),
),
// base type is CharaConfig
Some(
CharaConfig::chara_new_noasync(&from.enemy["charabase"]["name"].as_str().unwrap())
.unwrap(),
),
Some(from.elapesd_turns),
)
}
}
| 27.105263 | 99 | 0.601294 |
dee6b5fc427ef993844b61dcebbd81b6ba57a45e | 50,333 | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::F18R2 {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
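// Illustrative use of the generated accessors. The `can` handle below is an
// assumption about the surrounding peripheral API, not something defined in
// this file:
//
//     can.f18r2.modify(|_, w| w.fb0().set_bit().fb31().clear_bit());
//     let bit0_set = can.f18r2.read().fb0().bit_is_set();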
#[doc = r" Value of the field"]
pub struct FB0R {
bits: bool,
}
impl FB0R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB1R {
bits: bool,
}
impl FB1R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB2R {
bits: bool,
}
impl FB2R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB3R {
bits: bool,
}
impl FB3R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB4R {
bits: bool,
}
impl FB4R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB5R {
bits: bool,
}
impl FB5R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB6R {
bits: bool,
}
impl FB6R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB7R {
bits: bool,
}
impl FB7R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB8R {
bits: bool,
}
impl FB8R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB9R {
bits: bool,
}
impl FB9R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB10R {
bits: bool,
}
impl FB10R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB11R {
bits: bool,
}
impl FB11R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB12R {
bits: bool,
}
impl FB12R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB13R {
bits: bool,
}
impl FB13R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB14R {
bits: bool,
}
impl FB14R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB15R {
bits: bool,
}
impl FB15R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB16R {
bits: bool,
}
impl FB16R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB17R {
bits: bool,
}
impl FB17R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB18R {
bits: bool,
}
impl FB18R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB19R {
bits: bool,
}
impl FB19R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB20R {
bits: bool,
}
impl FB20R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB21R {
bits: bool,
}
impl FB21R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB22R {
bits: bool,
}
impl FB22R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB23R {
bits: bool,
}
impl FB23R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB24R {
bits: bool,
}
impl FB24R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB25R {
bits: bool,
}
impl FB25R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB26R {
bits: bool,
}
impl FB26R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB27R {
bits: bool,
}
impl FB27R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB28R {
bits: bool,
}
impl FB28R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB29R {
bits: bool,
}
impl FB29R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB30R {
bits: bool,
}
impl FB30R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB31R {
bits: bool,
}
impl FB31R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Proxy"]
pub struct _FB0W<'a> {
w: &'a mut W,
}
impl<'a> _FB0W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB1W<'a> {
w: &'a mut W,
}
impl<'a> _FB1W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB2W<'a> {
w: &'a mut W,
}
impl<'a> _FB2W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB3W<'a> {
w: &'a mut W,
}
impl<'a> _FB3W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB4W<'a> {
w: &'a mut W,
}
impl<'a> _FB4W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB5W<'a> {
w: &'a mut W,
}
impl<'a> _FB5W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB6W<'a> {
w: &'a mut W,
}
impl<'a> _FB6W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB7W<'a> {
w: &'a mut W,
}
impl<'a> _FB7W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB8W<'a> {
w: &'a mut W,
}
impl<'a> _FB8W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 8;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB9W<'a> {
w: &'a mut W,
}
impl<'a> _FB9W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 9;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB10W<'a> {
w: &'a mut W,
}
impl<'a> _FB10W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 10;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB11W<'a> {
w: &'a mut W,
}
impl<'a> _FB11W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 11;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB12W<'a> {
w: &'a mut W,
}
impl<'a> _FB12W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 12;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB13W<'a> {
w: &'a mut W,
}
impl<'a> _FB13W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 13;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB14W<'a> {
w: &'a mut W,
}
impl<'a> _FB14W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 14;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB15W<'a> {
w: &'a mut W,
}
impl<'a> _FB15W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 15;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB16W<'a> {
w: &'a mut W,
}
impl<'a> _FB16W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 16;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB17W<'a> {
w: &'a mut W,
}
impl<'a> _FB17W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 17;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB18W<'a> {
w: &'a mut W,
}
impl<'a> _FB18W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 18;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB19W<'a> {
w: &'a mut W,
}
impl<'a> _FB19W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 19;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB20W<'a> {
w: &'a mut W,
}
impl<'a> _FB20W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 20;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB21W<'a> {
w: &'a mut W,
}
impl<'a> _FB21W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 21;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB22W<'a> {
w: &'a mut W,
}
impl<'a> _FB22W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 22;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB23W<'a> {
w: &'a mut W,
}
impl<'a> _FB23W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 23;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB24W<'a> {
w: &'a mut W,
}
impl<'a> _FB24W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 24;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB25W<'a> {
w: &'a mut W,
}
impl<'a> _FB25W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 25;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB26W<'a> {
w: &'a mut W,
}
impl<'a> _FB26W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 26;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB27W<'a> {
w: &'a mut W,
}
impl<'a> _FB27W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 27;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB28W<'a> {
w: &'a mut W,
}
impl<'a> _FB28W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 28;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB29W<'a> {
w: &'a mut W,
}
impl<'a> _FB29W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 29;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB30W<'a> {
w: &'a mut W,
}
impl<'a> _FB30W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 30;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB31W<'a> {
w: &'a mut W,
}
impl<'a> _FB31W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 31;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 0 - Filter bits"]
#[inline]
pub fn fb0(&self) -> FB0R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB0R { bits }
}
#[doc = "Bit 1 - Filter bits"]
#[inline]
pub fn fb1(&self) -> FB1R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB1R { bits }
}
#[doc = "Bit 2 - Filter bits"]
#[inline]
pub fn fb2(&self) -> FB2R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 2;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB2R { bits }
}
#[doc = "Bit 3 - Filter bits"]
#[inline]
pub fn fb3(&self) -> FB3R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 3;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB3R { bits }
}
#[doc = "Bit 4 - Filter bits"]
#[inline]
pub fn fb4(&self) -> FB4R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB4R { bits }
}
#[doc = "Bit 5 - Filter bits"]
#[inline]
pub fn fb5(&self) -> FB5R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB5R { bits }
}
#[doc = "Bit 6 - Filter bits"]
#[inline]
pub fn fb6(&self) -> FB6R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB6R { bits }
}
#[doc = "Bit 7 - Filter bits"]
#[inline]
pub fn fb7(&self) -> FB7R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB7R { bits }
}
#[doc = "Bit 8 - Filter bits"]
#[inline]
pub fn fb8(&self) -> FB8R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 8;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB8R { bits }
}
#[doc = "Bit 9 - Filter bits"]
#[inline]
pub fn fb9(&self) -> FB9R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 9;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB9R { bits }
}
#[doc = "Bit 10 - Filter bits"]
#[inline]
pub fn fb10(&self) -> FB10R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 10;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB10R { bits }
}
#[doc = "Bit 11 - Filter bits"]
#[inline]
pub fn fb11(&self) -> FB11R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 11;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB11R { bits }
}
#[doc = "Bit 12 - Filter bits"]
#[inline]
pub fn fb12(&self) -> FB12R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 12;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB12R { bits }
}
#[doc = "Bit 13 - Filter bits"]
#[inline]
pub fn fb13(&self) -> FB13R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 13;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB13R { bits }
}
#[doc = "Bit 14 - Filter bits"]
#[inline]
pub fn fb14(&self) -> FB14R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 14;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB14R { bits }
}
#[doc = "Bit 15 - Filter bits"]
#[inline]
pub fn fb15(&self) -> FB15R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 15;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB15R { bits }
}
#[doc = "Bit 16 - Filter bits"]
#[inline]
pub fn fb16(&self) -> FB16R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 16;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB16R { bits }
}
#[doc = "Bit 17 - Filter bits"]
#[inline]
pub fn fb17(&self) -> FB17R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 17;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB17R { bits }
}
#[doc = "Bit 18 - Filter bits"]
#[inline]
pub fn fb18(&self) -> FB18R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 18;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB18R { bits }
}
#[doc = "Bit 19 - Filter bits"]
#[inline]
pub fn fb19(&self) -> FB19R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 19;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB19R { bits }
}
#[doc = "Bit 20 - Filter bits"]
#[inline]
pub fn fb20(&self) -> FB20R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 20;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB20R { bits }
}
#[doc = "Bit 21 - Filter bits"]
#[inline]
pub fn fb21(&self) -> FB21R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 21;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB21R { bits }
}
#[doc = "Bit 22 - Filter bits"]
#[inline]
pub fn fb22(&self) -> FB22R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 22;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB22R { bits }
}
#[doc = "Bit 23 - Filter bits"]
#[inline]
pub fn fb23(&self) -> FB23R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 23;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB23R { bits }
}
#[doc = "Bit 24 - Filter bits"]
#[inline]
pub fn fb24(&self) -> FB24R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 24;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB24R { bits }
}
#[doc = "Bit 25 - Filter bits"]
#[inline]
pub fn fb25(&self) -> FB25R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 25;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB25R { bits }
}
#[doc = "Bit 26 - Filter bits"]
#[inline]
pub fn fb26(&self) -> FB26R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 26;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB26R { bits }
}
#[doc = "Bit 27 - Filter bits"]
#[inline]
pub fn fb27(&self) -> FB27R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 27;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB27R { bits }
}
#[doc = "Bit 28 - Filter bits"]
#[inline]
pub fn fb28(&self) -> FB28R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 28;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB28R { bits }
}
#[doc = "Bit 29 - Filter bits"]
#[inline]
pub fn fb29(&self) -> FB29R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 29;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB29R { bits }
}
#[doc = "Bit 30 - Filter bits"]
#[inline]
pub fn fb30(&self) -> FB30R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 30;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB30R { bits }
}
#[doc = "Bit 31 - Filter bits"]
#[inline]
pub fn fb31(&self) -> FB31R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 31;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB31R { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 0 - Filter bits"]
#[inline]
pub fn fb0(&mut self) -> _FB0W {
_FB0W { w: self }
}
#[doc = "Bit 1 - Filter bits"]
#[inline]
pub fn fb1(&mut self) -> _FB1W {
_FB1W { w: self }
}
#[doc = "Bit 2 - Filter bits"]
#[inline]
pub fn fb2(&mut self) -> _FB2W {
_FB2W { w: self }
}
#[doc = "Bit 3 - Filter bits"]
#[inline]
pub fn fb3(&mut self) -> _FB3W {
_FB3W { w: self }
}
#[doc = "Bit 4 - Filter bits"]
#[inline]
pub fn fb4(&mut self) -> _FB4W {
_FB4W { w: self }
}
#[doc = "Bit 5 - Filter bits"]
#[inline]
pub fn fb5(&mut self) -> _FB5W {
_FB5W { w: self }
}
#[doc = "Bit 6 - Filter bits"]
#[inline]
pub fn fb6(&mut self) -> _FB6W {
_FB6W { w: self }
}
#[doc = "Bit 7 - Filter bits"]
#[inline]
pub fn fb7(&mut self) -> _FB7W {
_FB7W { w: self }
}
#[doc = "Bit 8 - Filter bits"]
#[inline]
pub fn fb8(&mut self) -> _FB8W {
_FB8W { w: self }
}
#[doc = "Bit 9 - Filter bits"]
#[inline]
pub fn fb9(&mut self) -> _FB9W {
_FB9W { w: self }
}
#[doc = "Bit 10 - Filter bits"]
#[inline]
pub fn fb10(&mut self) -> _FB10W {
_FB10W { w: self }
}
#[doc = "Bit 11 - Filter bits"]
#[inline]
pub fn fb11(&mut self) -> _FB11W {
_FB11W { w: self }
}
#[doc = "Bit 12 - Filter bits"]
#[inline]
pub fn fb12(&mut self) -> _FB12W {
_FB12W { w: self }
}
#[doc = "Bit 13 - Filter bits"]
#[inline]
pub fn fb13(&mut self) -> _FB13W {
_FB13W { w: self }
}
#[doc = "Bit 14 - Filter bits"]
#[inline]
pub fn fb14(&mut self) -> _FB14W {
_FB14W { w: self }
}
#[doc = "Bit 15 - Filter bits"]
#[inline]
pub fn fb15(&mut self) -> _FB15W {
_FB15W { w: self }
}
#[doc = "Bit 16 - Filter bits"]
#[inline]
pub fn fb16(&mut self) -> _FB16W {
_FB16W { w: self }
}
#[doc = "Bit 17 - Filter bits"]
#[inline]
pub fn fb17(&mut self) -> _FB17W {
_FB17W { w: self }
}
#[doc = "Bit 18 - Filter bits"]
#[inline]
pub fn fb18(&mut self) -> _FB18W {
_FB18W { w: self }
}
#[doc = "Bit 19 - Filter bits"]
#[inline]
pub fn fb19(&mut self) -> _FB19W {
_FB19W { w: self }
}
#[doc = "Bit 20 - Filter bits"]
#[inline]
pub fn fb20(&mut self) -> _FB20W {
_FB20W { w: self }
}
#[doc = "Bit 21 - Filter bits"]
#[inline]
pub fn fb21(&mut self) -> _FB21W {
_FB21W { w: self }
}
#[doc = "Bit 22 - Filter bits"]
#[inline]
pub fn fb22(&mut self) -> _FB22W {
_FB22W { w: self }
}
#[doc = "Bit 23 - Filter bits"]
#[inline]
pub fn fb23(&mut self) -> _FB23W {
_FB23W { w: self }
}
#[doc = "Bit 24 - Filter bits"]
#[inline]
pub fn fb24(&mut self) -> _FB24W {
_FB24W { w: self }
}
#[doc = "Bit 25 - Filter bits"]
#[inline]
pub fn fb25(&mut self) -> _FB25W {
_FB25W { w: self }
}
#[doc = "Bit 26 - Filter bits"]
#[inline]
pub fn fb26(&mut self) -> _FB26W {
_FB26W { w: self }
}
#[doc = "Bit 27 - Filter bits"]
#[inline]
pub fn fb27(&mut self) -> _FB27W {
_FB27W { w: self }
}
#[doc = "Bit 28 - Filter bits"]
#[inline]
pub fn fb28(&mut self) -> _FB28W {
_FB28W { w: self }
}
#[doc = "Bit 29 - Filter bits"]
#[inline]
pub fn fb29(&mut self) -> _FB29W {
_FB29W { w: self }
}
#[doc = "Bit 30 - Filter bits"]
#[inline]
pub fn fb30(&mut self) -> _FB30W {
_FB30W { w: self }
}
#[doc = "Bit 31 - Filter bits"]
#[inline]
pub fn fb31(&mut self) -> _FB31W {
_FB31W { w: self }
}
}
| 25.772145 | 60 | 0.464467 |
29ec6fa51b717f048d783e6a91694c00024d379b | 7,483 | // enum4.rs
#[derive(Debug)]
enum Value {
Number(f64),
Str(String),
Bool(bool),
Arr(Vec<Value>)
}
use std::fmt;
impl fmt::Display for Value {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use Value::*;
match *self {
Number(n) => write!(f,"{} ",n),
Str(ref s) => write!(f,"{} ",s),
Bool(b) => write!(f,"{} ",b),
Arr(ref arr) => {
write!(f,"(")?;
for v in arr.iter() {
v.fmt(f)?;
}
write!(f,")")
}
}
}
}
struct Builder {
stack: Vec<Vec<Value>>,
current: Vec<Value>,
error: Option<String>,
open: bool
}
impl Builder {
fn new() -> Builder {
Builder {
stack: Vec::new(),
current: Vec::new(),
error: None,
open: false
}
}
fn push(&mut self, v: Value) -> &mut Builder {
if ! self.open {
self.error = Some("not open!".to_string());
}
if self.error.is_none() {
self.current.push(v);
}
self
}
fn s(&mut self, s: &str) -> &mut Builder {
self.push(Value::Str(s.to_string()))
}
fn b(&mut self, v: bool) -> &mut Builder {
self.push(Value::Bool(v))
}
fn n(&mut self, v: f64) -> &mut Builder {
self.push(Value::Number(v))
}
fn extract_current(&mut self, arr: Vec<Value>) -> Vec<Value> {
let mut current = arr;
std::mem::swap(&mut current, &mut self.current);
current
}
fn value(&mut self) -> Result<Value,SexprError> {
match self.error {
None => {
let current = self.extract_current(Vec::new());
Ok(Value::Arr(current))
},
Some(ref s) => SexprError::err(s.clone())
}
}
fn open(&mut self) -> &mut Builder {
if ! self.open {
self.open = true;
return self;
}
if self.error.is_some() { return self; }
let current = self.extract_current(Vec::new());
self.stack.push(current);
self
}
fn close(&mut self) -> &mut Builder {
if let Some(last_current) = self.stack.pop() {
let current = self.extract_current(last_current);
self.current.push(Value::Arr(current));
} else {
if self.open {
self.open = false;
} else {
self.error = Some("mismatched open/close".to_string());
}
}
self
}
}
fn parse(text: &str) -> Result<Value,SexprError> {
let mut builder = Builder::new();
let mut word = String::new();
for ch in text.chars() {
if ch.is_whitespace() {
if word.len() > 0 {
parse_word(&mut builder, &word)?;
word.clear();
}
} else
if ch == '(' {
builder.open();
} else
if ch == ')' {
if word.len() > 0 {
parse_word(&mut builder, &word)?;
word.clear();
}
builder.close();
} else {
word.push(ch);
}
}
builder.value()
}
use std::error::Error;
fn parse_word(builder: &mut Builder, word: &str) -> Result<(),SexprError> {
// guaranteed to be at least one character!
let first = word.chars().next().unwrap();
if word == "T" || word == "F" {
builder.b(word == "T");
} else
    // a bare "-" is the subtraction operator, not a (negative) number
    if first.is_digit(10) || (first == '-' && word.len() > 1) {
let num: f64 = word.parse()?;
builder.n(num);
} else {
builder.s(&word);
}
Ok(())
}
struct Pairs<'a> {
slice: &'a [Value],
idx: usize
}
fn pairs(v: &Value) -> Option<Pairs> {
match *v {
Value::Arr(ref arr) => Some(Pairs {
slice: &arr,
idx: 0
}),
_ => None
}
}
impl <'a> Iterator for Pairs<'a> {
type Item = (&'a str, &'a Value);
fn next(&mut self) -> Option<Self::Item> {
if self.idx == self.slice.len() {
return None; // no more pairs
}
let v = &self.slice[self.idx];
self.idx += 1;
match *v {
            // a key/value pair such as `(one 1)` has exactly two elements
            Value::Arr(ref arr) if arr.len() >= 2 => {
match arr[0] {
Value::Str(ref s) => {
Some((s,&arr[1]))
}
_ => None
}
},
_ => None
}
}
}
#[derive(Debug)]
pub struct SexprError {
details: String
}
impl fmt::Display for SexprError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f,"{}",self.details)
}
}
impl Error for SexprError {
fn description(&self) -> &str {
&self.details
}
}
impl SexprError {
pub fn new(msg: &str) -> SexprError {
SexprError{details: msg.to_string()}
}
pub fn err<T>(msg: String) -> Result<T,SexprError> {
Err(SexprError { details: msg })
}
}
impl From<std::num::ParseFloatError> for SexprError {
fn from(err: std::num::ParseFloatError) -> Self {
SexprError::new(err.description())
}
}
fn eval(v: &Value) -> Result<f64,SexprError> {
match *v {
Value::Arr(ref arr) if arr.len() > 2 => eval_op(&arr),
Value::Number(x) => Ok(x),
ref v => SexprError::err(format!("cannot convert {:?} to number", v))
}
}
fn eval_op(arr: &[Value]) -> Result<f64,SexprError> {
match arr[0] {
Value::Str(ref s) => {
if s == "+" || s == "*" {
let adding = s == "+";
let mut res = if adding {0.0} else {1.0};
for v in &arr[1..] {
let num = eval(v)?;
res = if adding {
res + num
} else {
res * num
}
}
Ok(res)
} else
if s == "-" || s == "/" {
let x = eval(&arr[1])?;
let y = eval(&arr[2])?;
let res = if s == "-" {
x - y
} else {
x / y
};
Ok(res)
} else {
SexprError::err(format!("unknown operator {:?}", s))
}
},
ref v => SexprError::err(format!("operator must be string {:?}", v))
}
}
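// Illustrative round trip (not part of the original example): in a function
// returning `Result<_, SexprError>`, `eval(&parse("(+ 1 (* 2 3))")?)`
// yields `Ok(7.0)`.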
fn main() {
// building the hard way
/*
use Value::*;
let s = "hello".to_string();
let v = vec![Number(1.0),Bool(false),Str(s)];
let arr = Arr(v);
let res = Arr(vec![Number(2.0),arr]);
println!("{:?}",res);
println!("{}",res);
let res = Builder::new().open()
.s("one")
.open()
.s("two")
.b(true)
.open()
.s("four")
.n(1.0)
.close()
.close().close().value().expect("error");
println!("{:?}",res);
println!("{}",res);
*/
let default = "( (one 1) (two 2) (three 3) )";
let test = std::env::args().skip(1).next().unwrap_or(default.to_string());
let res = parse(&test).expect("error");
println!("{:?}",res);
println!("{}",res);
//~ for (s,e) in pairs(&res).expect("iter") {
//~ println!("{} {}",s,e);
//~ }
let x = eval(&res);
println!("result is {:?}",x);
}
| 23.384375 | 78 | 0.42924 |
5d8c95059ba92114dd263cc0f136dba50f47d8f2 | 826 | use axum::extract::{FromRequest, RequestParts};
// re-exported `async_trait` macro, needed by the `#[async_trait]` attributes below
use axum::async_trait;
pub use axum::http::Extensions;
use crate::{service::*, Error, Result};
/// Constructor for the application's internal service-layer objects.
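///
/// A minimal sketch of an implementation; `UserService` and `DbPool` are
/// hypothetical names used only for illustration:
///
/// ```ignore
/// #[async_trait]
/// impl ServiceProvider for UserService {
///     async fn provide(extensions: &Extensions) -> Result<Self> {
///         let pool = extensions
///             .get::<DbPool>()
///             .cloned()
///             .ok_or(Error::ExtractServiceExtensionFailed)?;
///         Ok(UserService { pool })
///     }
/// }
/// ```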
#[async_trait]
pub trait ServiceProvider: Sized {
async fn provide(extensions: &Extensions) -> Result<Self>;
}
#[async_trait]
impl<B, S> FromRequest<B> for Service<S>
where
B: Send,
S: ServiceProvider
{
type Rejection = Error;
async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> {
log::debug!("Extract Service<{}>.", std::any::type_name::<S>());
let extensions = req
.extensions()
.ok_or_else(|| {
log::error!("No extensions.");
Error::ExtractServiceExtensionFailed
})?;
<S as ServiceProvider>::provide(extensions).await.map(Service)
}
}
| 25.8125 | 87 | 0.61138 |
f98d732ab3d1ab331bcf56dffe27c60c983f280f | 15,793 | extern crate rust_htslib;
use self::rust_htslib::bam::FetchDefinition;
use crate::bcf::report::table_report::fasta_reader::read_fasta;
use crate::common::Region;
use anyhow::Result;
use rust_htslib::bam::record::CigarStringView;
use rust_htslib::{bam, bam::Read};
use serde::Serialize;
use std::path::Path;
#[derive(Serialize, Clone, Debug, PartialEq)]
pub enum Marker {
A,
T,
C,
G,
N,
Deletion,
Insertion,
Match,
Pairing,
}
#[derive(Clone, Debug)]
pub struct Alignment {
sequence: String,
pos: i64,
length: u16,
flags: Vec<u16>,
name: String,
cigar: CigarStringView,
paired: bool,
mate_pos: i64,
tid: i32,
mate_tid: i32,
}
#[derive(Serialize, Clone, Debug, PartialEq)]
pub struct AlignmentNucleobase {
pub marker_type: Marker,
pub bases: String,
pub start_position: f64,
pub end_position: f64,
pub flags: Vec<u16>,
pub name: String,
pub read_start: u32,
pub read_end: u32,
}
#[derive(Serialize, Clone, Debug, PartialEq)]
pub struct AlignmentMatch {
pub marker_type: Marker,
pub start_position: f64,
pub end_position: f64,
pub flags: Vec<u16>,
pub name: String,
pub read_start: u32,
pub read_end: u32,
}
pub fn decode_flags(code: u16) -> Vec<u16> {
let flags_map = vec![
0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80, 0x100, 0x200, 0x400, 0x800,
];
let mut read_map = Vec::new();
for flag in flags_map {
if (flag & code) == flag {
read_map.push(flag);
}
}
read_map
}
pub fn read_indexed_bam<P: AsRef<Path>>(path: P, region: &Region) -> Result<Vec<Alignment>> {
let mut bam = bam::IndexedReader::from_path(&path)?;
let chrom = ®ion.target;
let (from, to) = (region.start, region.end);
let tid = bam.header().tid(chrom.as_bytes()).unwrap() as i32;
let mut alignments: Vec<Alignment> = Vec::new();
bam.fetch(FetchDefinition::Region(tid, from as i64, to as i64))?;
for r in bam.records() {
let rec = r?;
let a = make_alignment(&rec);
alignments.push(a);
}
Ok(alignments)
}
fn make_alignment(record: &bam::Record) -> Alignment {
let has_pair = record.is_paired();
let mate_pos = record.mpos();
let tid = record.tid();
let mtid = record.mtid();
//Cigar String
let cigstring = record.cigar();
//Position
let pos = record.pos();
    //Length
let le = record.seq().len() as u16;
    //Sequence
let seq = record.seq().as_bytes();
let sequenz = String::from_utf8(seq).unwrap();
//Flags
let flgs = record.flags();
let flag_string = decode_flags(flgs);
//Name
let n = record.qname().to_owned();
let name = String::from_utf8(n).unwrap();
Alignment {
sequence: sequenz,
pos,
length: le,
cigar: cigstring,
flags: flag_string,
name,
paired: has_pair,
mate_pos,
tid,
mate_tid: mtid,
}
}
pub fn make_nucleobases<P: AsRef<Path>>(
fasta_path: P,
region: &Region,
snippets: Vec<Alignment>,
) -> Result<(Vec<AlignmentNucleobase>, Vec<AlignmentMatch>)> {
let mut bases: Vec<AlignmentNucleobase> = Vec::new();
let mut matches: Vec<AlignmentMatch> = Vec::new();
let ref_bases = read_fasta(fasta_path, region, false)?;
let (from, to) = (region.start, region.end);
for snippet in snippets {
let mut cigar_offset: i64 = 0;
let mut read_offset: i64 = 0;
let base_string = snippet.sequence.clone();
let char_vec: Vec<char> = base_string.chars().collect();
let mut soft_clip_begin = true;
let temp_snippet = snippet.clone();
if temp_snippet.paired
&& (temp_snippet.pos + temp_snippet.length as i64) < temp_snippet.mate_pos
&& temp_snippet.tid == temp_snippet.mate_tid
{
let pairing = AlignmentMatch {
marker_type: Marker::Pairing,
start_position: (temp_snippet.pos + temp_snippet.length as i64) as f64 - 0.5,
end_position: temp_snippet.mate_pos as f64 - 0.5,
flags: temp_snippet.flags.clone(),
name: temp_snippet.name.clone(),
read_start: temp_snippet.pos as u32,
read_end: (temp_snippet.mate_pos + 100) as u32,
};
matches.push(pairing);
}
for c in snippet.cigar.iter() {
let mut match_count = 0;
let mut match_start = 0;
let mut match_ending = false;
match c {
rust_htslib::bam::record::Cigar::Match(c) => {
for _i in 0..rust_htslib::bam::record::Cigar::Match(*c).len() {
let snip = snippet.clone();
let b = char_vec[cigar_offset as usize];
if snip.pos + read_offset >= from as i64
&& snip.pos + read_offset < to as i64
{
let ref_index = snip.pos + read_offset - from as i64;
let ref_base = &ref_bases[ref_index as usize];
if ref_base.get_marker_type() == b {
// Create long rule while bases match
if match_count == 0 {
match_start = snip.pos as i64 + read_offset;
}
match_count += 1;
match_ending = true;
//m = Marker::Match; // Match with reference fasta
} else {
let (mtch, base) = make_markers(
snip.clone(),
b,
read_offset,
match_start,
match_count,
);
if let Some(m) = mtch {
matches.push(m)
}
bases.push(base);
match_count = 0;
match_start = 0;
match_ending = false;
}
}
cigar_offset += 1;
read_offset += 1;
}
if match_ending {
let mtch =
end_mismatch_detection(snippet.clone(), match_start, match_count);
matches.push(mtch);
}
soft_clip_begin = false;
}
rust_htslib::bam::record::Cigar::Ins(c) => {
let snip = snippet.clone();
let p: f64 = snip.pos as f64 + read_offset as f64 - 0.5;
let m: Marker = Marker::Insertion;
let rs;
let re;
let mut b = String::from("");
for _i in 0..rust_htslib::bam::record::Cigar::Ins(*c).len() {
let char = char_vec[cigar_offset as usize];
b.push(char);
cigar_offset += 1;
}
if snip.paired && snip.tid == snip.mate_tid {
if snip.pos < snip.mate_pos {
re = snip.mate_pos + 100;
rs = snip.pos;
} else {
rs = snip.mate_pos;
re = snip.pos + snip.length as i64;
}
} else {
rs = snip.pos;
re = snip.pos + snip.length as i64;
}
let base = AlignmentNucleobase {
marker_type: m,
bases: b,
start_position: p as f64 + 0.5,
end_position: p as f64 + 1.5,
flags: snip.flags,
name: snip.name,
read_start: rs as u32,
read_end: re as u32,
};
if from as f64 <= (base.start_position + 0.5)
&& (base.start_position + 0.5) <= to as f64
{
bases.push(base);
}
soft_clip_begin = false;
}
rust_htslib::bam::record::Cigar::Del(c) => {
for _i in 0..rust_htslib::bam::record::Cigar::Del(*c).len() {
let snip = snippet.clone();
let marker = Marker::Deletion;
let position = snip.pos as i64 + read_offset;
let flags = snip.flags;
let name = snip.name;
let read_start;
let read_end;
let empty_bases = String::from("");
if snip.paired && snip.tid == snip.mate_tid {
if snip.pos < snip.mate_pos {
read_end = snip.mate_pos + 100;
read_start = snip.pos;
} else {
read_start = snip.mate_pos;
read_end = snip.pos + snip.length as i64;
}
} else {
read_start = snip.pos;
read_end = snip.pos + snip.length as i64;
}
let base = AlignmentNucleobase {
marker_type: marker,
bases: empty_bases,
start_position: position as f64 + 0.5,
end_position: position as f64 + 1.5,
flags,
name,
read_start: read_start as u32,
read_end: read_end as u32,
};
read_offset += 1;
if from as f64 <= (base.start_position + 0.5)
&& (base.start_position + 0.5) <= to as f64
{
bases.push(base);
}
}
soft_clip_begin = false;
}
rust_htslib::bam::record::Cigar::SoftClip(c) => {
for _i in 0..rust_htslib::bam::record::Cigar::SoftClip(*c).len() {
let snip = snippet.clone();
let b = char_vec[cigar_offset as usize];
if snip.pos + read_offset >= from as i64
&& snip.pos + read_offset < to as i64
{
let ref_index = snip.pos + read_offset - from as i64;
let ref_base = &ref_bases[ref_index as usize];
if ref_base.get_marker_type() == b {
// Create long rule while bases match
if match_count == 0 {
match_start = snip.pos as i64 + read_offset;
}
match_count += 1;
match_ending = true;
} else {
let (mtch, base) = make_markers(
snip.clone(),
b,
read_offset,
match_start,
match_count,
);
if let Some(m) = mtch {
matches.push(m)
}
bases.push(base);
match_count = 0;
match_start = 0;
match_ending = false;
}
}
cigar_offset += 1;
if !soft_clip_begin {
read_offset += 1;
}
}
if match_ending {
let mtch =
end_mismatch_detection(snippet.clone(), match_start, match_count);
matches.push(mtch);
}
soft_clip_begin = false;
}
_ => {
soft_clip_begin = false;
}
}
}
}
Ok((bases, matches))
}
fn make_markers(
snip: Alignment,
base: char,
read_offset: i64,
match_start: i64,
match_count: i64,
) -> (Option<AlignmentMatch>, AlignmentNucleobase) {
let marker: Marker;
match base {
// Mismatch
'A' => marker = Marker::A,
'T' => marker = Marker::T,
'C' => marker = Marker::C,
'N' => marker = Marker::N,
'G' => marker = Marker::G,
_ => marker = Marker::Deletion,
}
let position = snip.pos as i64 + read_offset;
let flags = snip.flags;
let name = snip.name;
let read_start: i64;
let read_end: i64;
if snip.paired && snip.tid == snip.mate_tid {
if snip.pos < snip.mate_pos {
read_end = snip.mate_pos + 100;
read_start = snip.pos;
} else {
read_start = snip.mate_pos;
read_end = snip.pos + snip.length as i64;
}
} else {
read_start = snip.pos;
read_end = snip.pos + snip.length as i64;
}
let mut mtch = None;
if match_count > 0 {
        // The first mismatch after a run of matches flushes the accumulated match range
mtch = Some(AlignmentMatch {
marker_type: Marker::Match,
start_position: match_start as f64 + 0.5,
end_position: (match_start + match_count - 1) as f64 + 1.5,
flags: flags.clone(),
name: name.clone(),
read_start: read_start as u32,
read_end: read_end as u32,
});
}
let base = AlignmentNucleobase {
marker_type: marker,
bases: base.to_string(),
start_position: position as f64 + 0.5,
end_position: position as f64 + 1.5,
flags,
name,
read_start: read_start as u32,
read_end: read_end as u32,
};
(mtch, base)
}
fn end_mismatch_detection(snip: Alignment, match_start: i64, match_count: i64) -> AlignmentMatch {
let f = snip.flags;
let n = snip.name;
let rs: i64;
let re: i64;
if snip.paired && snip.tid == snip.mate_tid {
if snip.pos < snip.mate_pos {
re = snip.mate_pos + 100;
rs = snip.pos;
} else {
rs = snip.mate_pos;
re = snip.pos + snip.length as i64;
}
} else {
rs = snip.pos;
re = snip.pos + snip.length as i64;
}
AlignmentMatch {
marker_type: Marker::Match,
start_position: match_start as f64 + 0.5,
end_position: (match_start + match_count - 1) as f64 + 1.5,
flags: f,
name: n,
read_start: rs as u32,
read_end: re as u32,
}
}
| 32.562887 | 98 | 0.439562 |
1491b34abd99c3cafa69a0c632da50f04d20464e | 5,850 | //! External resource handling
//!
//! The `External` type abstracts away the loading of external resources. See the type documentation
//! for details.
use std::{
fmt::{Debug, Display},
path::{Path, PathBuf},
};
use openssl::{
pkey::{PKey, Private},
x509::X509,
};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use super::{read_file, ReadFileError};
use crate::{
crypto::{self, asymmetric_key::SecretKey},
tls,
};
#[cfg(test)]
lazy_static::lazy_static! {
/// Path to bundled resources.
pub static ref RESOURCES_PATH: PathBuf =
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../resources");
}
/// External resource.
///
/// An `External` resource can be given in two ways: Either as an immediate value, or through a
/// path, provided the value implements `Loadable`.
///
/// Serializing and deserializing an `External` value is only possible if it is in path form. This
/// is especially useful when writing structure configurations.
///
/// An `External` also always provides a default, which will always result in an error when `load`
/// is called. Should the underlying type `T` implement `Default`, the `with_default` method can be
/// used instead.
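///
/// A minimal usage sketch (the root directory and file name are illustrative):
///
/// ```ignore
/// // A config file would typically provide the path form, e.g.
/// // `secret_key = "keys/secret_key.pem"`; `load` then resolves it
/// // relative to the given root directory.
/// let resource: External<Vec<u8>> = External::path("keys/secret_key.pem");
/// let bytes: Vec<u8> = resource.load("/etc/casper")?;
/// ```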
#[derive(Clone, Eq, Debug, Deserialize, PartialEq, Serialize)]
#[serde(untagged)]
pub enum External<T> {
/// Value that should be loaded from an external path.
Path(PathBuf),
/// Loaded or immediate value.
#[serde(skip)]
Loaded(T),
/// The value has not been specified, but a default has been requested.
#[serde(skip)]
Missing,
}
impl<T> External<T> {
/// Creates an external from a value.
pub fn value(value: T) -> Self {
External::Loaded(value)
}
/// Creates an external referencing a path.
pub fn path<P: AsRef<Path>>(path: P) -> Self {
External::Path(path.as_ref().to_owned())
}
}
impl<T> External<T>
where
T: Loadable,
{
/// Loads the value if not loaded already, resolving relative paths from `root` or returns
/// available value. If the value is `Missing`, returns an error.
pub fn load<P: AsRef<Path>>(self, root: P) -> Result<T, LoadError<T::Error>> {
match self {
External::Path(path) => {
let full_path = if path.is_relative() {
root.as_ref().join(&path)
} else {
path
};
T::from_file(&full_path).map_err(move |error| LoadError::Failed {
error,
// We canonicalize `full_path` here, with `ReadFileError` we get extra
// information about the absolute path this way if the latter is relative. It
// will still be relative if the current path does not exist.
path: full_path.canonicalize().unwrap_or(full_path),
})
}
External::Loaded(value) => Ok(value),
External::Missing => Err(LoadError::Missing),
}
}
}
impl<T> External<T>
where
T: Loadable + Default,
{
/// Insert a default value if missing.
pub fn with_default(self) -> Self {
match self {
External::Missing => External::Loaded(Default::default()),
_ => self,
}
}
}
/// A value that can be loaded from a file.
pub trait Loadable: Sized {
/// Error that can occur when attempting to load.
type Error: Debug + Display;
/// Loads a value from the given input path.
fn from_file<P: AsRef<Path>>(path: P) -> Result<Self, Self::Error>;
/// Load a test-only instance from the local path.
#[cfg(test)]
fn from_resources<P: AsRef<Path>>(rel_path: P) -> Self {
Self::from_file(RESOURCES_PATH.join(rel_path)).expect("could not load resources from local")
}
}
impl<T> Default for External<T> {
fn default() -> Self {
External::Missing
}
}
fn display_res_path<E>(result: &Result<PathBuf, E>) -> String {
result
.as_ref()
.map(|p| p.display().to_string())
.unwrap_or_else(|_| String::new())
}
/// Error loading external value.
#[derive(Debug, Error)]
pub enum LoadError<E: Debug + Display> {
/// Failed to load from path.
#[error("could not load from {}: {error}", display_res_path(&.path.canonicalize()))]
Failed {
/// Path that failed to load.
path: PathBuf,
/// Error load failed with.
error: E,
},
/// A value was missing.
#[error("value is missing (default requested)")]
Missing,
}
// We supply a few useful implementations for external types.
impl Loadable for X509 {
type Error = anyhow::Error;
fn from_file<P: AsRef<Path>>(path: P) -> Result<Self, Self::Error> {
tls::load_cert(path)
}
}
impl Loadable for PKey<Private> {
type Error = anyhow::Error;
fn from_file<P: AsRef<Path>>(path: P) -> Result<Self, Self::Error> {
tls::load_private_key(path)
}
}
impl Loadable for SecretKey {
type Error = crypto::Error;
fn from_file<P: AsRef<Path>>(path: P) -> Result<Self, Self::Error> {
SecretKey::from_file(path)
}
}
impl Loadable for Vec<u8> {
type Error = ReadFileError;
fn from_file<P: AsRef<Path>>(path: P) -> Result<Self, Self::Error> {
read_file(path)
}
}
#[cfg(test)]
mod tests {
use super::External;
#[test]
fn test_to_string() {
let val: External<()> = External::Path("foo/bar.toml".into());
assert_eq!(
"\"foo/bar.toml\"",
serde_json::to_string(&val).expect("serialization error")
);
}
#[test]
fn test_load_from_string() {
let input = "\"foo/bar.toml\"";
let val: External<()> = serde_json::from_str(input).expect("deserialization failed");
assert_eq!(External::Path("foo/bar.toml".into()), val);
}
}
| 28.125 | 100 | 0.602906 |
bb382d1383311b666280b3dc0054f0a78703a8d1 | 892 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-fast
// -*- rust -*-
#[legacy_modes];
type compare<T> = fn@(T, T) -> bool;
fn test_generic<T: Copy>(expected: T, not_expected: T, eq: compare<T>) {
let actual: T = if true { expected } else { not_expected };
assert (eq(expected, actual));
}
fn test_vec() {
fn compare_box(&&v1: @int, &&v2: @int) -> bool { return v1 == v2; }
test_generic::<@int>(@1, @2, compare_box);
}
fn main() { test_vec(); }
| 31.857143 | 72 | 0.66704 |
de146799b35ae6aeb941274e49577632d23acf09 | 4,655 | //! # Tracing Sprout
//!
//! A tokio-rs/tracing structured JSON formatting layer for the fledgling logger
//!
//! This subscriber layer is derived from [Tracing Bunyan Formatter](https://github.com/LukeMathWalker/tracing-bunyan-formatter),
//! with a few tweaks to internals and some of the formatting (and rules surrounding it).
//!
//! ## Features
//! - All traces will receive their parent's attributes as well as their own; child attributes will
//! take precedence if there are collisions
//! - There is a very minimal timing capability that adds elapsed time to `EVENT` and `EXIT` traces
//! - `TRACE`, `DEBUG` and `ERROR` logs get slightly more metadata (file name, line number, module path & target) attached
//! to them
//! - Avoids panics - as much as possible it opts to handle failure by `eprintln`ing to `stderr`.
//! These scenarios should be few and far between, but it's better that a failure in your tracing
//! implementation doesn't poison your main application.
//!
//! ## Example
//!
//! ```no_run
//! use tracing::{subscriber::set_global_default, Subscriber};
//! use tracing_sprout::TrunkLayer;
//! use tracing_subscriber::prelude::*;
//! use tracing_subscriber::{EnvFilter, Registry};
//!
//! let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));
//! let formatting_layer = TrunkLayer::new("My Application".to_string(), std::io::stdout);
//! let subscriber = Registry::default()
//! .with(env_filter)
//! .with(formatting_layer);
//!
//! set_global_default(subscriber).expect("failed to set up global tracing subscriber")
//! ```
//!
//! ## Developing on an application using this
//!
//! Generally, structured JSON logs are great for machines to read, but not so kind to humans. If
//! you're developing an application that uses this layer, it's advisable to install a CLI tool
//! that renders these logs in a friendlier format; a good example is Node.js's
//! [pino-pretty](https://github.com/pinojs/pino-pretty). From there you can just pipe your logs
//! into it for a nicer development experience.
//!
//! ```sh
//! cargo run | pino-pretty
//! ```
//!
//! ## Log output and a more detailed example
//!
//! See the [examples](https://github.com/naamancurtis/tracing-sprout/tree/main/examples) for a basic demonstration of how this can be used. The **basic.rs** example in there would output logs like the following:
//!
//! ```txt
//! {"app_name":"I'm Groot","pid":2735,"id":"1","time":"Sat, 13 Feb 2021 14:16:15 +0000","timestamp":1613225775,"msg":"[EPIC MONTAGE | START]","level":"info","span.type":"enter"}
//! {"app_name":"I'm Groot","pid":2735,"id":"1","group":"[\"Peter Quill\", \"Gamora\", \"Drax\", \"Rocket\"]","time":"Sat, 13 Feb 2021 14:16:15 +0000","timestamp":1613225775,"msg":"[EVENT] Trying to plug in the power","level":"trace","file":"examples/basic.rs","line":32,"module":"basic","target":"basic","thread_id":"ThreadId(1)","thread_name":"main","span.type":"event"}
//! {"app_name":"I'm Groot","pid":2735,"id":"2","info":"I'm overwriting my parents ID","time":"Sat, 13 Feb 2021 14:16:15 +0000","timestamp":1613225775,"msg":"[MUSIC IS PLAYING | START]","level":"debug","file":"examples/basic.rs","line":34,"module":"basic","target":"basic","thread_id":"ThreadId(1)","thread_name":"main","span.type":"enter"}
//! ...
//! ```
//!
//! However _(excuse the verbosity here)_, by piping it through a tool like the one mentioned above
//! _(pino-pretty)_ you get the following output
//!
//! ```txt
//! [Sat, 13 Feb 2021 14:14:54 +0000] INFO (1331): [EPIC MONTAGE | START]
//! app_name: "I'm Groot"
//! id: "1"
//! span.type: "enter"
//! [Sat, 13 Feb 2021 14:14:54 +0000] TRACE (1331): [EVENT] Trying to plug in the power
//! app_name: "I'm Groot"
//! id: "1"
//! group: "[\"Peter Quill\", \"Gamora\", \"Drax\", \"Rocket\"]"
//! file: "examples/basic.rs"
//! line: 32
//! module: "basic"
//! target: "basic"
//! thread_id: "ThreadId(1)"
//! thread_name: "main"
//! span.type: "event"
//! [Sat, 13 Feb 2021 14:14:54 +0000] DEBUG (1331): [MUSIC IS PLAYING | START]
//! app_name: "I'm Groot"
//! id: "2"
//! info: "I'm overwriting my parents ID"
//! file: "examples/basic.rs"
//! line: 34
//! module: "basic"
//! target: "basic"
//! thread_id: "ThreadId(1)"
//! thread_name: "main"
//! span.type: "enter"
//! ...
//! ```
pub(crate) mod constants;
mod error;
mod formatting;
mod storage;
pub(crate) mod util;
pub(crate) use error::SproutError;
pub(crate) type Result<T> = std::result::Result<T, SproutError>;
pub use formatting::TrunkLayer;
| 46.089109 | 372 | 0.657358 |
ebcc7ec9d2c8e8525683fe8bf6df846456a08f6e | 571 | #![allow(unused_variables)]
#![allow(dead_code)]
// 32 bits
union IntOrFloat {
i: i32,
f: f32
}
fn process_value(iof: IntOrFloat){
unsafe {
match iof {
IntOrFloat { i:42 } => {
println!("meaning of life value");
}
            IntOrFloat { f } => {
println!("value = {}", f);
}
}
}
}
fn main() {
let mut iof = IntOrFloat { i:123 };
iof.i = 234;
let value = unsafe { iof.i };
println!("iof.i = {}", value);
process_value(IntOrFloat{f:0.22})
}
| 17.84375 | 50 | 0.469352 |
9bb75216c0186a016b8c69741bdbed29ae61c66d | 3,675 | // This file is part of Substrate.
// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! Substrate blockchain API.
pub mod error;
use jsonrpc_core::Result as RpcResult;
use jsonrpc_derive::rpc;
use jsonrpc_pubsub::{typed::Subscriber, SubscriptionId};
use sp_rpc::{number::NumberOrHex, list::ListOrValue};
use self::error::{FutureResult, Result};
pub use self::gen_client::Client as ChainClient;
/// Substrate blockchain API
#[rpc]
pub trait ChainApi<Number, Hash, Header, SignedBlock> {
/// RPC metadata
type Metadata;
/// Get header of a relay chain block.
#[rpc(name = "chain_getHeader")]
fn header(&self, hash: Option<Hash>) -> FutureResult<Option<Header>>;
/// Get header and body of a relay chain block.
#[rpc(name = "chain_getBlock")]
fn block(&self, hash: Option<Hash>) -> FutureResult<Option<SignedBlock>>;
/// Get hash of the n-th block in the canon chain.
///
/// By default returns latest block hash.
#[rpc(name = "chain_getBlockHash", alias("chain_getHead"))]
fn block_hash(
&self,
hash: Option<ListOrValue<NumberOrHex>>,
) -> Result<ListOrValue<Option<Hash>>>;
/// Get hash of the last finalized block in the canon chain.
#[rpc(name = "chain_getFinalizedHead", alias("chain_getFinalisedHead"))]
fn finalized_head(&self) -> Result<Hash>;
/// All head subscription
#[pubsub(subscription = "chain_allHead", subscribe, name = "chain_subscribeAllHeads")]
fn subscribe_all_heads(&self, metadata: Self::Metadata, subscriber: Subscriber<Header>);
/// Unsubscribe from all head subscription.
#[pubsub(subscription = "chain_allHead", unsubscribe, name = "chain_unsubscribeAllHeads")]
fn unsubscribe_all_heads(
&self,
metadata: Option<Self::Metadata>,
id: SubscriptionId,
) -> RpcResult<bool>;
/// New head subscription
#[pubsub(
subscription = "chain_newHead",
subscribe,
name = "chain_subscribeNewHeads",
alias("subscribe_newHead", "chain_subscribeNewHead")
)]
fn subscribe_new_heads(&self, metadata: Self::Metadata, subscriber: Subscriber<Header>);
/// Unsubscribe from new head subscription.
#[pubsub(
subscription = "chain_newHead",
unsubscribe,
name = "chain_unsubscribeNewHeads",
alias("unsubscribe_newHead", "chain_unsubscribeNewHead")
)]
fn unsubscribe_new_heads(
&self,
metadata: Option<Self::Metadata>,
id: SubscriptionId,
) -> RpcResult<bool>;
/// Finalized head subscription
#[pubsub(
subscription = "chain_finalizedHead",
subscribe,
name = "chain_subscribeFinalizedHeads",
alias("chain_subscribeFinalisedHeads")
)]
fn subscribe_finalized_heads(&self, metadata: Self::Metadata, subscriber: Subscriber<Header>);
/// Unsubscribe from finalized head subscription.
#[pubsub(
subscription = "chain_finalizedHead",
unsubscribe,
name = "chain_unsubscribeFinalizedHeads",
alias("chain_unsubscribeFinalisedHeads")
)]
fn unsubscribe_finalized_heads(
&self,
metadata: Option<Self::Metadata>,
id: SubscriptionId,
) -> RpcResult<bool>;
}
| 32.236842 | 95 | 0.736599 |
f702ca5144ea898033ea7345c3c2c346649762f8 | 1,878 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub struct Quad { a: u64, b: u64, c: u64, d: u64 }
pub struct Floats { a: f64, b: u8, c: f64 }
mod rustrt {
use super::{Floats, Quad};
#[nolink]
pub extern {
pub fn debug_abi_1(++q: Quad) -> Quad;
pub fn debug_abi_2(++f: Floats) -> Floats;
}
}
fn test1() {
unsafe {
let q = Quad { a: 0xaaaa_aaaa_aaaa_aaaa_u64,
b: 0xbbbb_bbbb_bbbb_bbbb_u64,
c: 0xcccc_cccc_cccc_cccc_u64,
d: 0xdddd_dddd_dddd_dddd_u64 };
let qq = rustrt::debug_abi_1(q);
error!("a: %x", qq.a as uint);
error!("b: %x", qq.b as uint);
error!("c: %x", qq.c as uint);
error!("d: %x", qq.d as uint);
assert!(qq.a == q.c + 1u64);
assert!(qq.b == q.d - 1u64);
assert!(qq.c == q.a + 1u64);
assert!(qq.d == q.b - 1u64);
}
}
#[cfg(target_arch = "x86_64")]
fn test2() {
unsafe {
let f = Floats { a: 1.234567890e-15_f64,
b: 0b_1010_1010_u8,
c: 1.0987654321e-15_f64 };
let ff = rustrt::debug_abi_2(f);
error!("a: %f", ff.a as float);
error!("b: %u", ff.b as uint);
error!("c: %f", ff.c as float);
assert!(ff.a == f.c + 1.0f64);
assert!(ff.b == 0xff_u8);
assert!(ff.c == f.a - 1.0f64);
}
}
#[cfg(target_arch = "x86")]
fn test2() {
}
pub fn main() {
test1();
test2();
}
| 28.454545 | 68 | 0.548988 |
f8e3784011d37cf07532dc6a6725b6cec2d93eb5 | 950 | // Rust language amplification library providing multiple generic trait
// implementations, type wrappers, derive macros and other language enhancements
//
// Written in 2019-2020 by
// Dr. Maxim Orlovsky <[email protected]>
// Martin Habovstiak <[email protected]>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the MIT License
// along with this software.
// If not, see <https://opensource.org/licenses/MIT>.
/// Used as an alternative to default for test and prototyping purposes, when a
/// type can't have a default value, but you need to generate some dumb data.
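///
/// A minimal sketch of implementing the trait for a test type (`Dummy` is
/// illustrative, and the trait is assumed to be in scope):
///
/// ```ignore
/// struct Dummy {
///     id: u32,
/// }
///
/// impl DumbDefault for Dummy {
///     fn dumb_default() -> Self {
///         // Any recognisable placeholder value will do.
///         Dummy { id: u32::MAX }
///     }
/// }
///
/// assert_eq!(Dummy::dumb_default().id, u32::MAX);
/// ```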
pub trait DumbDefault
where
Self: Sized,
{
/// Returns an object initialized with dumb data
fn dumb_default() -> Self;
}
| 36.538462 | 80 | 0.743158 |
fef150e0b417d9acb349c04fe6519465d8f43132 | 321 | use std::path::PathBuf;
use std::{fs, io::Result};
/// Read text content of file into string.
/// Relative paths are resolved from the crate's `CARGO_MANIFEST_DIR`.
pub fn read_input(filename: &str) -> Result<String> {
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
path.push(filename);
fs::read_to_string(path)
}
| 29.181818 | 61 | 0.688474 |
5612a26e8ba1f126ea526dbc0fa2e89b0fde54a6 | 684 | struct Solution;
impl Solution {
fn find_kth_positive(arr: Vec<i32>, mut k: i32) -> i32 {
let mut x = 1;
let mut i = 0;
let n = arr.len();
loop {
if i < n && x == arr[i] {
i += 1;
} else {
k -= 1;
}
if k == 0 {
break;
}
x += 1;
}
x
}
}
#[test]
fn test() {
let arr = vec![2, 3, 4, 7, 11];
let k = 5;
let res = 9;
assert_eq!(Solution::find_kth_positive(arr, k), res);
let arr = vec![1, 2, 3, 4];
let k = 2;
let res = 6;
assert_eq!(Solution::find_kth_positive(arr, k), res);
}
| 20.117647 | 60 | 0.390351 |
48e801bd41d0464855e16f8445be4d46da875f3d | 4,629 | pub mod background;
pub mod tile;
mod register;
mod palette;
mod sprite;
mod sprite_utils;
mod renderer;
use super::types::{Addr, Data};
use super::mapper::Mapper;
use self::super::ram::Ram;
use self::register::*;
pub use self::palette::*;
pub use self::sprite::*;
pub use self::sprite_utils::*;
pub use self::background::*;
use self::renderer::Renderer;
const CYCLES_PER_LINE: usize = 341;
#[derive(Debug)]
pub struct PpuCtx<P: PaletteRam> {
pub palette: P,
pub vram: Box<Ram>,
pub cram: Box<Ram>,
pub oam_ram: Box<Ram>,
}
#[derive(Debug)]
pub struct PpuConfig {
pub is_horizontal_mirror: bool,
}
#[derive(Debug)]
pub struct Ppu {
pub cycle: usize,
pub line: usize,
pub register: Register,
pub ctx: PpuCtx<Palette>,
pub sprites: SpritesWithCtx,
pub background: Background,
pub config: PpuConfig,
renderer: Renderer,
}
impl Ppu {
pub fn new(character_ram: Vec<Data>, config: PpuConfig) -> Ppu {
Ppu {
cycle: 0,
line: 0,
register: Register::new(),
ctx: PpuCtx {
palette: Palette::new(),
vram: Box::new(Ram::new(vec![0;0x2000])),
cram: Box::new(Ram::new(character_ram)),
oam_ram: Box::new(Ram::new(vec![0;0x0100])),
},
sprites: Vec::new(),
background: Background::new(),
config,
renderer: Renderer::new(),
}
}
pub fn read(&mut self, addr: Addr, mapper: &dyn Mapper) -> Data {
self.register.read(addr, &mut self.ctx, mapper)
}
pub fn write(&mut self, addr: Addr, data: Data, mapper: &mut dyn Mapper){
self.register.write(addr, data, &mut self.ctx, mapper)
}
pub fn run(&mut self, cycle: usize, nmi: &mut bool, mapper: &dyn Mapper) -> bool {
let cycle = self.cycle + cycle;
if cycle < CYCLES_PER_LINE {
self.cycle = cycle;
return false;
}
if self.line == 0 {
self.background.clear();
}
if self.has_sprite_hit(cycle) {
self.register.set_sprite_hit();
}
self.cycle = cycle - CYCLES_PER_LINE;
self.line = self.line + 1;
let scroll_x = self.register.get_scroll_x();
let scroll_y = self.register.get_scroll_y();
if self.line <= 240 && self.line % 8 == 0 && scroll_y <= 240 {
let mut config = SpriteConfig{
offset_addr_by_name_table: None,
offset_addr_by_background_table: self.register.get_background_table_offset(),
offset_addr_by_sprite_table: self.register.get_sprite_table_offset(),
is_horizontal_mirror: self.config.is_horizontal_mirror,
is_background_enable: self.register.is_background_enable(),
};
// target line edge on display area
let tile_x = ((scroll_x as usize + (self.register.get_name_table_id() % 2) as usize * 256) / 8) as Data;
let tile_y = self.get_scroll_tile_y();
self.background.build_line(
&self.ctx.vram,
&self.ctx.cram,
&self.ctx.palette,
(tile_x, tile_y),
(scroll_x, scroll_y),
&mut config,
mapper
);
}
// VBLANK
if self.line == 241 {
self.register.set_vblank();
self.register.clear_sprite_hit();
if self.register.is_irq_enable() {
*nmi = true;
}
}
// page end
if self.line >= 262 {
self.register.clear_vblank();
self.register.clear_sprite_hit();
*nmi = false;
self.line = 0;
self.sprites = build_sprites(
&self.ctx.cram,
&self.ctx.oam_ram,
&self.ctx.palette,
self.register.get_sprite_table_offset(),
self.register.is_sprite_8x8(),
mapper,
);
            self.sprites.reverse(); // lower indices are drawn in front
if self.background.0.len() != 0 {
self.renderer.render(&self.background.0, &self.sprites, self.register.is_background_clip(), self.register.is_sprites_clip());
}
return true
}
false
}
pub fn transfer_sprite(&mut self, addr: Addr, data: Data) {
let addr = addr + self.register.oam.get_addr();
self.ctx.oam_ram.write(addr % 0x100, data);
}
fn get_scroll_tile_y(&self) -> Data {
((self.register.get_scroll_y() as usize
+ self.line
+ ((self.register.get_name_table_id() / 2) as usize * 240))
/ 8) as Data
}
pub fn is_background_enabled(&self) -> bool {
self.register.is_background_enable()
}
pub fn is_sprite_enabled(&self) -> bool {
self.register.is_sprite_enable()
}
// sprite 0 hit
fn has_sprite_hit(&self, cycle: usize) -> bool {
let y = self.ctx.oam_ram.read(0) as usize;
let x = self.ctx.oam_ram.read(3) as usize;
self.register.is_sprite_enable() && (y == self.line) && x <= cycle
}
} | 26.603448 | 133 | 0.622381 |
26506a459928945c8b1f5dd1d60e45a4752b2b7d | 591 | extern crate wasm_bindgen;
use super::wasm_timer::SystemTime;
pub struct Clock {
ms_per_frame: u128,
last_time: SystemTime
}
impl Clock {
pub fn new(ms_per_frame:u128) -> Clock {
let last_time = SystemTime::now();
Clock {
ms_per_frame, last_time
}
}
pub fn is_waiting(&self) -> bool {
return SystemTime::now().duration_since(self.last_time).unwrap().as_millis()
< self.ms_per_frame;
}
pub fn wait(&mut self) {
while self.is_waiting() {}
self.last_time = SystemTime::now();
}
}
| 20.37931 | 84 | 0.592217 |
e2bf08dada53e76cd858ee4a99d259f872beb822 | 106 | mod counter_tbl;
mod counters;
pub use self::counter_tbl::CounterTable;
pub use self::counters::Counters; | 21.2 | 40 | 0.792453 |
696e24641560469455f75949568bf68e4ca4cf51 | 4,008 | #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use crate::core::{Action, Error, Method, MovingAverageConstructor, Source, ValueType, OHLCV};
use crate::core::{IndicatorConfig, IndicatorInstance, IndicatorResult};
use crate::helpers::MA;
/// Envelopes
///
/// ## Links
///
/// * <https://www.investopedia.com/terms/e/envelope.asp>
///
/// # 3 values
///
/// * `Upper bound`
///
/// Range of values is the same as the range of the `source` values.
///
/// * `Lower bound`
///
/// Range of values is the same as the range of the `source` values.
///
/// * Raw `Source2` value
///
/// # 1 signal
///
/// * Signal 1 appears when `Source2` value crosses bounds.
/// When `Source2` value crosses `upper bound` upwards, returns full sell signal.
/// When `Source2` value crosses `lower bound` downwards, returns full buy signal.
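///
/// # Example
///
/// A quick sketch of the bound arithmetic with the default `k = 0.1` (plain
/// Rust, independent of this crate's API):
///
/// ```
/// let (ma, k) = (100.0_f64, 0.1_f64);
/// let upper = ma * (1.0 + k); // ~110.0
/// let lower = ma * (1.0 - k); // ~90.0
/// assert!((upper - 110.0).abs() < 1e-9);
/// assert!((lower - 90.0).abs() < 1e-9);
/// // A `source2` price above `upper` yields a full sell signal,
/// // one below `lower` yields a full buy signal.
/// ```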
#[derive(Debug, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Envelopes<M: MovingAverageConstructor = MA> {
/// Main moving average type.
///
/// Default is [`SMA(20)`](crate::methods::SMA).
///
/// Period range in \[`2`; [`PeriodType::MAX`](crate::core::PeriodType)\).
pub ma: M,
/// Bound relative size. Default is `0.1`.
///
/// Range in (`0.0`; `+inf`).
pub k: ValueType,
/// Source value type for bounds. Default is [`Close`](crate::core::Source::Close).
pub source: Source,
/// Source2 value type for actual price. Default is [`Close`](crate::core::Source::Close).
pub source2: Source,
}
impl<M: MovingAverageConstructor> IndicatorConfig for Envelopes<M> {
type Instance = EnvelopesInstance<M>;
const NAME: &'static str = "Envelopes";
fn init<T: OHLCV>(self, candle: &T) -> Result<Self::Instance, Error> {
if !self.validate() {
return Err(Error::WrongConfig);
}
let cfg = self;
let src = candle.source(cfg.source);
Ok(Self::Instance {
ma: cfg.ma.init(src)?, // method(cfg.method, cfg.period, src)?,
k_high: 1.0 + cfg.k,
k_low: 1.0 - cfg.k,
cfg,
})
}
fn validate(&self) -> bool {
self.k > 0.0 && self.ma.ma_period() > 1
}
fn set(&mut self, name: &str, value: String) -> Result<(), Error> {
match name {
"ma" => match value.parse() {
Err(_) => return Err(Error::ParameterParse(name.to_string(), value.to_string())),
Ok(value) => self.ma = value,
},
"k" => match value.parse() {
Err(_) => return Err(Error::ParameterParse(name.to_string(), value.to_string())),
Ok(value) => self.k = value,
},
"source" => match value.parse() {
Err(_) => return Err(Error::ParameterParse(name.to_string(), value.to_string())),
Ok(value) => self.source = value,
},
"source2" => match value.parse() {
Err(_) => return Err(Error::ParameterParse(name.to_string(), value.to_string())),
Ok(value) => self.source2 = value,
},
_ => {
return Err(Error::ParameterParse(name.to_string(), value));
}
};
Ok(())
}
fn size(&self) -> (u8, u8) {
(3, 1)
}
}
impl Default for Envelopes<MA> {
fn default() -> Self {
Self {
ma: MA::SMA(20),
k: 0.1,
source: Source::Close,
source2: Source::Close,
}
}
}
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct EnvelopesInstance<M: MovingAverageConstructor = MA> {
cfg: Envelopes<M>,
ma: M::Instance,
k_high: ValueType,
k_low: ValueType,
}
impl<M: MovingAverageConstructor> IndicatorInstance for EnvelopesInstance<M> {
type Config = Envelopes<M>;
fn config(&self) -> &Self::Config {
&self.cfg
}
fn next<T: OHLCV>(&mut self, candle: &T) -> IndicatorResult {
let src = candle.source(self.cfg.source);
let v = self.ma.next(&src);
let (value1, value2) = (v * self.k_high, v * self.k_low);
let src2 = candle.source(self.cfg.source2);
// let signal = if src2 < value2 {
// 1
// } else if src2 > value1 {
// -1
// } else {
// 0
// };
let signal = (src2 < value2) as i8 - (src2 > value1) as i8;
IndicatorResult::new(&[value1, value2, src2], &[Action::from(signal)])
}
}
| 25.858065 | 93 | 0.626497 |
625c9b480320d06bbed7565edce52265f4e77c2e | 23 | pub mod actix_handler;
| 11.5 | 22 | 0.826087 |
dea253a2fe4b75b3c52c19c322eea3a4eb83c98d | 173 | mod casing;
mod export;
mod remove;
pub use export::{export_schema, export_schema_with_title};
pub use remove::remove_schemas;
// Re-exports
pub use schemars::schema_for;
| 17.3 | 58 | 0.780347 |
6107e0013305c9941b8b31cd05d4cec34b1f4859 | 730 | use common::SOCKET_PATH;
use std::io::fs;
use std::io::fs::PathExtensions;
use std::io::net::pipe::UnixListener;
use std::io::{Acceptor,Listener};
mod common;
fn main() {
let socket = Path::new(SOCKET_PATH);
// Delete old socket if necessary
if socket.exists() {
fs::unlink(&socket).unwrap();
}
// Bind to socket
let stream = match UnixListener::bind(&socket) {
Err(_) => panic!("failed to bind socket"),
Ok(stream) => stream,
};
println!("Server started, waiting for clients");
// Iterate over clients, blocks if no client available
for mut client in stream.listen().incoming() {
println!("Client said: {}", client.read_to_string().unwrap());
}
}
| 24.333333 | 70 | 0.620548 |
e5a0d186a29e7df2066366bd2c70687596760c95 | 3,772 | use crate::{
message::BasicReturnMessage, promises::Promises, publisher_confirm::Confirmation,
BasicProperties, Promise,
};
use log::{trace, warn};
use parking_lot::Mutex;
use std::{collections::VecDeque, sync::Arc};
#[derive(Clone, Debug, Default)]
pub(crate) struct ReturnedMessages {
inner: Arc<Mutex<Inner>>,
}
impl ReturnedMessages {
pub(crate) fn start_new_delivery(&self, message: BasicReturnMessage) {
self.inner.lock().current_message = Some(message);
}
pub(crate) fn set_delivery_properties(&self, properties: BasicProperties) {
if let Some(message) = self.inner.lock().current_message.as_mut() {
message.delivery.properties = properties;
}
}
pub(crate) fn new_delivery_complete(&self, confirm_mode: bool) {
self.inner.lock().new_delivery_complete(confirm_mode);
}
pub(crate) fn receive_delivery_content(&self, data: Vec<u8>) {
if let Some(message) = self.inner.lock().current_message.as_mut() {
message.delivery.data.extend(data);
}
}
pub(crate) fn drain(&self) -> Vec<BasicReturnMessage> {
self.inner.lock().drain()
}
pub(crate) fn register_dropped_confirm(&self, promise: Promise<Confirmation>) {
self.inner.lock().register_dropped_confirm(promise);
}
pub(crate) fn get_waiting_message(&self) -> Option<BasicReturnMessage> {
self.inner.lock().waiting_messages.pop_front()
}
}
#[derive(Debug, Default)]
pub struct Inner {
current_message: Option<BasicReturnMessage>,
non_confirm_messages: Vec<BasicReturnMessage>,
waiting_messages: VecDeque<BasicReturnMessage>,
messages: Vec<BasicReturnMessage>,
dropped_confirms: Promises<Confirmation>,
}
impl Inner {
fn new_delivery_complete(&mut self, confirm_mode: bool) {
if let Some(message) = self.current_message.take() {
warn!("Server returned us a message: {:?}", message);
if confirm_mode {
self.waiting_messages.push_back(message);
} else {
self.non_confirm_messages.push(message);
}
}
}
fn register_dropped_confirm(&mut self, promise: Promise<Confirmation>) {
if let Some(confirmation) = self.dropped_confirms.register(promise) {
if let Ok(Confirmation::Nack(Some(message))) | Ok(Confirmation::Ack(Some(message))) =
confirmation
{
trace!("Dropped PublisherConfirm was carrying a message, storing it");
self.messages.push(*message);
} else {
trace!("Dropped PublisherConfirm was ready but didn't carry a message, discarding");
}
} else {
trace!("Storing dropped PublisherConfirm for further use");
}
}
fn drain(&mut self) -> Vec<BasicReturnMessage> {
let mut messages = std::mem::take(&mut self.messages);
if !self.non_confirm_messages.is_empty() {
let mut non_confirm_messages = std::mem::take(&mut self.non_confirm_messages);
non_confirm_messages.append(&mut messages);
messages = non_confirm_messages;
}
if let Some(confirmations) = self.dropped_confirms.try_wait() {
for confirmation in confirmations {
if let Ok(Confirmation::Nack(Some(message)))
| Ok(Confirmation::Ack(Some(message))) = confirmation
{
trace!("PublisherConfirm was carrying a message, storing it");
messages.push(*message);
} else {
trace!("PublisherConfirm was ready but didn't carry a message, discarding");
}
}
}
messages
}
}
| 35.584906 | 100 | 0.620891 |
096be0e737bee87d433d2392b13dec1bc3766d10 | 6,670 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use crate::Display;
use crate::KeymapKey;
use crate::ModifierIntent;
use crate::ModifierType;
use glib::object::ObjectType as ObjectType_;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem;
use std::mem::transmute;
glib::glib_wrapper! {
pub struct Keymap(Object<ffi::GdkKeymap>);
match fn {
get_type => || ffi::gdk_keymap_get_type(),
}
}
impl Keymap {
#[doc(alias = "gdk_keymap_get_caps_lock_state")]
pub fn get_caps_lock_state(&self) -> bool {
unsafe { from_glib(ffi::gdk_keymap_get_caps_lock_state(self.to_glib_none().0)) }
}
#[doc(alias = "gdk_keymap_get_direction")]
pub fn get_direction(&self) -> pango::Direction {
unsafe { from_glib(ffi::gdk_keymap_get_direction(self.to_glib_none().0)) }
}
#[doc(alias = "gdk_keymap_get_modifier_mask")]
pub fn get_modifier_mask(&self, intent: ModifierIntent) -> ModifierType {
unsafe {
from_glib(ffi::gdk_keymap_get_modifier_mask(
self.to_glib_none().0,
intent.to_glib(),
))
}
}
#[doc(alias = "gdk_keymap_get_modifier_state")]
pub fn get_modifier_state(&self) -> u32 {
unsafe { ffi::gdk_keymap_get_modifier_state(self.to_glib_none().0) }
}
#[doc(alias = "gdk_keymap_get_num_lock_state")]
pub fn get_num_lock_state(&self) -> bool {
unsafe { from_glib(ffi::gdk_keymap_get_num_lock_state(self.to_glib_none().0)) }
}
#[cfg(any(feature = "v3_18", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v3_18")))]
#[doc(alias = "gdk_keymap_get_scroll_lock_state")]
pub fn get_scroll_lock_state(&self) -> bool {
unsafe { from_glib(ffi::gdk_keymap_get_scroll_lock_state(self.to_glib_none().0)) }
}
#[doc(alias = "gdk_keymap_have_bidi_layouts")]
pub fn have_bidi_layouts(&self) -> bool {
unsafe { from_glib(ffi::gdk_keymap_have_bidi_layouts(self.to_glib_none().0)) }
}
#[doc(alias = "gdk_keymap_lookup_key")]
pub fn lookup_key(&self, key: &KeymapKey) -> u32 {
unsafe { ffi::gdk_keymap_lookup_key(self.to_glib_none().0, key.to_glib_none().0) }
}
#[doc(alias = "gdk_keymap_translate_keyboard_state")]
pub fn translate_keyboard_state(
&self,
hardware_keycode: u32,
state: ModifierType,
group: i32,
) -> Option<(u32, i32, i32, ModifierType)> {
unsafe {
let mut keyval = mem::MaybeUninit::uninit();
let mut effective_group = mem::MaybeUninit::uninit();
let mut level = mem::MaybeUninit::uninit();
let mut consumed_modifiers = mem::MaybeUninit::uninit();
let ret = from_glib(ffi::gdk_keymap_translate_keyboard_state(
self.to_glib_none().0,
hardware_keycode,
state.to_glib(),
group,
keyval.as_mut_ptr(),
effective_group.as_mut_ptr(),
level.as_mut_ptr(),
consumed_modifiers.as_mut_ptr(),
));
let keyval = keyval.assume_init();
let effective_group = effective_group.assume_init();
let level = level.assume_init();
let consumed_modifiers = consumed_modifiers.assume_init();
if ret {
Some((
keyval,
effective_group,
level,
from_glib(consumed_modifiers),
))
} else {
None
}
}
}
#[cfg_attr(feature = "v3_22", deprecated)]
#[doc(alias = "gdk_keymap_get_default")]
pub fn get_default() -> Option<Keymap> {
assert_initialized_main_thread!();
unsafe { from_glib_none(ffi::gdk_keymap_get_default()) }
}
#[doc(alias = "gdk_keymap_get_for_display")]
pub fn get_for_display(display: &Display) -> Option<Keymap> {
skip_assert_initialized!();
unsafe { from_glib_none(ffi::gdk_keymap_get_for_display(display.to_glib_none().0)) }
}
pub fn connect_direction_changed<F: Fn(&Keymap) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn direction_changed_trampoline<F: Fn(&Keymap) + 'static>(
this: *mut ffi::GdkKeymap,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this))
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"direction-changed\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
direction_changed_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
pub fn connect_keys_changed<F: Fn(&Keymap) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn keys_changed_trampoline<F: Fn(&Keymap) + 'static>(
this: *mut ffi::GdkKeymap,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this))
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"keys-changed\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
keys_changed_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
pub fn connect_state_changed<F: Fn(&Keymap) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn state_changed_trampoline<F: Fn(&Keymap) + 'static>(
this: *mut ffi::GdkKeymap,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this))
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"state-changed\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
state_changed_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl fmt::Display for Keymap {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("Keymap")
}
}
| 34.205128 | 96 | 0.555772 |
18e1de056b2bfb2213388c5f9888601af8dec84e | 8,173 | //! Timer handling for `setInterval` AVM timers.
//!
//! We tick the timers during our normal frame loop for deterministic operation.
//! The timers are stored in a priority queue, where we check if the nearest timer
//! is ready to tick each frame.
//!
//! TODO: Could we use this for AVM2 timers as well?
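//!
//! A minimal sketch of the scheduling idea in plain Rust (illustrative only;
//! the `Timer` type below reverses its `Ord` impl instead of using `Reverse`):
//!
//! ```
//! use std::{cmp::Reverse, collections::BinaryHeap};
//!
//! // `BinaryHeap` is a max-heap, so wrap tick times in `Reverse` to pop the
//! // soonest timer first.
//! let mut queue: BinaryHeap<Reverse<u64>> = BinaryHeap::new();
//! queue.push(Reverse(300));
//! queue.push(Reverse(100));
//! queue.push(Reverse(200));
//!
//! let cur_time = 150;
//! while queue.peek().map_or(false, |next| next.0 <= cur_time) {
//!     let Reverse(tick_time) = queue.pop().unwrap();
//!     // Run the callback scheduled for `tick_time` here.
//!     assert!(tick_time <= cur_time);
//! }
//! assert_eq!(queue.len(), 2);
//! ```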
use crate::avm1::{Activation, ActivationIdentifier, AvmString, Object, TObject, Value};
use crate::context::UpdateContext;
use gc_arena::Collect;
use std::collections::{binary_heap::PeekMut, BinaryHeap};
/// Manages the collection of timers.
pub struct Timers<'gc> {
/// The collection of active timers.
timers: BinaryHeap<Timer<'gc>>,
/// An increasing ID used for created timers.
timer_counter: i32,
/// The current global time.
cur_time: u64,
}
impl<'gc> Timers<'gc> {
/// Ticks all timers and runs necessary callbacks.
pub fn update_timers(context: &mut UpdateContext<'_, 'gc, '_>, dt: f64) -> Option<f64> {
context.timers.cur_time = context
.timers
.cur_time
.wrapping_add((dt * Self::TIMER_SCALE) as u64);
let num_timers = context.timers.num_timers();
if num_timers == 0 {
return None;
}
let version = context.swf.version();
let globals = context.avm1.global_object_cell();
let level0 = context.stage.root_clip();
let mut activation = Activation::from_nothing(
context.reborrow(),
ActivationIdentifier::root("[Timer Callback]"),
version,
globals,
level0,
);
let mut tick_count = 0;
let cur_time = activation.context.timers.cur_time;
// We have to be careful because the timer list can be mutated while updating;
// a timer callback could add more timers, clear timers, etc.
while activation
.context
.timers
.peek()
.map(|timer| timer.tick_time)
.unwrap_or(cur_time)
< cur_time
{
let timer = activation.context.timers.peek().unwrap();
// TODO: This is only really necessary because BinaryHeap lacks `remove` or `retain` on stable.
// We can remove the timers straight away in `clearInterval` once this is stable.
if !timer.is_alive.get() {
activation.context.timers.pop();
continue;
}
tick_count += 1;
// SANITY: Only allow so many ticks per timer per update.
if tick_count > Self::MAX_TICKS {
// Reset our time to a little bit before the nearest timer.
let next_time = activation.context.timers.peek_mut().unwrap().tick_time;
activation.context.timers.cur_time = next_time.wrapping_sub(100);
break;
}
// TODO: Can we avoid these clones?
let params = timer.params.clone();
let callback = timer.callback.clone();
match callback {
TimerCallback::Function(function) => {
let _ = function.call(
"[Timer Callback]".into(),
&mut activation,
Value::Undefined,
¶ms,
);
}
TimerCallback::Method { this, method_name } => {
let _ = this.call_method(method_name, ¶ms, &mut activation);
}
}
crate::player::Player::run_actions(&mut activation.context);
let mut timer = activation.context.timers.peek_mut().unwrap();
if timer.is_timeout {
// Timeouts only fire once.
drop(timer);
activation.context.timers.pop();
} else {
// Reset setInterval timers. `peek_mut` re-sorts the timer in the priority queue.
timer.tick_time = timer.tick_time.wrapping_add(timer.interval);
}
}
// Return estimated time until next timer tick.
activation
.context
.timers
.peek()
.map(|timer| (timer.tick_time.wrapping_sub(cur_time)) as f64 / Self::TIMER_SCALE)
}
/// The minimum interval we allow for timers.
const MIN_INTERVAL: i32 = 10;
    /// The maximum timer ticks per call to `update_timers`, for sanity.
const MAX_TICKS: i32 = 10;
/// The scale of the timers (microseconds).
const TIMER_SCALE: f64 = 1000.0;
/// Creates a new `Timers` collection.
pub fn new() -> Self {
Self {
timers: Default::default(),
timer_counter: 0,
cur_time: 0,
}
}
/// The number of timers currently active.
pub fn num_timers(&self) -> usize {
self.timers.len()
}
/// Registers a new timer and returns the timer ID.
pub fn add_timer(
&mut self,
callback: TimerCallback<'gc>,
interval: i32,
params: Vec<Value<'gc>>,
is_timeout: bool,
) -> i32 {
// SANITY: Set a minimum interval so we don't spam too much.
let interval = interval.max(Self::MIN_INTERVAL) as u64 * (Self::TIMER_SCALE as u64);
self.timer_counter = self.timer_counter.wrapping_add(1);
let id = self.timer_counter;
let timer = Timer {
id,
callback,
params,
tick_time: self.cur_time + interval,
interval,
is_timeout,
is_alive: std::cell::Cell::new(true),
};
self.timers.push(timer);
id
}
/// Removes a timer.
pub fn remove(&mut self, id: i32) -> bool {
// TODO: When `BinaryHeap::remove` is stable, we can remove it here directly.
if let Some(timer) = self.timers.iter().find(|timer| timer.id == id) {
timer.is_alive.set(false);
true
} else {
false
}
}
fn peek(&self) -> Option<&Timer<'gc>> {
self.timers.peek()
}
fn peek_mut(&mut self) -> Option<PeekMut<'_, Timer<'gc>>> {
self.timers.peek_mut()
}
fn pop(&mut self) -> Option<Timer<'gc>> {
self.timers.pop()
}
}
impl Default for Timers<'_> {
fn default() -> Self {
Self::new()
}
}
unsafe impl<'gc> Collect for Timers<'gc> {
fn trace(&self, cc: gc_arena::CollectionContext) {
for timer in &self.timers {
timer.trace(cc);
}
}
}
/// A timer created via `setInterval`/`setTimeout`.
/// Runs a callback when it ticks.
#[derive(Debug, Collect)]
#[collect(no_drop)]
struct Timer<'gc> {
/// The ID of the timer.
id: i32,
/// The callback that this timer runs when it fires.
/// A callback is either a function object, or a parent object with a method name.
callback: TimerCallback<'gc>,
/// The parameters to pass to the callback function.
params: Vec<Value<'gc>>,
/// The time when this timer should fire.
tick_time: u64,
/// The interval between timer ticks, in microseconds.
interval: u64,
/// This timer only fires once if `is_timeout` is true.
is_timeout: bool,
/// Whether this timer has been removed.
is_alive: std::cell::Cell<bool>,
}
// Implement `Ord` so that timers can be stored in the BinaryHeap (as a min-heap).
impl PartialEq for Timer<'_> {
fn eq(&self, other: &Self) -> bool {
self.tick_time == other.tick_time
}
}
impl Eq for Timer<'_> {}
impl PartialOrd for Timer<'_> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.tick_time
.partial_cmp(&other.tick_time)
.map(|o| o.reverse())
}
}
impl Ord for Timer<'_> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.tick_time.cmp(&other.tick_time).reverse()
}
}
/// A callback fired by a `setInterval`/`setTimeout` timer.
#[derive(Debug, Collect, Clone)]
#[collect(no_drop)]
pub enum TimerCallback<'gc> {
Function(Object<'gc>),
Method {
this: Object<'gc>,
method_name: AvmString<'gc>,
},
}
| 30.496269 | 107 | 0.567111 |
ac8850fc5bd45445e57aefe87cef64586cca3c8d | 2,719 | use crate::{
client::Client,
error::Error,
request::{Request, TryIntoRequest},
response::{marker::EmptyBody, ResponseFuture},
routing::Route,
};
use twilight_model::id::{marker::ApplicationMarker, Id};
/// Delete a followup message to an interaction, by its token and message ID.
///
/// # Examples
///
/// ```no_run
/// # #[tokio::main]
/// # async fn main() -> Result<(), Box<dyn std::error::Error>> {
/// use std::env;
/// use twilight_http::Client;
/// use twilight_http::request::AuditLogReason;
/// use twilight_model::id::Id;
///
/// let client = Client::new(env::var("DISCORD_TOKEN")?);
/// let application_id = Id::new(1);
///
/// client
/// .interaction(application_id)
/// .delete_response("token here")
/// .exec()
/// .await?;
/// # Ok(()) }
/// ```
#[must_use = "requests must be configured and executed"]
pub struct DeleteResponse<'a> {
application_id: Id<ApplicationMarker>,
http: &'a Client,
token: &'a str,
}
impl<'a> DeleteResponse<'a> {
pub(crate) const fn new(
http: &'a Client,
application_id: Id<ApplicationMarker>,
token: &'a str,
) -> Self {
Self {
application_id,
http,
token,
}
}
/// Execute the request, returning a future resolving to a [`Response`].
///
/// [`Response`]: crate::response::Response
pub fn exec(self) -> ResponseFuture<EmptyBody> {
let http = self.http;
match self.try_into_request() {
Ok(request) => http.request(request),
Err(source) => ResponseFuture::error(source),
}
}
}
impl TryIntoRequest for DeleteResponse<'_> {
fn try_into_request(self) -> Result<Request, Error> {
Ok(Request::builder(&Route::DeleteInteractionOriginal {
application_id: self.application_id.get(),
interaction_token: self.token,
})
.use_authorization_token(false)
.build())
}
}
#[cfg(test)]
mod tests {
use crate::{client::Client, request::TryIntoRequest};
use std::error::Error;
use twilight_http_ratelimiting::Path;
use twilight_model::id::Id;
#[test]
fn test_delete_followup_message() -> Result<(), Box<dyn Error>> {
let application_id = Id::new(1);
let token = "foo".to_owned();
let client = Client::new(String::new());
let req = client
.interaction(application_id)
.delete_response(&token)
.try_into_request()?;
assert!(!req.use_authorization_token());
assert_eq!(
&Path::WebhooksIdTokenMessagesId(application_id.get(), token),
req.ratelimit_path()
);
Ok(())
}
}
| 26.398058 | 77 | 0.585142 |
ef9a8a40a8f80f69c54b3c7cfacb7b6f5ac0946d | 7,675 | use super::{types::*, ToCKBCellData};
use crate::toCKB_typescript::utils::types::{
generated::{basic, btc_difficulty, mint_xt_witness},
ToCKBStatus,
};
use crate::*;
use ckb_testtool::{builtin::ALWAYS_SUCCESS, context::Context};
use ckb_tool::ckb_types::{bytes::Bytes, core::TransactionBuilder, packed::*, prelude::*};
use molecule::prelude::*;
pub const MAX_CYCLES: u64 = 100_000_000;
pub const PLEDGE: u64 = 10000;
pub const XT_CELL_CAPACITY: u64 = 200;
pub fn run_test_case(case: TestCase) {
let kind = case.kind;
let mut context = Context::default();
let toCKB_typescript_bin: Bytes = Loader::default().load_binary("toCKB-typescript");
let toCKB_typescript_out_point = context.deploy_cell(toCKB_typescript_bin);
// let toCKB_lockscript_bin: Bytes = Loader::default().load_binary("toCKB-lockscript");
// let toCKB_lockscript_out_point = context.deploy_cell(toCKB_lockscript_bin);
let always_success_out_point = context.deploy_cell(ALWAYS_SUCCESS.clone());
let sudt_bin = include_bytes!("../../../../deps/simple_udt");
let sudt_out_point = context.deploy_cell(Bytes::from(sudt_bin.as_ref()));
// prepare scripts
let toCKB_typescript = context
.build_script(&toCKB_typescript_out_point, [kind; 1].to_vec().into())
.expect("script");
let toCKB_typescript_dep = CellDep::new_builder()
.out_point(toCKB_typescript_out_point)
.build();
let always_success_lockscript = context
.build_script(&always_success_out_point, Default::default())
.expect("script");
let always_success_lockscript_dep = CellDep::new_builder()
.out_point(always_success_out_point)
.build();
let lock_hash: [u8; 32] = always_success_lockscript.calc_script_hash().unpack();
// let lock_hash = [0u8; 32];
dbg!(hex::encode(lock_hash));
let sudt_script_args: Bytes = lock_hash.to_vec().into();
let _sudt_typescript = context
.build_script(&sudt_out_point, sudt_script_args)
.expect("script");
let sudt_typescript_dep = CellDep::new_builder().out_point(sudt_out_point).build();
// prepare cells
let x_lock_address_str = case.tockb_cell_data.x_lock_address;
let x_lock_address = basic::Bytes::new_builder()
.set(
x_lock_address_str
.as_bytes()
.iter()
.map(|c| Byte::new(*c))
.collect::<Vec<_>>()
.into(),
)
.build();
let signer_lockscript =
basic::Script::from_slice(case.tockb_cell_data.signer_lockscript.as_slice()).unwrap();
let user_lockscript =
basic::Script::from_slice(case.tockb_cell_data.user_lockscript.as_slice()).unwrap();
let input_toCKB_data = ToCKBCellData::new_builder()
.status(Byte::new(ToCKBStatus::Bonded as u8))
.lot_size(Byte::new(case.tockb_cell_data.lot_size))
.signer_lockscript(signer_lockscript.clone())
.user_lockscript(user_lockscript.clone())
.x_lock_address(x_lock_address.clone())
.build();
let output_toCKB_data = ToCKBCellData::new_builder()
.status(Byte::new(ToCKBStatus::Warranty as u8))
.lot_size(Byte::new(case.tockb_cell_data.lot_size))
.signer_lockscript(signer_lockscript.clone())
.user_lockscript(user_lockscript.clone())
.x_lock_address(x_lock_address.clone())
.build();
let input_ckb_cell_out_point = context.create_cell(
CellOutput::new_builder()
.capacity(case.input_capacity.pack())
.lock(always_success_lockscript.clone())
.type_(Some(toCKB_typescript.clone()).pack())
.build(),
input_toCKB_data.as_bytes(),
);
let input_ckb_cell = CellInput::new_builder()
.previous_output(input_ckb_cell_out_point)
.build();
let inputs = vec![input_ckb_cell];
let mut outputs = vec![CellOutput::new_builder()
.capacity(case.output_capacity.pack())
.type_(Some(toCKB_typescript.clone()).pack())
.lock(always_success_lockscript.clone())
.build()];
let mut outputs_data = vec![output_toCKB_data.as_bytes()];
for output in case.outputs.into_iter() {
let cell_output = CellOutput::new_builder()
.capacity(output.capacity.pack())
.type_(Some(output.typescript).pack())
.lock(output.lockscript)
.build();
outputs.push(cell_output);
outputs_data.push(output.amount.to_le_bytes().to_vec().into())
}
let spv_proof = match case.witness.spv_proof {
SpvProof::BTC(btc_spv_proof) => btc_spv_proof.as_slice().to_vec(),
};
let witness_data = mint_xt_witness::MintXTWitness::new_builder()
.spv_proof(spv_proof.into())
.cell_dep_index_list(case.witness.cell_dep_index_list.into())
.build();
let witness = WitnessArgs::new_builder()
.input_type(Some(witness_data.as_bytes()).pack())
.build();
let dep_data = match case.cell_deps_data {
CellDepsData::BTC(difficulty_data) => {
let data = btc_difficulty::BTCDifficulty::new_builder()
.previous(difficulty_data.previous.to_le_bytes().to_vec().into())
.current(difficulty_data.current.to_le_bytes().to_vec().into())
.build();
dbg!(&data);
data.as_bytes()
}
};
let data_out_point = context.deploy_cell(dep_data);
let data_dep = CellDep::new_builder().out_point(data_out_point).build();
let tx = TransactionBuilder::default()
.inputs(inputs)
.outputs(outputs)
.outputs_data(outputs_data.pack())
.cell_dep(data_dep)
.cell_dep(toCKB_typescript_dep)
.cell_dep(always_success_lockscript_dep)
.cell_dep(sudt_typescript_dep)
.witness(witness.as_bytes().pack())
.build();
let res = context.verify_tx(&tx, MAX_CYCLES);
dbg!(&res);
match res {
Ok(_cycles) => assert_eq!(case.expect_return_code, 0),
Err(err) => assert!(check_err(err, case.expect_return_code)),
}
}
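// The verifier reports script failures only through their `Display` text, so
// `check_err` matches an expected exit code by formatting both sides into the
// `Script(ValidationFailure(<code>))` form.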
pub fn check_err(err: ckb_tool::ckb_error::Error, code: i8) -> bool {
let get = format!("{}", err);
let expected = format!("Script(ValidationFailure({}))", code);
dbg!(&get, &expected);
get == expected
}
pub struct DeployResult {
pub context: Context,
pub toCKB_typescript: Script,
pub always_success_lockscript: Script,
pub sudt_typescript: Script,
}
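// Deploys the toCKB typescript, the always-success lock and the bundled
// simple UDT binary into a fresh `Context`, wiring the sUDT script args to the
// always-success lock hash (its owner lock) so the tests can issue tokens
// freely.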
pub fn deploy(kind: u8) -> DeployResult {
let mut context = Context::default();
let toCKB_typescript_bin: Bytes = Loader::default().load_binary("toCKB-typescript");
let toCKB_typescript_out_point = context.deploy_cell(toCKB_typescript_bin);
let always_success_out_point = context.deploy_cell(ALWAYS_SUCCESS.clone());
let sudt_bin = include_bytes!("../../../../deps/simple_udt");
let sudt_out_point = context.deploy_cell(Bytes::from(sudt_bin.as_ref()));
// prepare scripts
let toCKB_typescript = context
.build_script(&toCKB_typescript_out_point, [kind; 1].to_vec().into())
.expect("script");
let always_success_lockscript = context
.build_script(&always_success_out_point, Default::default())
.expect("script");
let lock_hash: [u8; 32] = always_success_lockscript.calc_script_hash().unpack();
let sudt_script_args: Bytes = lock_hash.to_vec().into();
let sudt_typescript = context
.build_script(&sudt_out_point, sudt_script_args)
.expect("script");
DeployResult {
context,
toCKB_typescript,
always_success_lockscript,
sudt_typescript,
}
}
| 40.183246 | 94 | 0.661107 |
de0bbe41d58fc942856d47b0a383622dda1a1d25 | 756 | // Copyright 2018-2021 Cargill Incorporated
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Traits, types, and implementations for interacting with node_id's.
/// Store logic for accessing and modifying an instances node_id.
pub mod store;
| 39.789474 | 75 | 0.753968 |
231304cca2a06a653154b5c0acf1ec7bceffc34d | 4,041 | // Copyright 2021 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
//! Error handling in iota-client crate.
/// Type alias of `Result` in iota-client
pub type Result<T> = std::result::Result<T, Error>;
#[derive(Debug, thiserror::Error)]
/// Error type of the iota client crate.
pub enum Error {
/// Error when building indexation messages
#[error("Error when building indexation message: {0}")]
IndexationError(String),
/// Error when building transaction messages
#[error("Error when building transaction message")]
TransactionError,
/// The wallet account doesn't have enough balance
#[error("The wallet account doesn't have enough balance. It only has {0}, required is {1}")]
NotEnoughBalance(u64, u64),
/// Dust error, for example not enough balance on an address
#[error("Dust error: {0}")]
DustError(String),
/// Missing required parameters
#[error("Must provide required parameter: {0}")]
MissingParameter(String),
/// Invalid parameters
#[error("Parameter is invalid:{0}")]
InvalidParameter(String),
/// Found spent output
#[error("Found spent output.")]
SpentOutput,
/// Error from RestAPI calls with unexpected status code response
#[error("Response error with status code {0}: {1}")]
ResponseError(u16, String),
/// No node available in the synced node pool
#[error("No synced node available")]
SyncedNodePoolEmpty,
/// Error on Url type conversion
#[error("Failed to parse url")]
UrlError,
/// Error on Url type conversion
#[error("Failed to parse node_pool_urls")]
NodePoolUrlsError,
/// Errors from reqwest api call
#[error("{0}")]
ReqwestError(#[from] reqwest::Error),
/// Hex string convert error
#[error("{0}")]
FromHexError(#[from] hex::FromHexError),
/// Message types error
#[error("{0}")]
MessageError(bee_message::Error),
/// The message cannot be promoted or reattached
#[error("Message ID `{0}` doesn't need to be promoted or reattached")]
NoNeedPromoteOrReattach(String),
/// Mqtt client error
#[cfg(feature = "mqtt")]
#[error("{0}")]
MqttClientError(#[from] paho_mqtt::errors::Error),
/// Invalid MQTT topic.
#[error("The MQTT topic {0} is invalid")]
InvalidMqttTopic(String),
/// MQTT connection not found (all nodes MQTT's are disabled)
#[error("MQTT connection not found (all nodes have the MQTT plugin disabled)")]
MqttConnectionNotFound,
/// IO error
#[error("{0}")]
IoError(#[from] std::io::Error),
/// JSON error
#[error("{0}")]
Json(#[from] serde_json::Error),
/// PoW error
#[error("{0}")]
Pow(String),
/// Address not found
#[error("Address not found in range {0}")]
InputAddressNotFound(String),
/// Storage adapter not set_path
#[cfg(feature = "storage")]
#[error("Storage adapter not set {0}")]
StorageAdapterNotSet(String),
/// Storage error
#[cfg(feature = "storage")]
#[error("Storage error {0}")]
Storage(String),
/// Account not found error
#[cfg(feature = "storage")]
#[error("Account not found")]
AccountNotFound,
/// Crypto.rs error
#[error("{0}")]
CryptoError(crypto::Error),
/// Slip10 error
#[error("{0}")]
Slip10Error(slip10::Error),
/// Invalid amount of parents
#[error("Invalid amount of parents, length must be in 1..=8")]
InvalidParentsAmount,
}
// can't use #[from] on bee_message::Error so manually converting it
impl From<bee_message::Error> for Error {
fn from(error: bee_message::Error) -> Self {
Error::MessageError(error)
}
}
// can't use #[from] on crypto::Error so manually converting it
impl From<crypto::Error> for Error {
fn from(error: crypto::Error) -> Self {
Error::CryptoError(error)
}
}
// can't use #[from] on slip10::Error so manually converting it
impl From<slip10::Error> for Error {
fn from(error: slip10::Error) -> Self {
Error::Slip10Error(error)
}
}
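// A small sanity check that the `thiserror` display strings interpolate as
// intended; it only uses variants that can be constructed without external
// error types.
#[cfg(test)]
mod error_display_tests {
    use super::Error;
    #[test]
    fn display_strings() {
        assert_eq!(
            Error::NotEnoughBalance(1, 2).to_string(),
            "The wallet account doesn't have enough balance. It only has 1, required is 2"
        );
        assert_eq!(
            Error::MissingParameter("seed".into()).to_string(),
            "Must provide required parameter: seed"
        );
    }
}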
| 33.122951 | 96 | 0.641178 |
e64eaed0c8bc2baa31302032949b530131a0a31c | 18,847 | #[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
#[doc = "0x00 - LEA Capability Register"]
pub leacap: LEACAP,
#[doc = "0x04 - Configuration Register 0"]
pub leacnf0: LEACNF0,
#[doc = "0x08 - Configuration Register 1"]
pub leacnf1: LEACNF1,
#[doc = "0x0c - Configuration Register 2"]
pub leacnf2: LEACNF2,
#[doc = "0x10 - Memory Bottom Register"]
pub leamb: LEAMB,
#[doc = "0x14 - Memory Top Register"]
pub leamt: LEAMT,
#[doc = "0x18 - Code Memory Access Register"]
pub leacma: LEACMA,
#[doc = "0x1c - Code Memory Control Register"]
pub leacmctl: LEACMCTL,
_reserved8: [u8; 8usize],
#[doc = "0x28 - LEA Command Status Register"]
pub leacmdstat: LEACMDSTAT,
#[doc = "0x2c - LEA Source 1 Status Register"]
pub leas1stat: LEAS1STAT,
#[doc = "0x30 - LEA Source 0 Status Register"]
pub leas0stat: LEAS0STAT,
#[doc = "0x34 - LEA Result Status Register"]
pub leadststat: LEADSTSTAT,
_reserved12: [u8; 8usize],
#[doc = "0x40 - PM Control Register"]
pub leapmctl: LEAPMCTL,
#[doc = "0x44 - PM Result Register"]
pub leapmdst: LEAPMDST,
#[doc = "0x48 - PM Source 1 Register"]
pub leapms1: LEAPMS1,
#[doc = "0x4c - PM Source 0 Register"]
pub leapms0: LEAPMS0,
#[doc = "0x50 - PM Command Buffer Register"]
pub leapmcb: LEAPMCB,
_reserved17: [u8; 28usize],
#[doc = "0x70 - Interrupt Flag and Set Register"]
pub leaifgset: LEAIFGSET,
#[doc = "0x74 - Interrupt Enable Register"]
pub leaie: LEAIE,
#[doc = "0x78 - Interrupt Flag and Clear Register"]
pub leaifg: LEAIFG,
#[doc = "0x7c - Interrupt Vector Register"]
pub leaiv: LEAIV,
}
#[doc = "LEA Capability Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leacap](leacap) module"]
pub type LEACAP = crate::Reg<u32, _LEACAP>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEACAP;
#[doc = "`read()` method returns [leacap::R](leacap::R) reader structure"]
impl crate::Readable for LEACAP {}
#[doc = "`write(|w| ..)` method takes [leacap::W](leacap::W) writer structure"]
impl crate::Writable for LEACAP {}
#[doc = "LEA Capability Register"]
pub mod leacap;
#[doc = "Configuration Register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leacnf0](leacnf0) module"]
pub type LEACNF0 = crate::Reg<u32, _LEACNF0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEACNF0;
#[doc = "`read()` method returns [leacnf0::R](leacnf0::R) reader structure"]
impl crate::Readable for LEACNF0 {}
#[doc = "`write(|w| ..)` method takes [leacnf0::W](leacnf0::W) writer structure"]
impl crate::Writable for LEACNF0 {}
#[doc = "Configuration Register 0"]
pub mod leacnf0;
#[doc = "Configuration Register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leacnf1](leacnf1) module"]
pub type LEACNF1 = crate::Reg<u32, _LEACNF1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEACNF1;
#[doc = "`read()` method returns [leacnf1::R](leacnf1::R) reader structure"]
impl crate::Readable for LEACNF1 {}
#[doc = "`write(|w| ..)` method takes [leacnf1::W](leacnf1::W) writer structure"]
impl crate::Writable for LEACNF1 {}
#[doc = "Configuration Register 1"]
pub mod leacnf1;
#[doc = "Configuration Register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leacnf2](leacnf2) module"]
pub type LEACNF2 = crate::Reg<u32, _LEACNF2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEACNF2;
#[doc = "`read()` method returns [leacnf2::R](leacnf2::R) reader structure"]
impl crate::Readable for LEACNF2 {}
#[doc = "`write(|w| ..)` method takes [leacnf2::W](leacnf2::W) writer structure"]
impl crate::Writable for LEACNF2 {}
#[doc = "Configuration Register 2"]
pub mod leacnf2;
#[doc = "Memory Bottom Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leamb](leamb) module"]
pub type LEAMB = crate::Reg<u32, _LEAMB>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAMB;
#[doc = "`read()` method returns [leamb::R](leamb::R) reader structure"]
impl crate::Readable for LEAMB {}
#[doc = "`write(|w| ..)` method takes [leamb::W](leamb::W) writer structure"]
impl crate::Writable for LEAMB {}
#[doc = "Memory Bottom Register"]
pub mod leamb;
#[doc = "Memory Top Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leamt](leamt) module"]
pub type LEAMT = crate::Reg<u32, _LEAMT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAMT;
#[doc = "`read()` method returns [leamt::R](leamt::R) reader structure"]
impl crate::Readable for LEAMT {}
#[doc = "`write(|w| ..)` method takes [leamt::W](leamt::W) writer structure"]
impl crate::Writable for LEAMT {}
#[doc = "Memory Top Register"]
pub mod leamt;
#[doc = "Code Memory Access Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leacma](leacma) module"]
pub type LEACMA = crate::Reg<u32, _LEACMA>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEACMA;
#[doc = "`read()` method returns [leacma::R](leacma::R) reader structure"]
impl crate::Readable for LEACMA {}
#[doc = "`write(|w| ..)` method takes [leacma::W](leacma::W) writer structure"]
impl crate::Writable for LEACMA {}
#[doc = "Code Memory Access Register"]
pub mod leacma;
#[doc = "Code Memory Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leacmctl](leacmctl) module"]
pub type LEACMCTL = crate::Reg<u32, _LEACMCTL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEACMCTL;
#[doc = "`read()` method returns [leacmctl::R](leacmctl::R) reader structure"]
impl crate::Readable for LEACMCTL {}
#[doc = "`write(|w| ..)` method takes [leacmctl::W](leacmctl::W) writer structure"]
impl crate::Writable for LEACMCTL {}
#[doc = "Code Memory Control Register"]
pub mod leacmctl;
#[doc = "LEA Command Status Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leacmdstat](leacmdstat) module"]
pub type LEACMDSTAT = crate::Reg<u32, _LEACMDSTAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEACMDSTAT;
#[doc = "`read()` method returns [leacmdstat::R](leacmdstat::R) reader structure"]
impl crate::Readable for LEACMDSTAT {}
#[doc = "`write(|w| ..)` method takes [leacmdstat::W](leacmdstat::W) writer structure"]
impl crate::Writable for LEACMDSTAT {}
#[doc = "LEA Command Status Register"]
pub mod leacmdstat;
#[doc = "LEA Source 1 Status Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leas1stat](leas1stat) module"]
pub type LEAS1STAT = crate::Reg<u32, _LEAS1STAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAS1STAT;
#[doc = "`read()` method returns [leas1stat::R](leas1stat::R) reader structure"]
impl crate::Readable for LEAS1STAT {}
#[doc = "`write(|w| ..)` method takes [leas1stat::W](leas1stat::W) writer structure"]
impl crate::Writable for LEAS1STAT {}
#[doc = "LEA Source 1 Status Register"]
pub mod leas1stat;
#[doc = "LEA Source 0 Status Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leas0stat](leas0stat) module"]
pub type LEAS0STAT = crate::Reg<u32, _LEAS0STAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAS0STAT;
#[doc = "`read()` method returns [leas0stat::R](leas0stat::R) reader structure"]
impl crate::Readable for LEAS0STAT {}
#[doc = "`write(|w| ..)` method takes [leas0stat::W](leas0stat::W) writer structure"]
impl crate::Writable for LEAS0STAT {}
#[doc = "LEA Source 0 Status Register"]
pub mod leas0stat;
#[doc = "LEA Result Status Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leadststat](leadststat) module"]
pub type LEADSTSTAT = crate::Reg<u32, _LEADSTSTAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEADSTSTAT;
#[doc = "`read()` method returns [leadststat::R](leadststat::R) reader structure"]
impl crate::Readable for LEADSTSTAT {}
#[doc = "`write(|w| ..)` method takes [leadststat::W](leadststat::W) writer structure"]
impl crate::Writable for LEADSTSTAT {}
#[doc = "LEA Result Status Register"]
pub mod leadststat;
#[doc = "PM Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leapmctl](leapmctl) module"]
pub type LEAPMCTL = crate::Reg<u32, _LEAPMCTL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAPMCTL;
#[doc = "`read()` method returns [leapmctl::R](leapmctl::R) reader structure"]
impl crate::Readable for LEAPMCTL {}
#[doc = "`write(|w| ..)` method takes [leapmctl::W](leapmctl::W) writer structure"]
impl crate::Writable for LEAPMCTL {}
#[doc = "PM Control Register"]
pub mod leapmctl;
#[doc = "PM Result Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leapmdst](leapmdst) module"]
pub type LEAPMDST = crate::Reg<u32, _LEAPMDST>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAPMDST;
#[doc = "`read()` method returns [leapmdst::R](leapmdst::R) reader structure"]
impl crate::Readable for LEAPMDST {}
#[doc = "`write(|w| ..)` method takes [leapmdst::W](leapmdst::W) writer structure"]
impl crate::Writable for LEAPMDST {}
#[doc = "PM Result Register"]
pub mod leapmdst;
#[doc = "PM Source 1 Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leapms1](leapms1) module"]
pub type LEAPMS1 = crate::Reg<u32, _LEAPMS1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAPMS1;
#[doc = "`read()` method returns [leapms1::R](leapms1::R) reader structure"]
impl crate::Readable for LEAPMS1 {}
#[doc = "`write(|w| ..)` method takes [leapms1::W](leapms1::W) writer structure"]
impl crate::Writable for LEAPMS1 {}
#[doc = "PM Source 1 Register"]
pub mod leapms1;
#[doc = "PM Source 0 Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leapms0](leapms0) module"]
pub type LEAPMS0 = crate::Reg<u32, _LEAPMS0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAPMS0;
#[doc = "`read()` method returns [leapms0::R](leapms0::R) reader structure"]
impl crate::Readable for LEAPMS0 {}
#[doc = "`write(|w| ..)` method takes [leapms0::W](leapms0::W) writer structure"]
impl crate::Writable for LEAPMS0 {}
#[doc = "PM Source 0 Register"]
pub mod leapms0;
#[doc = "PM Command Buffer Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leapmcb](leapmcb) module"]
pub type LEAPMCB = crate::Reg<u32, _LEAPMCB>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAPMCB;
#[doc = "`read()` method returns [leapmcb::R](leapmcb::R) reader structure"]
impl crate::Readable for LEAPMCB {}
#[doc = "`write(|w| ..)` method takes [leapmcb::W](leapmcb::W) writer structure"]
impl crate::Writable for LEAPMCB {}
#[doc = "PM Command Buffer Register"]
pub mod leapmcb;
#[doc = "Interrupt Flag and Set Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leaifgset](leaifgset) module"]
pub type LEAIFGSET = crate::Reg<u32, _LEAIFGSET>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAIFGSET;
#[doc = "`read()` method returns [leaifgset::R](leaifgset::R) reader structure"]
impl crate::Readable for LEAIFGSET {}
#[doc = "`write(|w| ..)` method takes [leaifgset::W](leaifgset::W) writer structure"]
impl crate::Writable for LEAIFGSET {}
#[doc = "Interrupt Flag and Set Register"]
pub mod leaifgset;
#[doc = "Interrupt Enable Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leaie](leaie) module"]
pub type LEAIE = crate::Reg<u32, _LEAIE>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAIE;
#[doc = "`read()` method returns [leaie::R](leaie::R) reader structure"]
impl crate::Readable for LEAIE {}
#[doc = "`write(|w| ..)` method takes [leaie::W](leaie::W) writer structure"]
impl crate::Writable for LEAIE {}
#[doc = "Interrupt Enable Register"]
pub mod leaie;
#[doc = "Interrupt Flag and Clear Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leaifg](leaifg) module"]
pub type LEAIFG = crate::Reg<u32, _LEAIFG>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAIFG;
#[doc = "`read()` method returns [leaifg::R](leaifg::R) reader structure"]
impl crate::Readable for LEAIFG {}
#[doc = "`write(|w| ..)` method takes [leaifg::W](leaifg::W) writer structure"]
impl crate::Writable for LEAIFG {}
#[doc = "Interrupt Flag and Clear Register"]
pub mod leaifg;
#[doc = "Interrupt Vector Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [leaiv](leaiv) module"]
pub type LEAIV = crate::Reg<u32, _LEAIV>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LEAIV;
#[doc = "`read()` method returns [leaiv::R](leaiv::R) reader structure"]
impl crate::Readable for LEAIV {}
#[doc = "`write(|w| ..)` method takes [leaiv::W](leaiv::W) writer structure"]
impl crate::Writable for LEAIV {}
#[doc = "Interrupt Vector Register"]
pub mod leaiv;
| 67.071174 | 421 | 0.685467 |
89ddf2864037fb92a9c9b94cc14e9d126ca62c2b | 5,137 | use super::entities::{Entities, Entity, EntityId};
use super::errors::{Error, ErrorKind};
use super::system::InfallibleSystem;
use idcontain::IdMap;
use math::prelude::*;
use math::Trans3;
derive_flat! {
#[element(Transform, &TransformRef, &mut TransformMut)]
#[access(&TransformsRef, &mut TransformsMut)]
pub struct TransformsAccess {
#[element(local)]
pub locals: Vec<Trans3>,
#[element(absolute)]
pub absolutes: Vec<Trans3>,
}
}
pub struct Transforms {
map: IdMap<Entity, TransformsAccess>,
removed: Vec<usize>,
}
impl Transforms {
pub fn attach_identity(&mut self, entity: EntityId) {
self.attach(entity, Trans3::one())
}
pub fn attach(&mut self, entity: EntityId, transform: Trans3) {
let old = self.map.insert(
entity,
Transform {
local: transform,
absolute: Trans3::one(),
},
);
if old.is_some() {
error!(
"Entity {:?} already had a transform attached, replacing.",
entity
);
}
}
pub fn get_local_mut(&mut self, entity: EntityId) -> Option<&mut Trans3> {
self.map.get_mut(entity).map(|transform| transform.local)
}
pub fn get_absolute(&self, entity: EntityId) -> Option<&Trans3> {
self.map.get(entity).map(|transform| transform.absolute)
}
fn lookup_parent(&self, entities: &Entities, id: EntityId) -> ParentLookup {
let mut id = id;
loop {
match entities.parent_of(id) {
Ok(Some(parent_id)) => {
if let Some(parent_index) = self.map.id_to_index(parent_id) {
return ParentLookup::Found {
parent_id,
parent_index,
};
} else {
id = parent_id;
}
}
Ok(None) => return ParentLookup::IsRoot,
Err(Error(ErrorKind::NoSuchEntity(..), _)) => {
return ParentLookup::Removed;
}
Err(error) => panic!("unexpected error in `parent_of`: {}", error),
}
}
}
}
impl<'context> InfallibleSystem<'context> for Transforms {
type Dependencies = &'context Entities;
fn debug_name() -> &'static str {
"transforms"
}
fn create(_deps: &Entities) -> Self {
Transforms {
map: IdMap::with_capacity(1024),
removed: Vec::with_capacity(128),
}
}
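    // `update` computes absolute transforms in a single forward sweep over the
    // flat storage, which relies on every parent sitting at a smaller index
    // than its children: when a pair is found in the wrong order the two slots
    // are swapped and the check is re-run for the parent that just moved down.
    // Transforms whose entity has been removed are queued in `removed` and
    // dropped only after the sweep (lazy removal).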
fn update(&mut self, entities: &Entities) {
for index in 0..self.map.len() {
let mut id = self
.map
.index_to_id(index)
.expect("misleading map length: index_to_id");
loop {
match self.lookup_parent(entities, id) {
ParentLookup::IsRoot => {
let access = self.map.access_mut();
access.absolutes[index] = access.locals[index];
break;
}
ParentLookup::Found {
parent_id,
parent_index,
} => {
assert_ne!(parent_index, index);
if parent_index > index {
debug!(
"Parent {:?} @ {} and child {:?} @ {} have reversed transforms, \
swapping.",
parent_id, parent_index, id, index
);
self.map.swap_indices(parent_index, index);
id = parent_id;
continue;
}
let access = self.map.access_mut();
access.absolutes[index] =
access.absolutes[parent_index].concat(&access.locals[index]);
break;
}
ParentLookup::Removed => {
debug!("Transform {:?} @ {} lazily removed.", id, index);
self.removed.push(index);
break;
}
}
}
}
for &index in self.removed.iter().rev() {
debug!(
"Actually removed transform for {:?} @ {}.",
self.map.index_to_id(index).unwrap(),
index
);
self.map.remove_by_index(index);
}
self.removed.clear();
}
fn teardown(&mut self, entities: &Entities) {
self.update(entities);
}
fn destroy(mut self, entities: &Entities) {
self.update(entities);
if !self.map.is_empty() {
error!("Transforms leaked, {} instances.", self.map.len());
}
}
}
enum ParentLookup {
Removed,
IsRoot,
Found {
parent_id: EntityId,
parent_index: usize,
},
}
| 31.133333 | 97 | 0.461943 |
e4f8c5b0073770393426bc581e1f1e768ec9dd69 | 6,144 | use crate::*;
/// This serializes and wraps creeps
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct CommonCreepData {
pos: CommonData,
name: String,
id: ObjectId<Creep>,
hits: u32,
max_hits: u32,
parts: HashSet<Part>,
store: HashMap<ResourceType, u32>,
}
impl HasPosition for CommonCreepData {
fn pos(&self) -> Position {
self.pos.pos()
}
}
impl Deref for CommonCreepData {
type Target = CommonData;
fn deref(&self) -> &Self::Target {
&self.pos
}
}
impl Display for CommonCreepData {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
// TODO update Display
writeln!(f, "{}: {:?}", self.name, self.parts)?;
writeln!(f, "{} of {} HP", self.hits, self.max_hits)?;
for (r, amount) in &self.store {
writeln!(f, "{:?}: {}", r, amount)?;
}
Ok(())
}
}
impl From<Creep> for CommonCreepData {
fn from(c: Creep) -> Self {
let pos = c.pos().into();
let name = c.name();
let id = c.id();
let hits = c.hits();
let max_hits = c.hits_max();
let mut store = HashMap::<ResourceType, u32>::new();
let mut parts = HashSet::<Part>::new();
for part in c.body() {
parts.insert(part.part);
}
for r in c.store_types() {
store.insert(r, c.store_of(r));
}
CommonCreepData { pos, name, id, hits, max_hits, store, parts }
}
}
impl From<PowerCreep> for CommonCreepData {
fn from(c: PowerCreep) -> Self {
let pos = c.pos().into();
let name = c.name();
let id = c.id().into_type();
let hits = c.hits();
let max_hits = c.hits_max();
let mut store = HashMap::<ResourceType, u32>::new();
let parts = HashSet::<Part>::new();
for r in c.store_types() {
store.insert(r, c.store_of(r));
}
CommonCreepData { pos, name, id, hits, max_hits, store, parts }
}
}
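// Both `Creep` and `PowerCreep` are folded into this one struct: power creeps
// have no body parts, so an empty `parts` set is what distinguishes them, and
// the single `ObjectId<Creep>` field is re-typed with `into_type()` whenever a
// power creep handle is needed.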
impl CommonCreepData {
    /// Determine if this is a power creep by looking at its parts
pub fn is_power_creep(&self) -> bool {
self.parts.is_empty()
}
/// Returns the creep if this is a creep
pub fn creep(&self) -> Option<Creep> {
if self.is_power_creep() {
None
} else if let Ok(creep) = game::get_object_typed(self.id) {
creep
} else {
None
}
}
/// Returns the power creep if this is a power creep
pub fn power_creep(&self) -> Option<PowerCreep> {
if !self.is_power_creep() {
return None;
}
let creep: ObjectId<PowerCreep> = self.id.into_type();
if let Ok(creep) = game::get_object_typed(creep) {
creep
} else {
None
}
}
/// Get Harvesting Power of this creep
pub fn harvesting_power(&self) -> u32 {
if self.is_power_creep() {
return 0;
}
if let Some(creep) = self.creep() {
let parts = creep.get_active_bodyparts(Work);
return parts * HARVEST_POWER;
}
error!("Creep has an invalid id!");
0
}
/// Get Harvest Mineral Power
pub fn harvest_mineral_power(&self) -> u32 {
if self.is_power_creep() {
return 0;
}
if let Some(creep) = self.creep() {
let parts = creep.get_active_bodyparts(Work);
return parts * HARVEST_MINERAL_POWER;
}
error!("Creep has an invalid id!");
0
}
/// Get Build Power
pub fn build_power(&self) -> u32 {
if self.is_power_creep() {
return 0;
}
if let Some(creep) = self.creep() {
let parts = creep.get_active_bodyparts(Work);
return parts * BUILD_POWER;
}
error!("Creep has an invalid id!");
0
}
/// Get the attack power
pub fn attack_power(&self) -> u32 {
if let Some(creep) = self.creep() {
let parts = creep.get_active_bodyparts(Attack);
return parts * ATTACK_POWER;
}
error!("Creep has an invalid id!");
0
}
/// Get ranged attack power
pub fn ranged_attack_power(&self) -> u32 {
if let Some(creep) = self.creep() {
let parts = creep.get_active_bodyparts(RangedAttack);
return parts * RANGED_ATTACK_POWER;
}
error!("Creep has an invalid id!");
0
}
/// Get carry capacity
pub fn carry_capacity(&self) -> u32 {
if let Some(creep) = self.creep() {
let parts = creep.get_active_bodyparts(Carry);
return parts * CARRY_CAPACITY;
}
error!("Creep has an invalid id!");
0
}
/// Get dismantle power
pub fn dismantle_power(&self) -> u32 {
if let Some(creep) = self.creep() {
let parts = creep.get_active_bodyparts(Work);
return parts * DISMANTLE_POWER;
}
error!("Creep has an invalid id!");
0
}
/// Get heal power
pub fn heal_power(&self) -> u32 {
if let Some(creep) = self.creep() {
let parts = creep.get_active_bodyparts(Heal);
return parts * HEAL_POWER;
}
error!("Creep has an invalid id!");
0
}
/// Get Ranged Heal Power
pub fn ranged_heal_power(&self) -> u32 {
if let Some(creep) = self.creep() {
let parts = creep.get_active_bodyparts(Heal);
return parts * RANGED_HEAL_POWER;
}
error!("Creep has an invalid id!");
0
}
/// Repair power
pub fn repair_power(&self) -> u32 {
if let Some(creep) = self.creep() {
let parts = creep.get_active_bodyparts(Work);
return parts * REPAIR_POWER;
}
error!("Creep has an invalid id!");
0
}
/// Upgrade power
pub fn upgrade_power(&self) -> u32 {
if let Some(creep) = self.creep() {
let parts = creep.get_active_bodyparts(Work);
return parts * UPGRADE_CONTROLLER_POWER;
}
error!("Creep has an invalid id!");
0
}
}
impl CommonCreepData {
/// Update this creeps data
pub fn update(&mut self, creep: Creep) {
if self.id != creep.id() {
warn!("Attempting to update creep with invalid data");
return;
}
// Update position
self.pos = creep.pos().into();
// update hits
self.hits = creep.hits();
// update store
for r in creep.store_types() {
self.store.insert(r, creep.store_of(r));
}
}
/// Get the parts
pub fn parts(&self) -> HashSet<Part> {
self.parts.clone()
}
    /// Get the creep's name
pub fn name(&self) -> &str {
&self.name
}
}
| 21.407666 | 67 | 0.594401 |
3afdd2bb6092d37dd10d1e90de9803003e9a3bd5 | 1,616 | //! [](https://opensource.org/licenses/BSD-2-Clause)
//! [](https://opensource.org/licenses/MIT)
//! [](https://ci.appveyor.com/project/KizzyCode/ezexec-rust)
//! [](https://docs.rs/ezexec)
//! [](https://crates.io/crates/ezexec)
//! [](https://crates.io/crates/ezexec)
//! [](https://deps.rs/crate/ezexec/0.1.0)
//!
//!
//! # `ezexec`
//! Welcome to `ezexec` 🎉
//!
//! `ezexec` provides a simple API to execute binaries or shell commands. Furthermore it implements a trivial but usually
//! good-enough API to find a binary in `PATH` or to get the current shell.
//!
//!
//! ## Example
//! ```rust
//! # use ezexec::{ ExecBuilder, error::Result };
//! #
//! # fn list() -> Result {
//! // Lists all files in the current directory and forwards the output to the parent's stdout
//! ExecBuilder::with_shell("ls")?
//! .spawn_transparent()?
//! .wait()?;
//! # Ok(())
//! # }
//! ```
#[macro_use] pub mod error;
pub mod lookup;
mod builder;
mod capturing_executor;
mod transparent_executor;
pub use crate::{
builder::ExecBuilder,
capturing_executor::CapturingExecutor,
transparent_executor::TransparentExecutor
}; | 39.414634 | 158 | 0.676361 |
db6207da39a2e5b9d89fa6caef76c97fcaa08029 | 1,066 | // Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Utilities for approximate ordering - especially useful for floating point
//! types, where NaNs cannot be ordered.
pub fn min<T: PartialOrd>(x: T, y: T) -> T {
if x <= y {
x
} else {
y
}
}
pub fn max<T: PartialOrd>(x: T, y: T) -> T {
if x >= y {
x
} else {
y
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_min() {
assert!(min(0u32, 1u32) == 0u32);
assert!(min(-1.0f32, 0.0f32) == -1.0f32);
}
#[test]
fn test_max() {
assert!(max(0u32, 1u32) == 1u32);
assert!(max(-1.0f32, 0.0f32) == 0.0f32);
}
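    // Sketch of the NaN behaviour the module docs mention: comparisons against
    // NaN are false, so `min`/`max` fall through to the `else` arm and return
    // their second argument.
    #[test]
    fn test_nan() {
        let nan = 0.0f64 / 0.0f64;
        assert!(min(nan, 1.0f64) == 1.0f64);
        assert!(max(nan, 1.0f64) == 1.0f64);
        assert!(min(1.0f64, nan).is_nan());
        assert!(max(1.0f64, nan).is_nan());
    }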
}
| 23.688889 | 68 | 0.587242 |
503b2e08c39bb8e27f11c0c4279666f0ec660ba0 | 766 | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// rustc-env:RUST_NEW_ERROR_FORMAT
trait Parser<T> {
fn parse(text: &str) -> Option<T>;
}
impl<bool> Parser<bool> for bool {
fn parse(text: &str) -> Option<bool> {
Some(true) //~ ERROR mismatched types
}
}
fn main() {
println!("{}", bool::parse("ok").unwrap_or(false));
}
| 29.461538 | 68 | 0.685379 |
50f912bdb57999e94c11bfcb4fd698f66dafb795 | 11,205 | // Copyright (c) 2015-2021 Georg Brandl. Licensed under the Apache License,
// Version 2.0 <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at
// your option. This file may not be copied, modified, or distributed except
// according to those terms.
//! Python values, and serialization instances for them.
use std::fmt;
use std::cmp::Ordering;
use std::collections::{BTreeMap, BTreeSet};
use num_bigint::BigInt;
use num_traits::{Signed, ToPrimitive};
pub use crate::value_impls::{to_value, from_value};
use crate::error::{Error, ErrorCode};
/// Represents all primitive builtin Python values that can be restored by
/// unpickling.
///
/// Note on integers: the distinction between the two types (short and long) is
/// very fuzzy in Python, and they can be used interchangeably. In Python 3,
/// all integers are long integers, so all are pickled as such. While decoding,
/// integers that fit into an i64 are stored as `I64`, and `BigInt` is used for
/// the rest.
#[derive(Clone, Debug, PartialEq)]
pub enum Value {
/// None
None,
/// Boolean
Bool(bool),
/// Short integer
I64(i64),
/// Long integer (unbounded length)
Int(BigInt),
/// Float
F64(f64),
/// Bytestring
Bytes(Vec<u8>),
/// Unicode string
String(String),
/// List
List(Vec<Value>),
/// Tuple
Tuple(Vec<Value>),
/// Set
Set(BTreeSet<HashableValue>),
/// Frozen (immutable) set
FrozenSet(BTreeSet<HashableValue>),
/// Dictionary (map)
Dict(BTreeMap<HashableValue, Value>),
}
/// Represents all primitive builtin Python values that can be contained
/// in a "hashable" context (i.e., as dictionary keys and set elements).
///
/// In Rust, the type is *not* hashable, since we use B-tree maps and sets
/// instead of the hash variants. To be able to put all Value instances
/// into these B-trees, we implement a consistent ordering between all
/// the possible types (see below).
#[derive(Clone, Debug)]
pub enum HashableValue {
/// None
None,
/// Boolean
Bool(bool),
/// Short integer
I64(i64),
/// Long integer
Int(BigInt),
/// Float
F64(f64),
/// Bytestring
Bytes(Vec<u8>),
/// Unicode string
String(String),
/// Tuple
Tuple(Vec<HashableValue>),
/// Frozen (immutable) set
FrozenSet(BTreeSet<HashableValue>),
}
fn values_to_hashable(values: Vec<Value>) -> Result<Vec<HashableValue>, Error> {
values.into_iter().map(Value::into_hashable).collect()
}
fn hashable_to_values(values: Vec<HashableValue>) -> Vec<Value> {
values.into_iter().map(HashableValue::into_value).collect()
}
impl Value {
/// Convert the value into a hashable version, if possible. If not, return
/// a ValueNotHashable error.
pub fn into_hashable(self) -> Result<HashableValue, Error> {
match self {
Value::None => Ok(HashableValue::None),
Value::Bool(b) => Ok(HashableValue::Bool(b)),
Value::I64(i) => Ok(HashableValue::I64(i)),
Value::Int(i) => Ok(HashableValue::Int(i)),
Value::F64(f) => Ok(HashableValue::F64(f)),
Value::Bytes(b) => Ok(HashableValue::Bytes(b)),
Value::String(s) => Ok(HashableValue::String(s)),
Value::FrozenSet(v) => Ok(HashableValue::FrozenSet(v)),
Value::Tuple(v) => values_to_hashable(v).map(HashableValue::Tuple),
_ => Err(Error::Syntax(ErrorCode::ValueNotHashable))
}
}
}
impl HashableValue {
/// Convert the value into its non-hashable version. This always works.
pub fn into_value(self) -> Value {
match self {
HashableValue::None => Value::None,
HashableValue::Bool(b) => Value::Bool(b),
HashableValue::I64(i) => Value::I64(i),
HashableValue::Int(i) => Value::Int(i),
HashableValue::F64(f) => Value::F64(f),
HashableValue::Bytes(b) => Value::Bytes(b),
HashableValue::String(s) => Value::String(s),
HashableValue::FrozenSet(v) => Value::FrozenSet(v),
HashableValue::Tuple(v) => Value::Tuple(hashable_to_values(v)),
}
}
}
fn write_elements<'a, I, T>(f: &mut fmt::Formatter, it: I,
prefix: &'static str, suffix: &'static str,
len: usize, always_comma: bool) -> fmt::Result
where I: Iterator<Item=&'a T>, T: fmt::Display + 'a
{
f.write_str(prefix)?;
for (i, item) in it.enumerate() {
if i < len - 1 || always_comma {
write!(f, "{}, ", item)?;
} else {
write!(f, "{}", item)?;
}
}
f.write_str(suffix)
}
impl fmt::Display for Value {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Value::None => write!(f, "None"),
Value::Bool(b) => write!(f, "{}", if b { "True" } else { "False" }),
Value::I64(i) => write!(f, "{}", i),
Value::Int(ref i) => write!(f, "{}", i),
Value::F64(v) => write!(f, "{}", v),
            Value::Bytes(ref b) => write!(f, "b{:?}", b),
Value::String(ref s) => write!(f, "{:?}", s),
Value::List(ref v) => write_elements(f, v.iter(), "[", "]", v.len(), false),
Value::Tuple(ref v) => write_elements(f, v.iter(), "(", ")", v.len(), v.len() == 1),
Value::FrozenSet(ref v) => write_elements(f, v.iter(),
"frozenset([", "])", v.len(), false),
Value::Set(ref v) => if v.is_empty() {
write!(f, "set()")
} else {
write_elements(f, v.iter(), "{", "}", v.len(), false)
},
Value::Dict(ref v) => {
write!(f, "{{")?;
for (i, (key, value)) in v.iter().enumerate() {
if i < v.len() - 1 {
write!(f, "{}: {}, ", key, value)?;
} else {
write!(f, "{}: {}", key, value)?;
}
}
write!(f, "}}")
},
}
}
}
impl fmt::Display for HashableValue {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
HashableValue::None => write!(f, "None"),
HashableValue::Bool(b) => write!(f, "{}", if b { "True" } else { "False" }),
HashableValue::I64(i) => write!(f, "{}", i),
HashableValue::Int(ref i) => write!(f, "{}", i),
HashableValue::F64(v) => write!(f, "{}", v),
            HashableValue::Bytes(ref b) => write!(f, "b{:?}", b),
HashableValue::String(ref s) => write!(f, "{:?}", s),
HashableValue::Tuple(ref v) => write_elements(f, v.iter(), "(", ")",
v.len(), v.len() == 1),
HashableValue::FrozenSet(ref v) => write_elements(f, v.iter(), "frozenset([", "])",
v.len(), false),
}
}
}
impl PartialEq for HashableValue {
fn eq(&self, other: &HashableValue) -> bool {
self.cmp(other) == Ordering::Equal
}
}
impl Eq for HashableValue {}
impl PartialOrd for HashableValue {
fn partial_cmp(&self, other: &HashableValue) -> Option<Ordering> {
Some(self.cmp(other))
}
}
/// Implement a (more or less) consistent ordering for `HashableValue`s
/// so that they can be added to dictionaries and sets.
///
/// Also, like in Python, numeric values with the same value (integral or not)
/// must compare equal.
///
/// For other types, we define an ordering between all types A and B so that all
/// objects of type A are always lesser than objects of type B. This is done
/// similar to Python 2's ordering of different types.
impl Ord for HashableValue {
fn cmp(&self, other: &HashableValue) -> Ordering {
use self::HashableValue::*;
match *self {
None => match *other {
None => Ordering::Equal,
_ => Ordering::Less
},
Bool(b) => match *other {
None => Ordering::Greater,
Bool(b2) => b.cmp(&b2),
I64(i2) => (b as i64).cmp(&i2),
Int(ref bi) => BigInt::from(b as i64).cmp(bi),
F64(f) => float_ord(b as i64 as f64, f),
_ => Ordering::Less
},
I64(i) => match *other {
None => Ordering::Greater,
Bool(b) => i.cmp(&(b as i64)),
I64(i2) => i.cmp(&i2),
Int(ref bi) => BigInt::from(i).cmp(bi),
F64(f) => float_ord(i as f64, f),
_ => Ordering::Less
},
Int(ref bi) => match *other {
None => Ordering::Greater,
Bool(b) => bi.cmp(&BigInt::from(b as i64)),
I64(i) => bi.cmp(&BigInt::from(i)),
Int(ref bi2) => bi.cmp(bi2),
F64(f) => float_bigint_ord(bi, f),
_ => Ordering::Less
},
F64(f) => match *other {
None => Ordering::Greater,
Bool(b) => float_ord(f, b as i64 as f64),
I64(i) => float_ord(f, i as f64),
                Int(ref bi) => float_bigint_ord(bi, f).reverse(),
F64(f2) => float_ord(f, f2),
_ => Ordering::Less
},
Bytes(ref bs) => match *other {
String(_) | FrozenSet(_) |
Tuple(_) => Ordering::Less,
Bytes(ref bs2) => bs.cmp(bs2),
_ => Ordering::Greater
},
String(ref s) => match *other {
FrozenSet(_) |
Tuple(_) => Ordering::Less,
String(ref s2) => s.cmp(s2),
_ => Ordering::Greater
},
FrozenSet(ref s) => match *other {
Tuple(_) => Ordering::Less,
FrozenSet(ref s2) => s.cmp(s2),
_ => Ordering::Greater
},
Tuple(ref t) => match *other {
Tuple(ref t2) => t.cmp(t2),
_ => Ordering::Greater
},
}
}
}
/// A "reasonable" total ordering for floats.
fn float_ord(f: f64, g: f64) -> Ordering {
match f.partial_cmp(&g) {
Some(o) => o,
None => Ordering::Less
}
}
/// Ordering between floats and big integers.
fn float_bigint_ord(bi: &BigInt, g: f64) -> Ordering {
match bi.to_f64() {
Some(f) => float_ord(f, g),
None => if bi.is_positive() { Ordering::Greater } else { Ordering::Less }
}
}
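// Sanity checks for the conversions, the cross-type ordering and the
// Python-style `Display` output defined above; they only exercise items from
// this module.
#[cfg(test)]
mod value_tests {
    use super::*;
    #[test]
    fn hashable_round_trip() {
        let value = Value::Tuple(vec![Value::I64(1), Value::String("a".into())]);
        let hashable = value.clone().into_hashable().unwrap();
        assert_eq!(hashable.into_value(), value);
        // Mutable containers cannot be turned into dictionary keys.
        assert!(Value::List(vec![]).into_hashable().is_err());
    }
    #[test]
    fn cross_type_ordering() {
        // Numeric values with the same value compare equal, as documented on
        // the `Ord` impl; other types are ordered by type, like in Python 2.
        assert_eq!(HashableValue::I64(1), HashableValue::F64(1.0));
        assert_eq!(HashableValue::Bool(true), HashableValue::I64(1));
        assert!(HashableValue::None < HashableValue::Bool(false));
        assert!(HashableValue::Bytes(vec![1]) < HashableValue::String("a".into()));
    }
    #[test]
    fn python_style_display() {
        assert_eq!(Value::None.to_string(), "None");
        assert_eq!(Value::Bool(true).to_string(), "True");
        assert_eq!(Value::List(vec![Value::I64(1), Value::I64(2)]).to_string(), "[1, 2]");
        assert_eq!(Value::String("abc".into()).to_string(), "\"abc\"");
    }
}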
| 37.474916 | 97 | 0.495047 |
9cba4143d42e385bbbb0e62c08a8761591fe8ffa | 6,493 | // Copyright 2020 Parity Technologies (UK) Ltd.
// This file is part of Tetcoin.
// Tetcoin is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Tetcoin is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Tetcoin. If not, see <http://www.gnu.org/licenses/>.
//! Chain specifications for the test runtime.
use tp_authority_discovery::AuthorityId as AuthorityDiscoveryId;
use babe_primitives::AuthorityId as BabeId;
use grandpa::AuthorityId as GrandpaId;
use noble_staking::Forcing;
use tetcoin_primitives::v1::{ValidatorId, AccountId, AssignmentId};
use tetcoin_service::chain_spec::{get_account_id_from_seed, get_from_seed, Extensions};
use tetcoin_test_runtime::constants::currency::TETS;
use tc_chain_spec::{ChainSpec, ChainType};
use tet_core::sr25519;
use tp_runtime::Perbill;
const DEFAULT_PROTOCOL_ID: &str = "tet";
/// The `ChainSpec` parametrized for tetcoin test runtime.
pub type TetcoinChainSpec =
service::GenericChainSpec<tetcoin_test_runtime::GenesisConfig, Extensions>;
/// Local testnet config (multivalidator Alice + Bob)
pub fn tetcoin_local_testnet_config() -> TetcoinChainSpec {
TetcoinChainSpec::from_genesis(
"Local Testnet",
"local_testnet",
ChainType::Local,
|| tetcoin_local_testnet_genesis(),
vec![],
None,
Some(DEFAULT_PROTOCOL_ID),
None,
Default::default(),
)
}
/// Local testnet genesis config (multivalidator Alice + Bob)
pub fn tetcoin_local_testnet_genesis() -> tetcoin_test_runtime::GenesisConfig {
tetcoin_testnet_genesis(
vec![
get_authority_keys_from_seed("Alice"),
get_authority_keys_from_seed("Bob"),
],
get_account_id_from_seed::<sr25519::Public>("Alice"),
None,
)
}
/// Helper function to generate stash, controller and session key from seed
fn get_authority_keys_from_seed(
seed: &str,
) -> (AccountId, AccountId, BabeId, GrandpaId, ValidatorId, AssignmentId, AuthorityDiscoveryId) {
(
get_account_id_from_seed::<sr25519::Public>(&format!("{}//stash", seed)),
get_account_id_from_seed::<sr25519::Public>(seed),
get_from_seed::<BabeId>(seed),
get_from_seed::<GrandpaId>(seed),
get_from_seed::<ValidatorId>(seed),
get_from_seed::<AssignmentId>(seed),
get_from_seed::<AuthorityDiscoveryId>(seed),
)
}
fn testnet_accounts() -> Vec<AccountId> {
vec![
get_account_id_from_seed::<sr25519::Public>("Alice"),
get_account_id_from_seed::<sr25519::Public>("Bob"),
get_account_id_from_seed::<sr25519::Public>("Charlie"),
get_account_id_from_seed::<sr25519::Public>("Dave"),
get_account_id_from_seed::<sr25519::Public>("Eve"),
get_account_id_from_seed::<sr25519::Public>("Ferdie"),
get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
get_account_id_from_seed::<sr25519::Public>("Charlie//stash"),
get_account_id_from_seed::<sr25519::Public>("Dave//stash"),
get_account_id_from_seed::<sr25519::Public>("Eve//stash"),
get_account_id_from_seed::<sr25519::Public>("Ferdie//stash"),
]
}
/// Helper function to create tetcoin GenesisConfig for testing
fn tetcoin_testnet_genesis(
initial_authorities: Vec<(
AccountId,
AccountId,
BabeId,
GrandpaId,
ValidatorId,
AssignmentId,
AuthorityDiscoveryId,
)>,
root_key: AccountId,
endowed_accounts: Option<Vec<AccountId>>,
) -> tetcoin_test_runtime::GenesisConfig {
use tetcoin_test_runtime as runtime;
let endowed_accounts: Vec<AccountId> = endowed_accounts.unwrap_or_else(testnet_accounts);
const ENDOWMENT: u128 = 1_000_000 * TETS;
const STASH: u128 = 100 * TETS;
runtime::GenesisConfig {
fabric_system: Some(runtime::SystemConfig {
code: runtime::WASM_BINARY.expect("Wasm binary must be built for testing").to_vec(),
..Default::default()
}),
noble_indices: Some(runtime::IndicesConfig { indices: vec![] }),
noble_balances: Some(runtime::BalancesConfig {
balances: endowed_accounts
.iter()
.map(|k| (k.clone(), ENDOWMENT))
.collect(),
}),
noble_session: Some(runtime::SessionConfig {
keys: initial_authorities
.iter()
.map(|x| {
(
x.0.clone(),
x.0.clone(),
runtime::SessionKeys {
babe: x.2.clone(),
grandpa: x.3.clone(),
para_validator: x.4.clone(),
para_assignment: x.5.clone(),
authority_discovery: x.6.clone(),
},
)
})
.collect::<Vec<_>>(),
}),
noble_staking: Some(runtime::StakingConfig {
minimum_validator_count: 1,
validator_count: 2,
stakers: initial_authorities
.iter()
.map(|x| {
(
x.0.clone(),
x.1.clone(),
STASH,
runtime::StakerStatus::Validator,
)
})
.collect(),
invulnerables: initial_authorities.iter().map(|x| x.0.clone()).collect(),
force_era: Forcing::NotForcing,
slash_reward_fraction: Perbill::from_percent(10),
..Default::default()
}),
noble_babe: Some(Default::default()),
noble_grandpa: Some(Default::default()),
noble_authority_discovery: Some(runtime::AuthorityDiscoveryConfig { keys: vec![] }),
claims: Some(runtime::ClaimsConfig {
claims: vec![],
vesting: vec![],
}),
noble_vesting: Some(runtime::VestingConfig { vesting: vec![] }),
noble_sudo: Some(runtime::SudoConfig { key: root_key }),
parachains_configuration: Some(runtime::ParachainsConfigurationConfig {
config: tetcoin_runtime_parachains::configuration::HostConfiguration {
validation_upgrade_frequency: 10u32,
validation_upgrade_delay: 5,
acceptance_period: 1200,
max_code_size: 5 * 1024 * 1024,
max_pov_size: 50 * 1024 * 1024,
max_head_data_size: 32 * 1024,
group_rotation_frequency: 20,
chain_availability_period: 4,
thread_availability_period: 4,
no_show_slots: 10,
..Default::default()
},
}),
}
}
/// Can be called for a `Configuration` to check if it is a configuration for the `Test` network.
pub trait IdentifyVariant {
/// Returns if this is a configuration for the `Test` network.
fn is_test(&self) -> bool;
}
impl IdentifyVariant for Box<dyn ChainSpec> {
fn is_test(&self) -> bool {
self.id().starts_with("test")
}
}
| 31.985222 | 97 | 0.718004 |
1ca2e0f8d3ae82b106af3be982a7a4925de32d6f | 93,242 | /*
* Rust-FMOD - Copyright (c) 2014 Gomez Guillaume.
*
* The Original software, FmodEx library, is provided by FIRELIGHT TECHNOLOGIES.
*
* This software is provided 'as-is', without any express or implied warranty.
* In no event will the authors be held liable for any damages arising from
* the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not claim
* that you wrote the original software. If you use this software in a product,
* an acknowledgment in the product documentation would be appreciated but is
* not required.
*
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
*
* 3. This notice may not be removed or altered from any source distribution.
*/
use libc::{c_void, c_uint, c_int, c_char, c_short};
use ffi;
use types::*;
use sound;
use sound::Sound;
use sound_group;
use std::mem;
use channel_group;
use channel;
use dsp;
use dsp::Dsp;
use vector;
use reverb_properties;
use geometry;
use reverb;
use dsp_connection;
use std::default::Default;
use callbacks::*;
use std;
use file;
use libc::FILE;
use c_vec::CVec;
use std::ffi::CString;
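// FMOD reports file I/O through plain C function pointers, so the user's Rust
// callbacks cannot be handed to it directly. They are instead stored in a
// process-wide `SysCallback` table (see `get_saved_sys_callback` below); the
// `extern "C"` trampolines defined in this file look them up, convert the raw
// C arguments into Rust types and forward the call.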
fn get_saved_sys_callback<'r>() -> &'r mut SysCallback {
    static mut CALLBACK: SysCallback = SysCallback {
        file_open: None,
        file_close: None,
        file_read: None,
        file_seek: None
    };
    unsafe { &mut CALLBACK }
}
struct SysCallback {
file_open: FileOpenCallback,
file_close: FileCloseCallback,
file_read: FileReadCallback,
file_seek: FileSeekCallback
}
impl SysCallback {
fn new() -> SysCallback {
SysCallback {
file_open: None,
file_close: None,
file_read: None,
file_seek: None
}
}
}
extern "C" fn file_open_callback(name: *mut c_char, unicode: c_int, file_size: *mut c_uint,
handle: *mut *mut c_void,
user_data: *mut *mut c_void) -> ::Status {
let tmp = get_saved_sys_callback();
match tmp.file_open {
Some(s) => {
let t_name = if name.is_null() {
String::new()
} else {
                let l = ffi::strlen(name);
                // Copy the bytes out; taking ownership of FMOD's buffer with
                // `String::from_raw_parts` would free memory we don't own.
                unsafe { String::from_utf8_lossy(std::slice::from_raw_parts(name as *const u8, l)).into_owned() }
};
match s(t_name.as_ref(), unicode) {
Some((f, s)) => {
unsafe {
*file_size = f.get_file_size() as u32;
*handle = file::get_ffi(&f) as *mut c_void;
*user_data = match s {
Some(mut d) => std::mem::transmute(&mut d),
None => ::std::ptr::null_mut()
};
}
::Status::Ok
}
None => {
unsafe {
*file_size = 0u32;
*handle = std::ptr::null_mut();
*user_data = std::ptr::null_mut();
}
::Status::FileNotFound
}
}
},
None => {
unsafe {
*file_size = 0u32;
*handle = std::ptr::null_mut();
*user_data = std::ptr::null_mut();
}
::Status::Ok
}
}
}
extern "C" fn file_close_callback(handle: *mut c_void, user_data: *mut c_void) -> ::Status {
let tmp = get_saved_sys_callback();
match tmp.file_close {
Some(s) => {
unsafe {
s(&mut file::from_ffi(handle as *mut FILE), if user_data.is_null() {
None
} else {
Some(std::mem::transmute(user_data))
});
}
::Status::Ok
}
None => ::Status::Ok
}
}
extern "C" fn file_read_callback(handle: *mut c_void, buffer: *mut c_void, size_bytes: c_uint,
bytes_read: *mut c_uint, user_data: *mut c_void) -> ::Status {
let tmp = get_saved_sys_callback();
match tmp.file_read {
Some(s) => {
unsafe {
let mut data_vec : CVec<u8> = CVec::new(buffer as *mut u8, size_bytes as usize);
let read_bytes = s(&mut file::from_ffi(handle as *mut FILE), data_vec.as_mut(),
size_bytes, if user_data.is_null() {
None
} else {
Some(std::mem::transmute(user_data))
});
*bytes_read = read_bytes as u32;
if read_bytes < size_bytes as usize {
::Status::FileEOF
} else {
::Status::Ok
}
}
}
None => ::Status::Ok
}
}
extern "C" fn file_seek_callback(handle: *mut c_void, pos: c_uint,
user_data: *mut c_void) -> ::Status {
let tmp = get_saved_sys_callback();
match tmp.file_seek {
Some(s) => {
unsafe {
s(&mut file::from_ffi(handle as *mut FILE), pos, if user_data.is_null() {
None
} else {
Some(std::mem::transmute(user_data))
});
}
::Status::Ok
}
None => ::Status::Ok
}
}
extern "C" fn pcm_read_callback(sound: *mut ffi::FMOD_SOUND, data: *mut c_void,
data_len: c_uint) -> ::Status {
unsafe {
if !sound.is_null() {
let mut tmp = ::std::ptr::null_mut();
ffi::FMOD_Sound_GetUserData(sound, &mut tmp);
if !tmp.is_null() {
let callbacks : &mut ffi::SoundData = std::mem::transmute(tmp);
match callbacks.pcm_read {
Some(p) => {
let max = data_len as isize >> 2;
let mut data_vec = CVec::new(data as *mut c_short, max as usize * 2);
let ret = p(&ffi::FFI::wrap(sound), data_vec.as_mut());
ret
},
None => ::Status::Ok
}
} else {
::Status::Ok
}
} else {
::Status::Ok
}
}
}
extern "C" fn non_block_callback(sound: *mut ffi::FMOD_SOUND, result: ::Status) -> ::Status {
unsafe {
if !sound.is_null() {
let mut tmp = ::std::ptr::null_mut();
ffi::FMOD_Sound_GetUserData(sound, &mut tmp);
if !tmp.is_null() {
let callbacks : &mut ffi::SoundData = ::std::mem::transmute(tmp);
match callbacks.non_block {
Some(p) => p(&ffi::FFI::wrap(sound), result),
None => ::Status::Ok
}
} else {
::Status::Ok
}
} else {
::Status::Ok
}
}
}
extern "C" fn pcm_set_pos_callback(sound: *mut ffi::FMOD_SOUND, sub_sound: c_int, position: c_uint,
postype: ffi::FMOD_TIMEUNIT) -> ::Status {
unsafe {
if !sound.is_null() {
let mut tmp = ::std::ptr::null_mut();
ffi::FMOD_Sound_GetUserData(sound, &mut tmp);
if !tmp.is_null() {
let callbacks : &mut ffi::SoundData = ::std::mem::transmute(tmp);
match callbacks.pcm_set_pos {
Some(p) => p(&ffi::FFI::wrap(sound), sub_sound, position, TimeUnit(postype)),
None => ::Status::Ok
}
} else {
::Status::Ok
}
} else {
::Status::Ok
}
}
}
/// Structure describing a globally unique identifier.
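///
/// A minimal sketch of how a `Guid` is usually obtained in this binding, assuming an
/// initialized [`Sys`](struct.Sys.html) called `sys`:
///
/// ```ignore
/// if let Ok((guid, name)) = sys.get_driver_info(0, 256) {
///     println!("driver `{}`: {:08X}-{:04X}-{:04X}", name, guid.data1, guid.data2, guid.data3);
/// }
/// ```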
pub struct Guid
{
    /// Specifies the first 8 hexadecimal digits of the GUID.
pub data1: u32,
/// Specifies the first group of 4 hexadecimal digits.
pub data2: u16,
/// Specifies the second group of 4 hexadecimal digits.
pub data3: u16,
/// Array of 8 bytes. The first 2 bytes contain the third group of 4 hexadecimal digits. The
/// remaining 6 bytes contain the final 12 hexadecimal digits.
pub data4: [u8; 8]
}
impl Default for Guid {
fn default() -> Guid {
Guid {
data1: 0u32,
data2: 0u16,
data3: 0u16,
data4: [0u8; 8]
}
}
}
/// Structure used to store user data for the file callbacks.
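///
/// A short usage sketch (the `counter` value is purely illustrative):
///
/// ```ignore
/// let mut counter = 0i32;
/// let mut data = UserData::default();
/// data.set_user_data(&mut counter);
/// if let Some(value) = data.get_user_data::<i32>() {
///     *value += 1;
/// }
/// ```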
pub struct UserData {
user_data: *mut c_void
}
impl UserData {
pub fn set_user_data<'r, T>(&'r mut self, user_data: &'r mut T) {
unsafe { self.user_data = std::mem::transmute(user_data) }
}
pub fn get_user_data<'r, T>(&self) -> Option<&'r mut T> {
if self.user_data.is_null() {
None
} else {
Some(unsafe { std::mem::transmute(self.user_data) })
}
}
}
impl Default for UserData {
fn default() -> UserData {
UserData {
user_data: ::std::ptr::null_mut()
}
}
}
/// Wrapper for arguments of
/// [`Sys::set_software_format`](struct.Sys.html#method.set_software_format) and
/// [`Sys::get_software_format`](struct.Sys.html#method.get_software_format).
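///
/// A minimal sketch, assuming an initialized [`Sys`](struct.Sys.html) called `sys`:
///
/// ```ignore
/// if let Ok(format) = sys.get_software_format() {
///     println!("mixer: {} Hz, {} output channel(s)",
///              format.sample_rate, format.num_output_channels);
/// }
/// ```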
pub struct SoftwareFormat
{
pub sample_rate : i32,
pub format : ::SoundFormat,
pub num_output_channels: i32,
pub max_input_channels : i32,
pub resample_method : ::DspResampler,
pub bits : i32
}
impl Default for SoftwareFormat {
fn default() -> SoftwareFormat {
SoftwareFormat {
sample_rate: 0i32,
format: ::SoundFormat::None,
num_output_channels: 0i32,
max_input_channels: 0i32,
resample_method: ::DspResampler::NoInterp,
bits: 0i32
}
}
}
/// Settings for advanced features like configuring memory and cpu usage for the
/// FMOD_CREATECOMPRESSEDSAMPLE feature.
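///
/// A hedged usage sketch (the values below are purely illustrative, and `sys` is assumed
/// to be an initialized [`Sys`](struct.Sys.html)):
///
/// ```ignore
/// let mut settings = AdvancedSettings::default();
/// settings.max_MPEG_codecs = 16;              // fewer simultaneous MPEG decodes
/// settings.default_decode_buffer_size = 800;  // 800 ms stream double buffer
/// sys.set_advanced_settings(&mut settings);
/// ```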
pub struct AdvancedSettings {
/// [r/w] Optional. Specify 0 to ignore. For use with FMOD_CREATECOMPRESSEDSAMPLE only. Mpeg
/// codecs consume 21,684 bytes per instance and this number will determine how many mpeg
/// channels can be played simultaneously. Default = 32.
pub max_MPEG_codecs : i32,
/// [r/w] Optional. Specify 0 to ignore. For use with FMOD_CREATECOMPRESSEDSAMPLE only. ADPCM
/// codecs consume 2,136 bytes per instance and this number will determine how many ADPCM
/// channels can be played simultaneously. Default = 32.
pub max_ADPCM_codecs : i32,
/// [r/w] Optional. Specify 0 to ignore. For use with FMOD_CREATECOMPRESSEDSAMPLE only. XMA
/// codecs consume 14,836 bytes per instance and this number will determine how many XMA
/// channels can be played simultaneously. Default = 32.
pub max_XMA_codecs : i32,
/// [r/w] Optional. Specify 0 to ignore. For use with FMOD_CREATECOMPRESSEDSAMPLE only. CELT
/// codecs consume 11,500 bytes per instance and this number will determine how many CELT
/// channels can be played simultaneously. Default = 32.
pub max_CELT_codecs : i32,
/// [r/w] Optional. Specify 0 to ignore. For use with FMOD_CREATECOMPRESSEDSAMPLE only. Vorbis
/// codecs consume 12,000 bytes per instance and this number will determine how many Vorbis
/// channels can be played simultaneously. Default = 32.
pub max_VORBIS_codecs : i32,
/// [r/w] Optional. Specify 0 to ignore. For use with FMOD_CREATECOMPRESSEDSAMPLE only. AT9
/// codecs consume 8,720 bytes per instance and this number will determine how many AT9
/// channels can be played simultaneously. Default = 32.
pub max_AT9_codecs : i32,
/// [r/w] Optional. Specify 0 to ignore. For use with PS3 only. PCM codecs consume 12,672 bytes
/// per instance and this number will determine how many streams and PCM voices can be played
/// simultaneously. Default = 16.
pub max_PCM_codecs : i32,
/// [r/w] Optional. Specify 0 to ignore. Number of channels available on the ASIO device.
pub ASIO_num_channels : i32,
/// [r/w] Optional. Specify 0 to ignore. Pointer to an array of strings (number of entries
/// defined by ASIONumChannels) with ASIO channel names.
pub ASIO_channel_list : Vec<String>,
/// [r/w] Optional. Specify 0 to ignore. Pointer to a list of speakers that the ASIO channels
/// map to. This can be called after [`Sys::init`](doc/rfmod/struct.Sys.html#method.init) to
/// remap ASIO output.
pub ASIO_speaker_list : Vec<::Speaker>,
/// [r/w] Optional. Specify 0 to ignore. The max number of 3d reverb DSP's in the system. (NOTE:
/// CURRENTLY DISABLED / UNUSED)
pub max_3D_reverb_DSPs : i32,
/// [r/w] Optional. For use with FMOD_INIT_HRTF_LOWPASS. The angle range (0-360) of a 3D sound
/// in relation to the listener, at which the HRTF function begins to have an effect. 0 = in
/// front of the listener. 180 = from 90 degrees to the left of the listener to 90 degrees to
/// the right. 360 = behind the listener. Default = 180.0.
pub HRTF_min_angle : f32,
/// [r/w] Optional. For use with FMOD_INIT_HRTF_LOWPASS. The angle range (0-360) of a 3D sound
/// in relation to the listener, at which the HRTF function has maximum effect. 0 = front of the
/// listener. 180 = from 90 degrees to the left of the listener to 90 degrees to the right. 360
/// = behind the listener. Default = 360.0.
pub HRTF_max_angle : f32,
/// [r/w] Optional. Specify 0 to ignore. For use with FMOD_INIT_HRTF_LOWPASS. The cutoff
/// frequency of the HRTF's lowpass filter function when at maximum effect. (i.e. at
/// HRTFMaxAngle). Default = 4000.0.
pub HRTF_freq : f32,
/// [r/w] Optional. Specify 0 to ignore. For use with FMOD_INIT_VOL0_BECOMES_VIRTUAL. If this
/// flag is used, and the volume is 0.0, then the sound will become virtual. Use this value to
/// raise the threshold to a different point where a sound goes virtual.
pub vol0_virtual_vol : f32,
/// [r/w] Optional. Specify 0 to ignore. For use with FMOD Event system only. Specifies the
/// number of slots available for simultaneous non blocking loads, across all threads. Default =
/// 32.
pub event_queue_size : i32,
/// [r/w] Optional. Specify 0 to ignore. For streams. This determines the default size of the
/// double buffer (in milliseconds) that a stream uses. Default = 400ms
pub default_decode_buffer_size : u32,
/// [r/w] Optional. Specify 0 to ignore. Gives fmod's logging system a path/filename. Normally
/// the log is placed in the same directory as the executable and called fmod.log. When using
/// [`Sys::get_advanced_settings`](doc/rfmod/struct.Sys.html#method.get_advanced_settings),
/// provide at least 256 bytes of memory to copy into.
pub debug_log_filename : String,
/// [r/w] Optional. Specify 0 to ignore. For use with FMOD_INIT_ENABLE_PROFILE. Specify the port
/// to listen on for connections by the
/// profiler application.
pub profile_port : u16,
    /// [r/w] Optional. Specify 0 to ignore. The maximum time in milliseconds it takes for a
    /// channel to fade to the new level when its occlusion changes.
pub geometry_max_fade_time : u32,
/// [r/w] Optional. Specify 0 to ignore. Tells
/// [`Sys::init`](doc/rfmod/struct.Sys.html#method.init) to allocate a pool of wavedata/spectrum
/// buffers to prevent memory fragmentation, any additional buffers will be allocated normally.
pub max_spectrum_wave_data_buffers: u32,
/// [r/w] Optional. Specify 0 to ignore. The delay the music system should allow for loading a
/// sample from disk (in milliseconds). Default = 400 ms.
pub music_system_cache_delay : u32,
/// [r/w] Optional. Specify 0 to ignore. For use with FMOD_INIT_DISTANCE_FILTERING. The default
/// center frequency in Hz for the distance filtering effect. Default = 1500.0.
pub distance_filter_center_freq : f32,
/// [r/w] Optional. Specify 0 to ignore. Specify the stack size for the FMOD Stream thread in
/// bytes. Useful for custom codecs that use excess stack. Default 49,152 (48kb)
pub stack_size_stream : u32,
/// [r/w] Optional. Specify 0 to ignore. Specify the stack size for the FMOD_NONBLOCKING loading
/// thread. Useful for custom codecs that use excess stack. Default 65,536 (64kb)
pub stack_size_non_blocking : u32,
/// [r/w] Optional. Specify 0 to ignore. Specify the stack size for the FMOD mixer thread.
/// Useful for custom dsps that use excess stack. Default 49,152 (48kb)
pub stack_size_mixer : u32,
}
impl Default for AdvancedSettings {
fn default() -> AdvancedSettings {
AdvancedSettings {
max_MPEG_codecs: 32i32,
max_ADPCM_codecs: 32i32,
max_XMA_codecs: 32i32,
max_CELT_codecs: 32i32,
max_VORBIS_codecs: 32i32,
max_AT9_codecs: 32i32,
max_PCM_codecs: 16i32,
ASIO_num_channels: 0i32,
ASIO_channel_list: Vec::new(),
ASIO_speaker_list: Vec::new(),
max_3D_reverb_DSPs: 0i32,
HRTF_min_angle: 180f32,
HRTF_max_angle: 360f32,
HRTF_freq: 4000f32,
vol0_virtual_vol: 0f32,
event_queue_size: 32i32,
default_decode_buffer_size: 400u32,
debug_log_filename: String::new(),
profile_port: 0u16,
geometry_max_fade_time: 0u32,
max_spectrum_wave_data_buffers: 0u32,
music_system_cache_delay: 400u32,
distance_filter_center_freq: 1500f32,
stack_size_stream: 49152u32,
stack_size_non_blocking: 65536u32,
stack_size_mixer: 49152u32,
}
}
}
/// Use this structure with [`Sys::create_sound`](struct.Sys.html#method.create_sound) when more
/// control is needed over loading. The possible reasons to use this with
/// [`Sys::create_sound`](struct.Sys.html#method.create_sound) are:
///
/// * Loading a file from memory.
/// * Loading a file from within another larger (possibly wad/pak) file, by giving the loader an
/// offset and length.
/// * To create a user created / non file based sound.
/// * To specify a starting subsound to seek to within a multi-sample sound (ie FSB/DLS/SF2) when
/// created as a stream.
/// * To specify which subsounds to load for multi-sample sounds (ie FSB/DLS/SF2) so that memory is
/// saved and only a subset is actually loaded/read from disk.
/// * To specify 'piggyback' read and seek callbacks for capture of sound data as fmod reads and
/// decodes it. Useful for ripping decoded PCM data from sounds as they are loaded / played.
/// * To specify a MIDI DLS/SF2 sample set file to load when opening a MIDI file.
///
/// See below on what members to fill for each of the above types of sound you want to create.
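///
/// A minimal sketch for the "offset inside a larger file" case above (the file name and
/// offsets are illustrative, and `sys` is assumed to be an initialized
/// [`Sys`](struct.Sys.html)):
///
/// ```ignore
/// let mut exinfo = CreateSoundexInfo::default();
/// exinfo.file_offset = 1024; // the sound starts 1 KiB into the container file
/// exinfo.length = 65536;     // and is 64 KiB long
/// let sound = sys.create_sound("data.pak", None, Some(&mut exinfo));
/// ```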
pub struct CreateSoundexInfo {
    /// [w] Optional. Specify 0 to ignore. Size in bytes of the file to load, or of the sound to
    /// create (the latter only if FMOD_OPENUSER is used). Required if loading from memory. If 0
    /// is specified, the size of the file is used (unless loading from memory, in which case an
    /// error will be returned).
pub length : u32,
/// [w] Optional. Specify 0 to ignore. Offset from start of the file to start loading from. This
/// is useful for loading files from inside big data files.
pub file_offset : u32,
    /// [w] Optional. Specify 0 to ignore. Number of channels in the sound; mandatory if
    /// FMOD_OPENUSER or FMOD_OPENRAW is used.
pub num_channels : i32,
    /// [w] Optional. Specify 0 to ignore. Default frequency of the sound; mandatory if
    /// FMOD_OPENUSER or FMOD_OPENRAW is used. Other formats use the frequency determined by the
    /// file format.
pub default_frequency : i32,
    /// [w] Optional. Specify 0 or ::SoundFormat::None to ignore. Format of the sound; mandatory
    /// if FMOD_OPENUSER or FMOD_OPENRAW is used. Other formats use the format determined by the
    /// file format.
pub format : ::SoundFormat,
/// [w] Optional. Specify 0 to ignore. For streams. This determines the size of the double
/// buffer (in PCM samples) that a stream uses. Use this for user created streams if you want to
/// determine the size of the callback buffer passed to you. Specify 0 to use FMOD's default
/// size which is currently equivalent to 400ms of the sound format created/loaded.
pub decode_buffer_size : u32,
/// [w] Optional. Specify 0 to ignore. In a multi-sample file format such as .FSB/.DLS/.SF2,
/// specify the initial subsound to seek to, only if FMOD_CREATESTREAM is used.
pub initial_subsound : i32,
/// [w] Optional. Specify 0 to ignore or have no subsounds. In a sound created with
    /// FMOD_OPENUSER, specify the number of subsounds that are accessible with
/// [`Sound::get_sub_sound`](doc/rfmod/struct.Sound.html#method.get_sub_sound). If not created
/// with FMOD_OPENUSER, this will limit the number of subsounds loaded within a multi-subsound
/// file. If using FSB, then if FMOD_CREATESOUNDEXINFO::inclusionlist is used, this will shuffle
/// subsounds down so that there are not any gaps. It will mean that the indices of the sounds
/// will be different.
pub num_subsounds : i32,
/// [w] Optional. Specify 0 to ignore. In a multi-sample format such as .FSB/.DLS/.SF2 it may be
/// desirable to specify only a subset of sounds to be loaded out of the whole file. This is an
/// array of subsound indices to load into memory when created.
pub inclusion_list : Vec<i32>,
/// [w] Optional. Specify 0 to ignore. Callback to 'piggyback' on FMOD's read functions and
/// accept or even write PCM data while FMOD is opening the sound. Used for user sounds created
/// with FMOD_OPENUSER or for capturing decoded data as FMOD reads it.
pub pcm_read_callback : SoundPcmReadCallback,
/// [w] Optional. Specify 0 to ignore. Callback for when the user calls a seeking function such
/// as [`Channel::set_time`](doc/rfmod/struct.Channel.html#method.set_time) or
/// [`Channel::set_position`](doc/rfmod/struct.Channel.html#method.set_position) within a
/// multi-sample sound, and for when it is opened.
pub pcm_set_pos_callback : SoundPcmSetPosCallback,
/// [w] Optional. Specify 0 to ignore. Callback for successful completion, or error while
    /// loading a sound that used the FMOD_NONBLOCKING flag. Also called during seeking, when
    /// setPosition is called or a stream is restarted.
pub non_block_callback : SoundNonBlockCallback,
/// [w] Optional. Specify 0 to ignore. Filename for a DLS or SF2 sample set when loading a MIDI
/// file. If not specified, on Windows it will attempt to open /windows/system32/drivers/gm.dls
/// or /windows/system32/drivers/etc/gm.dls, on Mac it will attempt to load
/// /System/Library/Components/CoreAudio.component/Contents/Resources/gs_instruments.dls,
/// otherwise the MIDI will fail to open. Current DLS support is for level 1 of the
/// specification.
pub dls_name : String,
/// [w] Optional. Specify 0 to ignore. Key for encrypted FSB file. Without this key an encrypted
/// FSB file will not load.
pub encryption_key : String,
/// [w] Optional. Specify 0 to ignore. For sequenced formats with dynamic channel allocation
/// such as .MID and .IT, this specifies the maximum voice count allowed while playing. .IT
/// defaults to 64. .MID defaults to 32.
pub max_polyphony : i32,
/// [w] Optional. Specify 0 to ignore. This is user data to be attached to the sound during
/// creation. Access via
/// [`Sound::get_user_data`](doc/rfmod/struct.Sound.html#method.get_user_data). Note: This is
/// not passed to FMOD_FILE_OPENCALLBACK, that is a different userdata that is file specific.
user_data : Box<ffi::SoundData>,
/// [w] Optional. Specify 0 or SoundTypeUnknown to ignore. Instead of scanning all codec types,
/// use this to speed up loading by making it jump straight to this codec.
pub suggested_sound_type : ::SoundType,
/// [w] Optional. Specify 0 to ignore. Callback for opening this file.
user_open : ffi::FMOD_FILE_OPENCALLBACK,
/// [w] Optional. Specify 0 to ignore. Callback for closing this file.
user_close : ffi::FMOD_FILE_CLOSECALLBACK,
/// [w] Optional. Specify 0 to ignore. Callback for reading from this file.
user_read : ffi::FMOD_FILE_READCALLBACK,
/// [w] Optional. Specify 0 to ignore. Callback for seeking within this file.
user_seek : ffi::FMOD_FILE_SEEKCALLBACK,
/// [w] Optional. Specify 0 to ignore. Callback for seeking within this file.
user_async_read : ffi::FMOD_FILE_ASYNCREADCALLBACK,
/// [w] Optional. Specify 0 to ignore. Callback for seeking within this file.
user_async_cancel : ffi::FMOD_FILE_ASYNCCANCELCALLBACK,
/// [w] Optional. Specify 0 to ignore. Use this to differ the way fmod maps multichannel sounds
/// to speakers. See SpeakerMapType for more.
pub speaker_map : ::SpeakerMapType,
/// [w] Optional. Specify 0 to ignore. Specify a sound group if required, to put sound in as it
/// is created.
pub initial_sound_group : sound_group::SoundGroup,
/// [w] Optional. Specify 0 to ignore. For streams. Specify an initial position to seek the
/// stream to.
pub initial_seek_position : u32,
/// [w] Optional. Specify 0 to ignore. For streams. Specify the time unit for the position set
/// in initialseekposition.
pub initial_seek_pos_type : TimeUnit,
/// [w] Optional. Specify true to ignore. Set to false to use fmod's built in file system.
    /// Ignores setFileSystem callbacks and also FMOD_CREATESOUNDEXINFO file callbacks. Useful
    /// for specific cases where you don't want to use your own file system but want to use
    /// fmod's file system (ie net streaming).
pub ignore_set_file_system : bool,
/// [w] Optional. Specify 0 to ignore. For CDDA sounds only - if non-zero use ASPI instead of
/// NTSCSI to access the specified CD/DVD device.
pub cdda_force_aspi : i32,
/// [w] Optional. Specify 0 or FMOD_AUDIOQUEUE_CODECPOLICY_DEFAULT to ignore. Policy used to
/// determine whether hardware or software is used for decoding, see FMOD_AUDIOQUEUE_CODECPOLICY
/// for options (iOS >= 3.0 required, otherwise only hardware is available)
pub audio_queue_policy : u32,
/// [w] Optional. Specify 0 to ignore. Allows you to set a minimum desired MIDI mixer
/// granularity. Values smaller than 512 give greater than default accuracy at the cost of more
/// CPU and vice versa. Specify 0 for default (512 samples).
pub min_midi_granularity : u32,
/// [w] Optional. Specify 0 to ignore. Specifies a thread index to execute non blocking load on.
/// Allows for up to 5 threads to be used for loading at once. This is to avoid one load
/// blocking another. Maximum value = 4.
pub non_block_thread_id : i32,
}
impl Default for CreateSoundexInfo {
fn default() -> CreateSoundexInfo {
CreateSoundexInfo {
length: 0u32,
file_offset: 0u32,
num_channels: 0i32,
default_frequency: 0i32,
format: ::SoundFormat::None,
decode_buffer_size: 0u32,
initial_subsound: 0i32,
num_subsounds: 0i32,
inclusion_list: Vec::new(),
pcm_read_callback: None,
pcm_set_pos_callback: None,
non_block_callback: None,
dls_name: String::new(),
encryption_key: String::new(),
max_polyphony: 0i32,
user_data: Box::new(ffi::SoundData::new()),
suggested_sound_type: ::SoundType::Unknown,
user_open: None,
user_close: None,
user_read: None,
user_seek: None,
user_async_read: None,
user_async_cancel: None,
speaker_map: ::SpeakerMapType::Default,
initial_sound_group: ffi::FFI::wrap(::std::ptr::null_mut()),
initial_seek_position: 0u32,
initial_seek_pos_type: TimeUnit(0u32),
ignore_set_file_system: true,
cdda_force_aspi: 0i32,
audio_queue_policy: 0u32,
min_midi_granularity: 0u32,
non_block_thread_id: 0i32,
}
}
}
impl CreateSoundexInfo {
fn convert_to_c(&mut self) -> ffi::FMOD_CREATESOUNDEXINFO {
let tmp_dls_name = CString::new(self.dls_name.clone()).unwrap();
let tmp_encryption_key = CString::new(self.encryption_key.clone()).unwrap();
ffi::FMOD_CREATESOUNDEXINFO{
cbsize: mem::size_of::<ffi::FMOD_CREATESOUNDEXINFO>() as i32,
length: self.length,
fileoffset: self.file_offset,
numchannels: self.num_channels,
defaultfrequency: self.default_frequency,
format: self.format,
decodebuffersize: self.decode_buffer_size,
initialsubsound: self.initial_subsound,
numsubsounds: self.num_subsounds,
inclusionlist: self.inclusion_list.as_mut_ptr(),
inclusionlistnum: self.inclusion_list.len() as i32,
pcmreadcallback: match self.pcm_read_callback {
Some(_) => Some(pcm_read_callback as extern "C" fn(*mut _, *mut _, _) -> _),
None => None
},
pcmsetposcallback: match self.pcm_set_pos_callback {
Some(_) => Some(pcm_set_pos_callback as extern "C" fn(*mut _, _, _, _) -> _),
None => None
},
nonblockcallback: match self.non_block_callback {
Some(_) => Some(non_block_callback as extern "C" fn(*mut _, _) -> _),
None => None
},
dlsname: tmp_dls_name.as_ptr() as *mut c_char,
encryptionkey: tmp_encryption_key.as_ptr() as *mut c_char,
maxpolyphony: self.max_polyphony,
userdata: {
self.user_data.non_block = self.non_block_callback;
self.user_data.pcm_read = self.pcm_read_callback;
self.user_data.pcm_set_pos = self.pcm_set_pos_callback;
unsafe { ::std::mem::transmute::<&mut ffi::SoundData, *mut c_void>(&mut *self.user_data) }
},
suggestedsoundtype: self.suggested_sound_type,
useropen: self.user_open,
userclose: self.user_close,
userread: self.user_read,
userseek: self.user_seek,
userasynccancel: self.user_async_cancel,
userasyncread: self.user_async_read,
speakermap: self.speaker_map,
initialsoundgroup: ffi::FFI::unwrap(&self.initial_sound_group),
initialseekposition: self.initial_seek_position,
initialseekpostype: match self.initial_seek_pos_type {TimeUnit(v) => v},
ignoresetfilesystem: match self.ignore_set_file_system {
true => 0i32,
false => 1i32
},
cddaforceaspi: self.cdda_force_aspi,
audioqueuepolicy: self.audio_queue_policy,
minmidigranularity: self.min_midi_granularity,
nonblockthreadid: self.non_block_thread_id,
}
}
}
/// When creating a codec, declare one of these and provide the relevant callbacks and name for FMOD
/// to use when it opens and reads a file.
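///
/// A hedged sketch of filling in the public fields (registering the codec with FMOD is
/// outside this excerpt):
///
/// ```ignore
/// let mut desc = FmodCodecDescription::default();
/// desc.name = "mycodec".to_owned();
/// desc.version = 1;
/// desc.default_as_stream = 0;
/// ```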
pub struct FmodCodecDescription {
/// [in] Name of the codec.
pub name : String,
/// [in] Plugin writer's version number.
pub version : u32,
/// [in] Tells FMOD to open the file as a stream when calling
/// [`Sys::create_sound`](doc/rfmod/struct.Sys.html#method.create_sound), and not a static
/// sample. Should normally be 0 (FALSE), because generally the user wants to decode the file
/// into memory when using [`Sys::create_sound`](doc/rfmod/struct.Sys.html#method.create_sound).
/// Mainly used for formats that decode for a very long time, or could use large amounts of
/// memory when decoded. Usually sequenced formats such as mod/s3m/xm/it/midi fall into this
/// category. It is mainly to stop users that don't know what they're doing from getting
/// FMOD_ERR_MEMORY returned from createSound when they should have in fact called
/// System::createStream or used FMOD_CREATESTREAM in
/// [`Sys::create_sound`](doc/rfmod/struct.Sys.html#method.create_sound).
pub default_as_stream: i32,
    /// [in] When the codec's setposition callback is called, only these time formats will be
    /// passed to the codec. Use bitwise OR to accumulate different types.
pub time_units : TimeUnit,
/// [in] Open callback for the codec for when FMOD tries to open a sound using this codec.
open : ffi::FMOD_CODEC_OPENCALLBACK,
/// [in] Close callback for the codec for when FMOD tries to close a sound using this codec.
close : ffi::FMOD_CODEC_CLOSECALLBACK,
/// [in] Read callback for the codec for when FMOD tries to read some data from the file to the
/// destination format (specified in the open callback).
read : ffi::FMOD_CODEC_READCALLBACK,
    /// [in] Callback to return the length of the song in whatever format is required when
    /// [`Sound::get_length`](doc/rfmod/struct.Sound.html#method.get_length) is called.
get_length : ffi::FMOD_CODEC_GETLENGTHCALLBACK,
/// [in] Seek callback for the codec for when FMOD tries to seek within the file with
/// [`Channel::set_position`](doc/rfmod/struct.Channel.html#method.set_position).
set_position : ffi::FMOD_CODEC_SETPOSITIONCALLBACK,
    /// [in] Tell callback for the codec for when FMOD tries to get the current position within
    /// the file with [`Channel::get_position`](doc/rfmod/struct.Channel.html#method.get_position).
get_position : ffi::FMOD_CODEC_GETPOSITIONCALLBACK,
/// [in] Sound creation callback for the codec when FMOD finishes creating the sound. (So the
/// codec can set more parameters for the related created sound, ie loop points/mode or 3D
/// attributes etc).
sound_create : ffi::FMOD_CODEC_SOUNDCREATECALLBACK,
    /// [in] Callback to tell FMOD about the waveformat of a particular subsound. This is to save
    /// memory: rather than saving 1000 FMOD_CODEC_WAVEFORMAT structures in the codec, the codec
    /// might have a more optimal way of storing this information.
get_wave_format : ffi::FMOD_CODEC_GETWAVEFORMAT,
}
impl Default for FmodCodecDescription {
fn default() -> FmodCodecDescription {
FmodCodecDescription {
name: String::new(),
version: 0u32,
default_as_stream: 0i32,
time_units: TimeUnit(0u32),
open: None,
close: None,
read: None,
get_length: None,
set_position: None,
get_position: None,
sound_create: None,
get_wave_format: None,
}
}
}
/// Wrapper for OutputHandle
pub struct OutputHandle {
handle: *mut c_void
}
/// Structure to be filled with detailed memory usage information of a FMOD object
#[derive(Clone, Copy)]
pub struct MemoryUsageDetails
{
/// [out] Memory not accounted for by other types
pub other : u32,
/// [out] String data
pub string : u32,
/// [out] System object and various internals
pub system : u32,
/// [out] Plugin objects and internals
pub plugins : u32,
/// [out] Output module object and internals
pub output : u32,
/// [out] Channel related memory
pub channel : u32,
/// [out] ChannelGroup objects and internals
pub channel_group : u32,
/// [out] Codecs allocated for streaming
pub codec : u32,
/// [out] File buffers and structures
pub file : u32,
/// [out] Sound objects and internals
pub sound : u32,
/// [out] Sound data stored in secondary RAM
pub secondary_ram : u32,
/// [out] SoundGroup objects and internals
pub sound_group : u32,
/// [out] Stream buffer memory
pub stream_buffer : u32,
/// [out] DSPConnection objects and internals
pub dsp_connection : u32,
/// [out] DSP implementation objects
pub dsp : u32,
/// [out] Realtime file format decoding DSP objects
pub dsp_codec : u32,
/// [out] Profiler memory footprint.
pub profile : u32,
/// [out] Buffer used to store recorded data from microphone
pub record_buffer : u32,
/// [out] Reverb implementation objects
pub reverb : u32,
/// [out] Reverb channel properties structs
pub reverb_channel_props : u32,
/// [out] Geometry objects and internals
pub geometry : u32,
/// [out] Sync point memory.
pub sync_point : u32,
/// [out] EventSystem and various internals
pub event_system : u32,
/// [out] MusicSystem and various internals
pub music_system : u32,
/// [out] Definition of objects contained in all loaded projects e.g. events, groups, categories
pub fev : u32,
/// [out] Data loaded with preloadFSB
pub memory_fsb : u32,
/// [out] EventProject objects and internals
pub event_project : u32,
/// [out] EventGroup objects and internals
pub event_group_i : u32,
/// [out] Objects used to manage wave banks
pub sound_bank_class : u32,
/// [out] Data used to manage lists of wave bank usage
pub sound_bank_list : u32,
/// [out] Stream objects and internals
pub stream_instance : u32,
/// [out] Sound definition objects
pub sound_def_class : u32,
/// [out] Sound definition static data objects
pub sound_def_def_class : u32,
/// [out] Sound definition pool data
pub sound_def_pool : u32,
/// [out] Reverb definition objects
pub reverb_def : u32,
/// [out] Reverb objects
pub event_reverb : u32,
/// [out] User property objects
pub user_property : u32,
/// [out] Event instance base objects
pub event_instance : u32,
/// [out] Complex event instance objects
pub event_instance_complex : u32,
/// [out] Simple event instance objects
pub event_instance_simple : u32,
/// [out] Event layer instance objects
pub event_instance_layer : u32,
/// [out] Event sound instance objects
pub event_instance_sound : u32,
/// [out] Event envelope objects
pub event_envelope : u32,
/// [out] Event envelope definition objects
pub event_envelope_def : u32,
/// [out] Event parameter objects
pub event_parameter : u32,
/// [out] Event category objects
pub event_category : u32,
/// [out] Event envelope point objects
pub event_envelope_point : u32,
/// [out] Event instance pool memory
pub event_instance_pool : u32,
}
impl Default for MemoryUsageDetails {
fn default() -> MemoryUsageDetails {
MemoryUsageDetails {
other: 0u32,
string: 0u32,
system: 0u32,
plugins: 0u32,
output: 0u32,
channel: 0u32,
channel_group: 0u32,
codec: 0u32,
file: 0u32,
sound: 0u32,
secondary_ram: 0u32,
sound_group: 0u32,
stream_buffer: 0u32,
dsp_connection: 0u32,
dsp: 0u32,
dsp_codec: 0u32,
profile: 0u32,
record_buffer: 0u32,
reverb: 0u32,
reverb_channel_props: 0u32,
geometry: 0u32,
sync_point: 0u32,
event_system: 0u32,
music_system: 0u32,
fev: 0u32,
memory_fsb: 0u32,
event_project: 0u32,
event_group_i: 0u32,
sound_bank_class: 0u32,
sound_bank_list: 0u32,
stream_instance: 0u32,
sound_def_class: 0u32,
sound_def_def_class: 0u32,
sound_def_pool: 0u32,
reverb_def: 0u32,
event_reverb: 0u32,
user_property: 0u32,
event_instance: 0u32,
event_instance_complex: 0u32,
event_instance_simple: 0u32,
event_instance_layer: 0u32,
event_instance_sound: 0u32,
event_envelope: 0u32,
event_envelope_def: 0u32,
event_parameter: 0u32,
event_category: 0u32,
event_envelope_point: 0u32,
event_instance_pool: 0u32,
}
}
}
pub fn get_memory_usage_details_ffi(details: MemoryUsageDetails) -> ffi::FMOD_MEMORY_USAGE_DETAILS {
ffi::FMOD_MEMORY_USAGE_DETAILS {
other: details.other,
string: details.string,
system: details.system,
plugins: details.plugins,
output: details.output,
channel: details.channel,
channel_group: details.channel_group,
codec: details.codec,
file: details.file,
sound: details.sound,
secondary_ram: details.secondary_ram,
sound_group: details.sound_group,
stream_buffer: details.stream_buffer,
dsp_connection: details.dsp_connection,
dsp: details.dsp,
dsp_codec: details.dsp_codec,
profile: details.profile,
record_buffer: details.record_buffer,
reverb: details.reverb,
reverb_channel_props: details.reverb_channel_props,
geometry: details.geometry,
sync_point: details.sync_point,
event_system: details.event_system,
music_system: details.music_system,
fev: details.fev,
memory_fsb: details.memory_fsb,
event_project: details.event_project,
event_group_i: details.event_group_i,
sound_bank_class: details.sound_bank_class,
sound_bank_list: details.sound_bank_list,
stream_instance: details.stream_instance,
sound_def_class: details.sound_def_class,
sound_def_def_class: details.sound_def_def_class,
sound_def_pool: details.sound_def_pool,
reverb_def: details.reverb_def,
event_reverb: details.event_reverb,
user_property: details.user_property,
event_instance: details.event_instance,
event_instance_complex: details.event_instance_complex,
event_instance_simple: details.event_instance_simple,
event_instance_layer: details.event_instance_layer,
event_instance_sound: details.event_instance_sound,
event_envelope: details.event_envelope,
event_envelope_def: details.event_envelope_def,
event_parameter: details.event_parameter,
event_category: details.event_category,
event_envelope_point: details.event_envelope_point,
event_instance_pool: details.event_instance_pool,
}
}
pub fn from_memory_usage_details_ptr(details: ffi::FMOD_MEMORY_USAGE_DETAILS) -> MemoryUsageDetails {
MemoryUsageDetails {
other: details.other,
string: details.string,
system: details.system,
plugins: details.plugins,
output: details.output,
channel: details.channel,
channel_group: details.channel_group,
codec: details.codec,
file: details.file,
sound: details.sound,
secondary_ram: details.secondary_ram,
sound_group: details.sound_group,
stream_buffer: details.stream_buffer,
dsp_connection: details.dsp_connection,
dsp: details.dsp,
dsp_codec: details.dsp_codec,
profile: details.profile,
record_buffer: details.record_buffer,
reverb: details.reverb,
reverb_channel_props: details.reverb_channel_props,
geometry: details.geometry,
sync_point: details.sync_point,
event_system: details.event_system,
music_system: details.music_system,
fev: details.fev,
memory_fsb: details.memory_fsb,
event_project: details.event_project,
event_group_i: details.event_group_i,
sound_bank_class: details.sound_bank_class,
sound_bank_list: details.sound_bank_list,
stream_instance: details.stream_instance,
sound_def_class: details.sound_def_class,
sound_def_def_class: details.sound_def_def_class,
sound_def_pool: details.sound_def_pool,
reverb_def: details.reverb_def,
event_reverb: details.event_reverb,
user_property: details.user_property,
event_instance: details.event_instance,
event_instance_complex: details.event_instance_complex,
event_instance_simple: details.event_instance_simple,
event_instance_layer: details.event_instance_layer,
event_instance_sound: details.event_instance_sound,
event_envelope: details.event_envelope,
event_envelope_def: details.event_envelope_def,
event_parameter: details.event_parameter,
event_category: details.event_category,
event_envelope_point: details.event_envelope_point,
event_instance_pool: details.event_instance_pool,
}
}
/// FMOD System Object
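///
/// Typical lifecycle, as a hedged sketch (error handling trimmed and the file name is
/// illustrative):
///
/// ```ignore
/// let sys = match Sys::new() {
///     Ok(s) => s,
///     Err(_) => panic!("failed to create the FMOD system"),
/// };
/// sys.init();
/// if let Ok(sound) = sys.create_sound("music.ogg", None, None) {
///     // ... play `sound`, calling sys.update() once per frame ...
/// }
/// // the first `Sys` created is released automatically when it is dropped
/// ```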
pub struct Sys {
system: *mut ffi::FMOD_SYSTEM,
is_first: bool
}
impl ffi::FFI<ffi::FMOD_SYSTEM> for Sys {
fn wrap(system: *mut ffi::FMOD_SYSTEM) -> Sys {
Sys {system: system, is_first: false}
}
fn unwrap(s: &Sys) -> *mut ffi::FMOD_SYSTEM {
s.system
}
}
impl Drop for Sys {
fn drop(&mut self) {
self.release();
}
}
impl Sys {
/* the first one created has to be the last one released */
pub fn new() -> Result<Sys, ::Status> {
let mut tmp = ::std::ptr::null_mut();
match unsafe { ffi::FMOD_System_Create(&mut tmp) } {
::Status::Ok => Ok(Sys{system: tmp, is_first: true}),
err => Err(err)
}
}
pub fn init(&self) -> ::Status {
unsafe { ffi::FMOD_System_Init(self.system, 1, ::INIT_NORMAL, ::std::ptr::null_mut()) }
}
pub fn init_with_parameters(&self, max_channels: i32, InitFlag(flag): InitFlag) -> ::Status {
unsafe { ffi::FMOD_System_Init(self.system, max_channels, flag, ::std::ptr::null_mut()) }
}
pub fn update(&self) -> ::Status {
unsafe { ffi::FMOD_System_Update(self.system) }
}
pub fn release(&mut self) -> ::Status {
if self.is_first && !self.system.is_null() {
unsafe {
match match ffi::FMOD_System_Close(self.system) {
::Status::Ok => ffi::FMOD_System_Release(self.system),
e => e
} {
::Status::Ok => {
self.system = ::std::ptr::null_mut();
::Status::Ok
}
e => e
}
}
} else {
::Status::Ok
}
}
    /// If `music` is empty, a null pointer is passed to FMOD instead of a file name.
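    ///
    /// A hedged usage sketch (assuming `sys` is an initialized `Sys`; the file name is
    /// illustrative, and passing `None` for the options falls back to the default mode
    /// chosen in the body below):
    ///
    /// ```ignore
    /// match sys.create_sound("media/drumloop.wav", None, None) {
    ///     Ok(sound) => { /* use the sound */ },
    ///     Err(status) => { /* inspect the returned ::Status */ },
    /// }
    /// ```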
pub fn create_sound(&self, music: &str, options: Option<Mode>,
exinfo: Option<&mut CreateSoundexInfo>) -> Result<Sound, ::Status> {
let mut sound = sound::from_ptr_first(::std::ptr::null_mut());
let op = match options {
Some(Mode(t)) => t,
None => ::SOFTWARE | ::LOOP_OFF | ::_2D | ::CREATESTREAM
};
let ex = match exinfo {
Some(e) => {
let user_data = sound::get_user_data(&mut sound);
user_data.non_block = e.non_block_callback;
user_data.pcm_read = e.pcm_read_callback;
user_data.pcm_set_pos = e.pcm_set_pos_callback;
unsafe {
user_data.user_data =
::std::mem::transmute::<&mut ffi::SoundData, *mut c_void>(
&mut *e.user_data);
}
&mut e.convert_to_c() as *mut ffi::FMOD_CREATESOUNDEXINFO
},
None => ::std::ptr::null_mut()
};
match if music.len() > 0 {
let music_cstring = CString::new(music).unwrap();
unsafe { ffi::FMOD_System_CreateSound(self.system,
music_cstring.as_ptr() as *const c_char, op, ex,
sound::get_fffi(&mut sound)) }
} else {
unsafe { ffi::FMOD_System_CreateSound(self.system, ::std::ptr::null(), op, ex,
sound::get_fffi(&mut sound)) }
} {
::Status::Ok => {
Ok(sound)
},
e => Err(e)
}
}
pub fn create_stream(&self, music: &str, options: Option<Mode>,
exinfo: Option<&mut CreateSoundexInfo>) -> Result<Sound, ::Status> {
let mut sound = sound::from_ptr_first(::std::ptr::null_mut());
let op = match options {
Some(Mode(t)) => t,
None => ::SOFTWARE | ::LOOP_OFF | ::_2D | ::CREATESTREAM
};
let ex = match exinfo {
Some(e) => {
let user_data = sound::get_user_data(&mut sound);
user_data.non_block = e.non_block_callback;
user_data.pcm_read = e.pcm_read_callback;
user_data.pcm_set_pos = e.pcm_set_pos_callback;
unsafe {
user_data.user_data =
::std::mem::transmute::<&mut ffi::SoundData, *mut c_void>(
&mut *e.user_data);
}
&mut e.convert_to_c() as *mut ffi::FMOD_CREATESOUNDEXINFO
},
None => ::std::ptr::null_mut()
};
match if music.len() > 0 {
let music_cstring = CString::new(music).unwrap();
unsafe { ffi::FMOD_System_CreateStream(self.system,
music_cstring.as_ptr() as *const c_char, op, ex,
sound::get_fffi(&mut sound)) }
} else {
unsafe { ffi::FMOD_System_CreateStream(self.system, ::std::ptr::null(), op, ex,
sound::get_fffi(&mut sound)) }
} {
::Status::Ok => Ok(sound),
err => Err(err)
}
}
pub fn create_channel_group(&self, group_name: &str)
-> Result<channel_group::ChannelGroup, ::Status> {
let mut channel_group = ::std::ptr::null_mut();
let tmp_group_name = CString::new(group_name).unwrap();
match unsafe { ffi::FMOD_System_CreateChannelGroup(self.system,
tmp_group_name.as_ptr() as *const c_char,
&mut channel_group) } {
::Status::Ok => Ok(ffi::FFI::wrap(channel_group)),
e => Err(e)
}
}
pub fn create_sound_group(&self, group_name: &str)
-> Result<sound_group::SoundGroup, ::Status> {
let mut sound_group = ::std::ptr::null_mut();
let tmp_group_name = CString::new(group_name).unwrap();
match unsafe { ffi::FMOD_System_CreateSoundGroup(self.system,
tmp_group_name.as_ptr() as *const c_char,
&mut sound_group) } {
::Status::Ok => Ok(ffi::FFI::wrap(sound_group)),
e => Err(e)
}
}
pub fn create_reverb(&self) -> Result<reverb::Reverb, ::Status>{
let mut t_reverb = ::std::ptr::null_mut();
match unsafe { ffi::FMOD_System_CreateReverb(self.system, &mut t_reverb) } {
::Status::Ok => Ok(ffi::FFI::wrap(t_reverb)),
e => Err(e)
}
}
pub fn create_DSP(&self) -> Result<dsp::Dsp, ::Status> {
let mut t_dsp = ::std::ptr::null_mut();
match unsafe { ffi::FMOD_System_CreateDSP(self.system, ::std::ptr::null_mut(),
&mut t_dsp) } {
::Status::Ok => Ok(dsp::from_ptr_first(t_dsp)),
e => Err(e)
}
}
pub fn create_DSP_with_description(&self, description: &mut dsp::DspDescription)
-> Result<dsp::Dsp, ::Status> {
let mut t_dsp = ::std::ptr::null_mut();
let mut t_description = dsp::get_description_ffi(description);
match unsafe { ffi::FMOD_System_CreateDSP(self.system, &mut t_description, &mut t_dsp) } {
::Status::Ok => Ok(dsp::from_ptr_first(t_dsp)),
e => Err(e)
}
}
pub fn create_DSP_by_type(&self, _type: ::DspType) -> Result<dsp::Dsp, ::Status> {
let mut t_dsp = ::std::ptr::null_mut();
match unsafe { ffi::FMOD_System_CreateDSPByType(self.system, _type, &mut t_dsp) } {
::Status::Ok => Ok(dsp::from_ptr_first(t_dsp)),
e => Err(e)
}
}
pub fn set_output(&self, output_type: ::OutputType) -> ::Status {
unsafe { ffi::FMOD_System_SetOutput(self.system, output_type) }
}
pub fn get_output(&self) -> Result<::OutputType, ::Status> {
let mut output_type = ::OutputType::AutoDetect;
match unsafe { ffi::FMOD_System_GetOutput(self.system, &mut output_type) } {
::Status::Ok => Ok(output_type),
e => Err(e)
}
}
pub fn get_num_drivers(&self) -> Result<i32, ::Status> {
let mut num_drivers = 0i32;
match unsafe { ffi::FMOD_System_GetNumDrivers(self.system,
&mut num_drivers as *mut c_int) } {
::Status::Ok => Ok(num_drivers),
e => Err(e)
}
}
pub fn get_driver_info(&self, id: i32, name_len: usize) -> Result<(Guid, String), ::Status> {
let mut c = Vec::with_capacity(name_len + 1);
let mut guid = ffi::FMOD_GUID {
Data1: 0,
Data2: 0,
Data3: 0,
Data4: [0, 0, 0, 0, 0, 0, 0, 0],
};
for _ in 0..(name_len + 1) {
c.push(0);
}
match unsafe { ffi::FMOD_System_GetDriverInfo(self.system, id as c_int,
c.as_mut_ptr() as *mut c_char,
name_len as c_int, &mut guid) } {
::Status::Ok => Ok((Guid {
data1: guid.Data1,
data2: guid.Data2,
data3: guid.Data3,
data4: guid.Data4,
}, String::from_utf8(c).unwrap())),
e => Err(e),
}
}
pub fn get_driver_caps(&self, id: i32) -> Result<(FmodCaps, i32, ::SpeakerMode), ::Status> {
let mut fmod_caps = 0u32;
let mut speaker_mode = ::SpeakerMode::Raw;
let mut control_panel_output_rate = 0i32;
match unsafe { ffi::FMOD_System_GetDriverCaps(self.system, id as c_int, &mut fmod_caps,
&mut control_panel_output_rate as *mut c_int,
&mut speaker_mode) } {
::Status::Ok => Ok((FmodCaps(fmod_caps), control_panel_output_rate, speaker_mode)),
e => Err(e),
}
}
pub fn set_driver(&self, driver: i32) -> ::Status {
unsafe { ffi::FMOD_System_SetDriver(self.system, driver as c_int) }
}
pub fn get_driver(&self) -> Result<i32, ::Status> {
let mut driver = 0i32;
match unsafe { ffi::FMOD_System_GetDriver(self.system, &mut driver as *mut c_int) } {
::Status::Ok => Ok(driver),
e => Err(e),
}
}
pub fn set_hardware_channels(&self, num_hardware_channels: i32) -> ::Status {
unsafe { ffi::FMOD_System_SetHardwareChannels(self.system, num_hardware_channels as c_int) }
}
pub fn get_hardware_channels(&self) -> Result<i32, ::Status> {
let mut num_hardware_channels = 0i32;
match unsafe {
ffi::FMOD_System_GetHardwareChannels(self.system,
&mut num_hardware_channels as *mut c_int)
} {
::Status::Ok => Ok(num_hardware_channels),
e => Err(e),
}
}
pub fn set_software_channels(&self, num_software_channels: i32) -> ::Status {
unsafe { ffi::FMOD_System_SetSoftwareChannels(self.system, num_software_channels as c_int) }
}
pub fn get_software_channels(&self) -> Result<i32, ::Status> {
let mut num_software_channels = 0i32;
match unsafe {
ffi::FMOD_System_GetSoftwareChannels(self.system,
&mut num_software_channels as *mut c_int)
} {
::Status::Ok => Ok(num_software_channels),
e => Err(e),
}
}
pub fn set_software_format(&self, sample_rate: i32, format: ::SoundFormat,
num_output_channels: i32, max_input_channels: i32,
resample_method: ::DspResampler) -> ::Status {
unsafe {
ffi::FMOD_System_SetSoftwareFormat(self.system, sample_rate as c_int, format,
num_output_channels as c_int,
max_input_channels as c_int, resample_method)
}
}
pub fn get_software_format(&self) -> Result<SoftwareFormat, ::Status> {
let mut t = SoftwareFormat {
sample_rate: 0,
format: ::SoundFormat::None,
num_output_channels: 0,
max_input_channels: 0,
resample_method: ::DspResampler::NoInterp,
bits: 0,
};
match unsafe { ffi::FMOD_System_GetSoftwareFormat(self.system,
&mut t.sample_rate as *mut c_int,
&mut t.format,
&mut t.num_output_channels as *mut c_int,
&mut t.max_input_channels as *mut c_int,
&mut t.resample_method,
&mut t.bits as *mut c_int)
} {
::Status::Ok => Ok(t),
e => Err(e),
}
}
pub fn set_DSP_buffer_size(&self, buffer_length: u32, num_buffers: i32) -> ::Status {
unsafe { ffi::FMOD_System_SetDSPBufferSize(self.system, buffer_length as c_uint,
num_buffers as c_int) }
}
pub fn get_DSP_buffer_size(&self) -> Result<(u32, i32), ::Status> {
let mut buffer_length = 0u32;
let mut num_buffers = 0i32;
match unsafe { ffi::FMOD_System_GetDSPBufferSize(self.system,
&mut buffer_length as *mut c_uint,
&mut num_buffers as *mut c_int) } {
::Status::Ok => Ok((buffer_length, num_buffers)),
e => Err(e),
}
}
pub fn set_advanced_settings(&self, settings: &mut AdvancedSettings) -> ::Status {
let mut converted_c_char: Vec<*const c_char> =
(0..settings.ASIO_channel_list.len()).map(|pos| {
settings.ASIO_channel_list[pos].as_ptr() as *const c_char
}).collect();
let deb_log_filename = CString::new(settings.debug_log_filename.clone()).unwrap();
let mut advanced_settings = ffi::FMOD_ADVANCEDSETTINGS{
cbsize: mem::size_of::<ffi::FMOD_ADVANCEDSETTINGS>() as i32,
maxMPEGcodecs: settings.max_MPEG_codecs,
maxADPCMcodecs: settings.max_ADPCM_codecs,
maxXMAcodecs: settings.max_XMA_codecs,
maxCELTcodecs: settings.max_CELT_codecs,
maxVORBIScodecs: settings.max_VORBIS_codecs,
maxAT9Codecs: settings.max_AT9_codecs,
maxPCMcodecs: settings.max_PCM_codecs,
ASIONumChannels: settings.ASIO_num_channels,
ASIOChannelList: converted_c_char.as_mut_ptr() as *mut *mut c_char,
ASIOSpeakerList: settings.ASIO_speaker_list.as_mut_ptr(),
max3DReverbDSPs: settings.max_3D_reverb_DSPs,
HRTFMinAngle: settings.HRTF_min_angle,
HRTFMaxAngle: settings.HRTF_max_angle,
HRTFFreq: settings.HRTF_freq,
vol0virtualvol: settings.vol0_virtual_vol,
eventqueuesize: settings.event_queue_size,
defaultDecodeBufferSize: settings.default_decode_buffer_size,
debugLogFilename: deb_log_filename.as_ptr() as *mut c_char,
profileport: settings.profile_port,
geometryMaxFadeTime: settings.geometry_max_fade_time,
maxSpectrumWaveDataBuffers: settings.max_spectrum_wave_data_buffers,
musicSystemCacheDelay: settings.music_system_cache_delay,
distanceFilterCenterFreq: settings.distance_filter_center_freq,
stackSizeStream: settings.stack_size_stream,
stackSizeNonBlocking: settings.stack_size_non_blocking,
stackSizeMixer: settings.stack_size_mixer,
};
unsafe { ffi::FMOD_System_SetAdvancedSettings(self.system, &mut advanced_settings) }
}
pub fn get_advanced_settings(&self) -> Result<AdvancedSettings, ::Status> {
let mut advanced_settings = ffi::FMOD_ADVANCEDSETTINGS{
cbsize: mem::size_of::<ffi::FMOD_ADVANCEDSETTINGS>() as i32,
maxMPEGcodecs: 0,
maxADPCMcodecs: 0,
maxXMAcodecs: 0,
maxCELTcodecs: 0,
maxVORBIScodecs: 0,
maxAT9Codecs: 0,
maxPCMcodecs: 0,
ASIONumChannels: 0,
ASIOChannelList: ::std::ptr::null_mut(),
ASIOSpeakerList: ::std::ptr::null_mut(),
max3DReverbDSPs: 0,
HRTFMinAngle: 0f32,
HRTFMaxAngle: 0f32,
HRTFFreq: 0f32,
vol0virtualvol: 0f32,
eventqueuesize: 0,
defaultDecodeBufferSize: 0,
debugLogFilename: ::std::ptr::null_mut(),
profileport: 0,
geometryMaxFadeTime: 0,
maxSpectrumWaveDataBuffers: 0,
musicSystemCacheDelay: 0,
distanceFilterCenterFreq: 0f32,
stackSizeStream: 0,
stackSizeNonBlocking: 0,
stackSizeMixer: 0,
};
match unsafe { ffi::FMOD_System_GetAdvancedSettings(self.system, &mut advanced_settings) } {
::Status::Ok => {
let mut converted_ASIO_channel_vec = Vec::new();
let mut converted_ASIO_speaker_vec = Vec::new();
unsafe {
if !advanced_settings.ASIOChannelList.is_null() {
let mut it = 0;
loop {
let tmp = advanced_settings.ASIOChannelList.offset(it);
if (*tmp).is_null() {
break;
}
let l = ffi::strlen(*tmp);
converted_ASIO_channel_vec.push(
String::from_raw_parts(*tmp as *mut u8, l, l));
it += 1;
}
}
if !advanced_settings.ASIOSpeakerList.is_null() {
let mut it = 0;
loop {
let tmp = advanced_settings.ASIOSpeakerList.offset(it);
if *tmp == ::Speaker::Null {
break;
}
converted_ASIO_speaker_vec.push(*tmp);
it += 1;
}
}
}
Ok(AdvancedSettings {
max_MPEG_codecs: advanced_settings.maxMPEGcodecs,
max_ADPCM_codecs: advanced_settings.maxADPCMcodecs,
max_XMA_codecs: advanced_settings.maxXMAcodecs,
max_CELT_codecs: advanced_settings.maxCELTcodecs,
max_VORBIS_codecs: advanced_settings.maxVORBIScodecs,
max_AT9_codecs: advanced_settings.maxAT9Codecs,
max_PCM_codecs: advanced_settings.maxPCMcodecs,
ASIO_num_channels: advanced_settings.ASIONumChannels,
ASIO_channel_list: converted_ASIO_channel_vec.clone(),
ASIO_speaker_list: converted_ASIO_speaker_vec,
max_3D_reverb_DSPs: advanced_settings.max3DReverbDSPs,
HRTF_min_angle: advanced_settings.HRTFMinAngle,
HRTF_max_angle: advanced_settings.HRTFMaxAngle,
HRTF_freq: advanced_settings.HRTFFreq,
vol0_virtual_vol: advanced_settings.vol0virtualvol,
event_queue_size: advanced_settings.eventqueuesize,
default_decode_buffer_size: advanced_settings.defaultDecodeBufferSize,
debug_log_filename: {
if !advanced_settings.debugLogFilename.is_null() {
let l = ffi::strlen(advanced_settings.debugLogFilename);
unsafe { String::from_raw_parts(
advanced_settings.debugLogFilename as *mut u8, l, l) }
} else {
String::new()
}
},
profile_port: advanced_settings.profileport,
geometry_max_fade_time: advanced_settings.geometryMaxFadeTime,
max_spectrum_wave_data_buffers: advanced_settings.maxSpectrumWaveDataBuffers,
music_system_cache_delay: advanced_settings.musicSystemCacheDelay,
distance_filter_center_freq: advanced_settings.distanceFilterCenterFreq,
stack_size_stream: advanced_settings.stackSizeStream,
stack_size_non_blocking: advanced_settings.stackSizeNonBlocking,
stack_size_mixer: advanced_settings.stackSizeMixer,
})
}
e => Err(e),
}
}
pub fn set_speaker_mode(&self, speaker_mode: ::SpeakerMode) -> ::Status {
unsafe { ffi::FMOD_System_SetSpeakerMode(self.system, speaker_mode) }
}
pub fn get_speaker_mode(&self) -> Result<::SpeakerMode, ::Status> {
let mut speaker_mode = ::SpeakerMode::Raw;
match unsafe { ffi::FMOD_System_GetSpeakerMode(self.system, &mut speaker_mode) } {
::Status::Ok => Ok(speaker_mode),
e => Err(e)
}
}
pub fn set_plugin_path(&self, path: &str) -> ::Status {
let tmp_path = CString::new(path).unwrap();
unsafe { ffi::FMOD_System_SetPluginPath(self.system, tmp_path.as_ptr() as *const c_char) }
}
pub fn load_plugin(&self, filename: &str, priority: u32) -> Result<PluginHandle, ::Status> {
let mut handle = 0u32;
let tmp_filename = filename.as_ptr();
match unsafe { ffi::FMOD_System_LoadPlugin(self.system, tmp_filename as *const c_char,
&mut handle as *mut c_uint,
priority as c_uint) } {
::Status::Ok => Ok(PluginHandle(handle)),
e => Err(e),
}
}
pub fn unload_plugin(&self, PluginHandle(handle): PluginHandle) -> ::Status {
unsafe { ffi::FMOD_System_UnloadPlugin(self.system, handle) }
}
pub fn get_num_plugins(&self, plugin_type: ::PluginType) -> Result<i32, ::Status> {
let mut num_plugins = 0i32;
match unsafe { ffi::FMOD_System_GetNumPlugins(self.system, plugin_type,
&mut num_plugins) } {
::Status::Ok => Ok(num_plugins),
e => Err(e),
}
}
pub fn get_plugin_handle(&self, plugin_type: ::PluginType,
index: i32) -> Result<PluginHandle, ::Status> {
let mut handle = 0u32;
match unsafe { ffi::FMOD_System_GetPluginHandle(self.system, plugin_type, index as c_int,
&mut handle as *mut c_uint) } {
::Status::Ok => Ok(PluginHandle(handle)),
e => Err(e),
}
}
pub fn get_plugin_info(&self, PluginHandle(handle): PluginHandle,
name_len: usize) -> Result<(String, ::PluginType, u32), ::Status> {
let mut plugin_type = ::PluginType::Output;
let mut version = 0u32;
let mut c = Vec::with_capacity(name_len + 1);
for _ in 0..(name_len + 1) {
c.push(0);
}
match unsafe { ffi::FMOD_System_GetPluginInfo(self.system, handle, &mut plugin_type,
c.as_mut_ptr() as *mut c_char,
name_len as c_int,
&mut version as *mut c_uint) } {
::Status::Ok => Ok((String::from_utf8(c).unwrap(), plugin_type, version)),
e => Err(e),
}
}
pub fn set_output_by_plugin(&self, PluginHandle(handle): PluginHandle) -> ::Status {
unsafe { ffi::FMOD_System_SetOutputByPlugin(self.system, handle) }
}
pub fn get_output_by_plugin(&self) -> Result<PluginHandle, ::Status> {
let mut handle = 0u32;
match unsafe { ffi::FMOD_System_GetOutputByPlugin(self.system, &mut handle) } {
::Status::Ok => Ok(PluginHandle(handle)),
e => Err(e),
}
}
pub fn create_DSP_by_plugin(&self,
PluginHandle(handle): PluginHandle) -> Result<Dsp, ::Status> {
let mut dsp = ::std::ptr::null_mut();
match unsafe { ffi::FMOD_System_CreateDSPByPlugin(self.system, handle, &mut dsp) } {
::Status::Ok => Ok(dsp::from_ptr_first(dsp)),
e => Err(e),
}
}
pub fn set_3D_num_listeners(&self, num_listeners: i32) -> ::Status {
unsafe { ffi::FMOD_System_Set3DNumListeners(self.system, num_listeners as c_int) }
}
pub fn get_3D_num_listeners(&self) -> Result<i32, ::Status> {
let mut num_listeners = 0i32;
match unsafe { ffi::FMOD_System_Get3DNumListeners(self.system,
&mut num_listeners as *mut c_int) } {
::Status::Ok => Ok(num_listeners),
e => Err(e),
}
}
pub fn set_3D_listener_attributes(&self, listener: i32, pos: &vector::Vector,
vel: &vector::Vector, forward: &vector::Vector,
up: &vector::Vector) -> ::Status {
let c_p = vector::get_ffi(pos);
let c_v = vector::get_ffi(vel);
let c_f = vector::get_ffi(forward);
let c_u = vector::get_ffi(up);
unsafe { ffi::FMOD_System_Set3DListenerAttributes(self.system, listener as c_int, &c_p,
&c_v, &c_f, &c_u) }
}
/// Returns:
///
/// Ok(position, velocity, forward, up)
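    ///
    /// A short sketch, reading back the attributes of listener 0 (assuming `sys` is an
    /// initialized `Sys`):
    ///
    /// ```ignore
    /// if let Ok((pos, vel, forward, up)) = sys.get_3D_listener_attributes(0) {
    ///     // use the four vectors: position, velocity, forward and up
    /// }
    /// ```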
pub fn get_3D_listener_attributes(&self, listener: i32)
-> Result<(vector::Vector, vector::Vector, vector::Vector,
vector::Vector), ::Status> {
let mut pos = vector::get_ffi(&vector::Vector::new());
let mut vel = vector::get_ffi(&vector::Vector::new());
let mut forward = vector::get_ffi(&vector::Vector::new());
let mut up = vector::get_ffi(&vector::Vector::new());
match unsafe { ffi::FMOD_System_Get3DListenerAttributes(self.system, listener as c_int,
&mut pos, &mut vel, &mut forward,
&mut up) } {
::Status::Ok => Ok((vector::from_ptr(pos), vector::from_ptr(vel),
vector::from_ptr(forward), vector::from_ptr(up))),
e => Err(e),
}
}
pub fn set_3D_speaker_position(&self, speaker: ::Speaker, x: f32, y: f32,
active: bool) -> ::Status {
let t_active : c_int = match active {
true => 1,
false => 0,
};
unsafe { ffi::FMOD_System_Set3DSpeakerPosition(self.system, speaker, x, y, t_active) }
}
/// Returns:
///
/// Ok(x, y, is_active)
pub fn get_3D_speaker_position(&self,
speaker: ::Speaker) -> Result<(f32, f32, bool), ::Status> {
let mut x = 0f32;
let mut y = 0f32;
let mut active : c_int = 0;
match unsafe { ffi::FMOD_System_Get3DSpeakerPosition(self.system, speaker, &mut x, &mut y,
&mut active) } {
::Status::Ok => Ok((x, y, match active {
0 => false,
_ => true,
})),
e => Err(e),
}
}
pub fn set_3D_settings(&self, doppler_scale: f32, distance_factor: f32,
roll_off_scale: f32) -> ::Status {
unsafe { ffi::FMOD_System_Set3DSettings(self.system, doppler_scale, distance_factor,
roll_off_scale) }
}
/// Returns:
///
/// Ok(doppler_scale, distance_factor, roll_off_scale)
pub fn get_3D_settings(&self) -> Result<(f32, f32, f32), ::Status> {
let mut doppler_scale = 0f32;
let mut distance_factor = 0f32;
let mut roll_off_scale = 0f32;
match unsafe { ffi::FMOD_System_Get3DSettings(self.system, &mut doppler_scale,
&mut distance_factor, &mut roll_off_scale) } {
::Status::Ok => Ok((doppler_scale, distance_factor, roll_off_scale)),
e => Err(e),
}
}
pub fn set_stream_buffer_size(&self, file_buffer_size: u32,
TimeUnit(file_buffer_size_type): TimeUnit) -> ::Status {
unsafe { ffi::FMOD_System_SetStreamBufferSize(self.system, file_buffer_size as c_uint,
file_buffer_size_type) }
}
/// Returns:
///
    /// Ok(file_buffer_size, file_buffer_size_type)
pub fn get_stream_buffer_size(&self) -> Result<(u32, TimeUnit), ::Status> {
let mut file_buffer_size = 0u32;
let mut file_buffer_size_type = 0u32;
match unsafe { ffi::FMOD_System_GetStreamBufferSize(self.system, &mut file_buffer_size,
&mut file_buffer_size_type) } {
::Status::Ok => Ok((file_buffer_size, TimeUnit(file_buffer_size_type))),
e => Err(e),
}
}
pub fn get_version(&self) -> Result<u32, ::Status> {
let mut version : c_uint = 0;
match unsafe { ffi::FMOD_System_GetVersion(self.system, &mut version) } {
::Status::Ok => Ok(version as u32),
e => Err(e),
}
}
pub fn get_output_handle(&self) -> Result<OutputHandle, ::Status> {
let mut output_h = ::std::ptr::null_mut();
match unsafe { ffi::FMOD_System_GetOutputHandle(self.system, &mut output_h) } {
::Status::Ok => Ok(OutputHandle{handle: output_h}),
e => Err(e),
}
}
pub fn get_channels_playing(&self) -> Result<i32, ::Status> {
let mut playing_chans : c_int = 0;
match unsafe { ffi::FMOD_System_GetChannelsPlaying(self.system, &mut playing_chans) } {
::Status::Ok => Ok(playing_chans as i32),
e => Err(e),
}
}
/// Returns:
///
/// Ok(dsp, stream, geometry, update, total)
pub fn get_CPU_usage(&self) -> Result<(f32, f32, f32, f32, f32), ::Status> {
let mut dsp = 0f32;
let mut stream = 0f32;
let mut geometry = 0f32;
let mut update = 0f32;
let mut total = 0f32;
match unsafe { ffi::FMOD_System_GetCPUUsage(self.system, &mut dsp, &mut stream,
&mut geometry, &mut update, &mut total) } {
::Status::Ok => Ok((dsp, stream, geometry, update, total)),
e => Err(e),
}
}
/// Returns:
///
/// Ok(current_alloced, max_allocated, total)
pub fn get_sound_RAM(&self) -> Result<(i32, i32, i32), ::Status> {
let mut current_alloced : c_int = 0;
let mut max_allocated : c_int = 0;
let mut total : c_int = 0;
match unsafe { ffi::FMOD_System_GetSoundRAM(self.system, &mut current_alloced,
&mut max_allocated, &mut total) } {
::Status::Ok => Ok((current_alloced as i32, max_allocated as i32, total as i32)),
e => Err(e),
}
}
pub fn get_num_CDROM_drives(&self) -> Result<i32, ::Status> {
let mut num_drives : c_int= 0;
match unsafe { ffi::FMOD_System_GetNumCDROMDrives(self.system, &mut num_drives) } {
::Status::Ok => Ok(num_drives as i32),
e => Err(e)
}
}
/// Returns:
///
/// Ok(drive_name, scsi_name, device_name)
pub fn get_CDROM_drive_name(&self, drive: i32, drive_name_len: usize, scsi_name_len: usize,
device_name_len: usize)
-> Result<(String, String, String), ::Status> {
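        // Pre-fill each buffer with NULs so FMOD can write a C string into it;
        // the extra byte leaves room for the terminating NUL.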
let mut drive_name = Vec::with_capacity(drive_name_len + 1);
let mut scsi_name = Vec::with_capacity(scsi_name_len + 1);
let mut device_name = Vec::with_capacity(device_name_len + 1);
for _ in 0..(drive_name_len + 1) {
drive_name.push(0);
}
for _ in 0..(scsi_name_len + 1) {
scsi_name.push(0);
}
for _ in 0..(device_name_len + 1) {
device_name.push(0);
}
match unsafe { ffi::FMOD_System_GetCDROMDriveName(self.system, drive as c_int,
drive_name.as_mut_ptr() as *mut c_char,
drive_name_len as c_int,
scsi_name.as_mut_ptr() as *mut c_char,
scsi_name_len as c_int,
device_name.as_mut_ptr() as *mut c_char,
device_name_len as c_int) } {
::Status::Ok => Ok((String::from_utf8(drive_name).unwrap(),
String::from_utf8(scsi_name).unwrap(),
String::from_utf8(device_name).unwrap())),
e => Err(e),
}
}
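    /// Fills and returns a buffer of `spectrum_size` values.
    ///
    /// When `channel_offset` is `None` it defaults to `0`, and when
    /// `window_type` is `None` the `Rect` FFT window is used.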
pub fn get_spectrum(&self, spectrum_size: usize, channel_offset: Option<i32>,
window_type: Option<::DspFftWindow>) -> Result<Vec<f32>, ::Status> {
let mut ptr : Vec<f32> = ::std::iter::repeat(0f32).take(spectrum_size).collect();
let c_window_type = match window_type {
Some(wt) => wt,
None => ::DspFftWindow::Rect
};
let c_channel_offset : c_int = match channel_offset {
Some(co) => co as c_int,
None => 0
};
match unsafe { ffi::FMOD_System_GetSpectrum(self.system, ptr.as_mut_ptr(),
spectrum_size as c_int, c_channel_offset,
c_window_type) } {
::Status::Ok => Ok(ptr),
e => Err(e),
}
}
pub fn get_wave_data(&self, wave_size: usize,
channel_offset: i32) -> Result<Vec<f32>, ::Status> {
let mut ptr : Vec<f32> = ::std::iter::repeat(0f32).take(wave_size).collect();
match unsafe { ffi::FMOD_System_GetWaveData(self.system, ptr.as_mut_ptr(),
wave_size as c_int, channel_offset as c_int) } {
::Status::Ok => Ok(ptr),
e => Err(e),
}
}
pub fn get_channel(&self, channel_id: i32) -> Result<channel::Channel, ::Status> {
let mut channel = ::std::ptr::null_mut();
match unsafe { ffi::FMOD_System_GetChannel(self.system, channel_id as c_int,
&mut channel) } {
::Status::Ok => Ok(ffi::FFI::wrap(channel)),
e => Err(e),
}
}
pub fn get_master_channel_group(&self) -> Result<channel_group::ChannelGroup, ::Status> {
let mut channel_group = ::std::ptr::null_mut();
match unsafe { ffi::FMOD_System_GetMasterChannelGroup(self.system, &mut channel_group) } {
::Status::Ok => Ok(ffi::FFI::wrap(channel_group)),
e => Err(e),
}
}
pub fn get_master_sound_group(&self) -> Result<sound_group::SoundGroup, ::Status> {
let mut sound_group = ::std::ptr::null_mut();
match unsafe { ffi::FMOD_System_GetMasterSoundGroup(self.system, &mut sound_group) } {
::Status::Ok => Ok(ffi::FFI::wrap(sound_group)),
e => Err(e),
}
}
pub fn set_reverb_properties(&self,
properties: reverb_properties::ReverbProperties) -> ::Status {
let t_properties = reverb_properties::get_ffi(properties);
unsafe { ffi::FMOD_System_SetReverbProperties(self.system, &t_properties) }
}
pub fn get_reverb_properties(&self) -> Result<reverb_properties::ReverbProperties, ::Status> {
let mut properties = reverb_properties::get_ffi(Default::default());
match unsafe { ffi::FMOD_System_GetReverbProperties(self.system, &mut properties) } {
::Status::Ok => Ok(reverb_properties::from_ptr(properties)),
e => Err(e),
}
}
pub fn set_reverb_ambient_properties(&self, properties: reverb_properties::ReverbProperties)
-> ::Status {
let mut t_properties = reverb_properties::get_ffi(properties);
unsafe { ffi::FMOD_System_SetReverbAmbientProperties(self.system, &mut t_properties) }
}
pub fn get_reverb_ambient_properties(&self)
-> Result<reverb_properties::ReverbProperties, ::Status> {
let mut properties = reverb_properties::get_ffi(Default::default());
match unsafe { ffi::FMOD_System_GetReverbAmbientProperties(self.system, &mut properties) } {
::Status::Ok => Ok(reverb_properties::from_ptr(properties)),
e => Err(e),
}
}
pub fn get_DSP_head(&self) -> Result<Dsp, ::Status> {
let mut head = ::std::ptr::null_mut();
match unsafe { ffi::FMOD_System_GetDSPHead(self.system, &mut head) } {
::Status::Ok => Ok(ffi::FFI::wrap(head)),
e => Err(e),
}
}
pub fn add_DSP(&self, dsp: &dsp::Dsp) -> Result<dsp_connection::DspConnection, ::Status> {
let mut t_connection = ::std::ptr::null_mut();
match unsafe { ffi::FMOD_System_AddDSP(self.system, ffi::FFI::unwrap(dsp),
&mut t_connection) } {
::Status::Ok => Ok(ffi::FFI::wrap(t_connection)),
e => Err(e),
}
}
pub fn lock_DSP(&self) -> ::Status {
unsafe { ffi::FMOD_System_LockDSP(self.system) }
}
pub fn unlock_DSP(&self) -> ::Status {
unsafe { ffi::FMOD_System_UnlockDSP(self.system) }
}
/// Returns:
///
/// Ok(hi, lo)
pub fn get_DSP_clock(&self) -> Result<(u32, u32), ::Status> {
let mut hi : c_uint = 0;
let mut lo : c_uint = 0;
match unsafe { ffi::FMOD_System_GetDSPClock(self.system, &mut hi, &mut lo) } {
::Status::Ok => Ok((hi as u32, lo as u32)),
e => Err(e),
}
}
pub fn get_record_num_drivers(&self) -> Result<i32, ::Status> {
let mut num_drivers : c_int = 0;
match unsafe { ffi::FMOD_System_GetRecordNumDrivers(self.system, &mut num_drivers) } {
::Status::Ok => Ok(num_drivers as i32),
e => Err(e),
}
}
pub fn get_record_driver_info(&self, id: i32,
name_len: usize) -> Result<(Guid, String), ::Status> {
let mut guid = ffi::FMOD_GUID{
Data1: 0,
Data2: 0,
Data3: 0,
Data4: [0, 0, 0, 0, 0, 0, 0, 0]
};
let mut c = Vec::with_capacity(name_len + 1);
for _ in 0..(name_len + 1) {
c.push(0);
}
match unsafe { ffi::FMOD_System_GetRecordDriverInfo(self.system, id as c_int,
c.as_mut_ptr() as *mut c_char,
name_len as c_int, &mut guid) } {
::Status::Ok => Ok((Guid {
data1: guid.Data1,
data2: guid.Data2,
data3: guid.Data3,
data4: guid.Data4
}, String::from_utf8(c).unwrap())),
e => Err(e),
}
}
/// Returns:
///
/// Ok(caps, min_frequency, max_frequency)
pub fn get_record_driver_caps(&self, id: i32) -> Result<(FmodCaps, i32, i32), ::Status> {
let mut fmod_caps : c_uint = 0;
let mut min_frequency : c_int = 0;
let mut max_frequency : c_int = 0;
match unsafe { ffi::FMOD_System_GetRecordDriverCaps(self.system, id as c_int,
&mut fmod_caps, &mut min_frequency,
&mut max_frequency) } {
::Status::Ok => Ok((FmodCaps(fmod_caps), min_frequency as i32, max_frequency as i32)),
e => Err(e),
}
}
pub fn get_record_position(&self, id: i32) -> Result<u32, ::Status> {
let mut position : c_uint = 0;
match unsafe { ffi::FMOD_System_GetRecordPosition(self.system, id as c_int,
&mut position) } {
::Status::Ok => Ok(position as u32),
e => Err(e),
}
}
pub fn start_record(&self, id: i32, sound: &sound::Sound, _loop: bool) -> ::Status {
let t_loop = match _loop {
true => 1,
_ => 0,
};
unsafe { ffi::FMOD_System_RecordStart(self.system, id as c_int, ffi::FFI::unwrap(sound),
t_loop) }
}
pub fn stop_record(&self, id: i32) -> ::Status {
unsafe { ffi::FMOD_System_RecordStop(self.system, id as c_int) }
}
pub fn is_recording(&self, id: i32) -> Result<bool, ::Status> {
let mut is_recording : c_int = 0;
match unsafe { ffi::FMOD_System_IsRecording(self.system, id as c_int, &mut is_recording) } {
::Status::Ok => Ok(is_recording == 1),
e => Err(e),
}
}
pub fn create_geometry(&self, max_polygons: i32,
max_vertices: i32) -> Result<geometry::Geometry, ::Status> {
let mut geometry = ::std::ptr::null_mut();
match unsafe { ffi::FMOD_System_CreateGeometry(self.system, max_polygons as c_int,
max_vertices as c_int, &mut geometry) } {
::Status::Ok => Ok(ffi::FFI::wrap(geometry)),
e => Err(e),
}
}
pub fn set_geometry_settings(&self, max_world_size: f32) -> ::Status {
unsafe { ffi::FMOD_System_SetGeometrySettings(self.system, max_world_size) }
}
pub fn get_geometry_settings(&self) -> Result<f32, ::Status> {
let mut max_world_size = 0f32;
match unsafe { ffi::FMOD_System_GetGeometrySettings(self.system, &mut max_world_size) } {
::Status::Ok => Ok(max_world_size),
e => Err(e),
}
}
/// Returns:
///
/// Ok(listener, source, direct, reverb)
pub fn get_geometry_occlusion(&self)
-> Result<(vector::Vector, vector::Vector, f32, f32), ::Status> {
let listener = vector::get_ffi(&vector::Vector::new());
let source = vector::get_ffi(&vector::Vector::new());
let mut direct = 0f32;
let mut reverb = 0f32;
match unsafe { ffi::FMOD_System_GetGeometryOcclusion(self.system, &listener, &source,
&mut direct, &mut reverb) } {
::Status::Ok => Ok((vector::from_ptr(listener),
vector::from_ptr(source), direct, reverb)),
e => Err(e),
}
}
/// Returns:
///
/// Ok(memory_used, details)
pub fn get_memory_info(&self, MemoryBits(memory_bits): MemoryBits,
EventMemoryBits(event_memory_bits): EventMemoryBits)
-> Result<(u32, MemoryUsageDetails), ::Status> {
let mut details = get_memory_usage_details_ffi(Default::default());
let mut memory_used : c_uint = 0;
match unsafe { ffi::FMOD_System_GetMemoryInfo(self.system, memory_bits, event_memory_bits,
&mut memory_used, &mut details) } {
::Status::Ok => Ok((memory_used as u32, from_memory_usage_details_ptr(details))),
e => Err(e),
}
}
pub fn set_file_system(&self, user_open: FileOpenCallback, user_close: FileCloseCallback,
user_read: FileReadCallback, user_seek: FileSeekCallback,/*
user_async_read: ffi::FMOD_FILE_ASYNCREADCALLBACK,
user_async_cancel: ffi::FMOD_FILE_ASYNCCANCELCALLBACK,*/
block_align: i32) -> ::Status {
let tmp = get_saved_sys_callback();
tmp.file_open = user_open;
tmp.file_read = user_read;
tmp.file_close = user_close;
tmp.file_seek = user_seek;
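        // Only install an extern "C" trampoline for the callbacks the caller
        // actually supplied; the rest are passed to FMOD as None (NULL).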
unsafe { ffi::FMOD_System_SetFileSystem(self.system,
match user_open {
Some(_) => Some(file_open_callback as extern "C" fn(*mut _, _, *mut _, *mut *mut _,
*mut *mut _) -> _),
None => None
},
match user_close {
Some(_) => Some(file_close_callback as extern "C" fn(*mut _, *mut _) -> _),
None => None
},
match user_read {
Some(_) => Some(file_read_callback as extern "C" fn(*mut _, *mut _, _, *mut _,
*mut _) -> _),
None => None
},
match user_seek {
Some(_) => Some(file_seek_callback as extern "C" fn(*mut _, _, *mut _) -> _),
None => None
},
None,
None,
block_align)
}
}
}
| 42.634659 | 106 | 0.5657 |
eff595ab6c2b768336e709a21e14286272c39eb3 | 116 | pub mod content;
pub mod engine;
pub mod import;
pub mod segment;
pub mod endpoint;
pub use endpoint::NeiEndpoint;
| 14.5 | 30 | 0.767241 |
5d284981c70afda49338ac299f872c3e2feeb832 | 20,659 | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::sync::Lrc;
use rustc::ty::query::Providers;
use rustc::ty::{self, TyCtxt};
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::lint::builtin::{SAFE_EXTERN_STATICS, SAFE_PACKED_BORROWS, UNUSED_UNSAFE};
use rustc::mir::*;
use rustc::mir::visit::{PlaceContext, Visitor};
use syntax::ast;
use syntax::symbol::Symbol;
use util;
pub struct UnsafetyChecker<'a, 'tcx: 'a> {
mir: &'a Mir<'tcx>,
source_scope_local_data: &'a IndexVec<SourceScope, SourceScopeLocalData>,
violations: Vec<UnsafetyViolation>,
source_info: SourceInfo,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
used_unsafe: FxHashSet<ast::NodeId>,
inherited_blocks: Vec<(ast::NodeId, bool)>,
}
impl<'a, 'gcx, 'tcx> UnsafetyChecker<'a, 'tcx> {
fn new(mir: &'a Mir<'tcx>,
source_scope_local_data: &'a IndexVec<SourceScope, SourceScopeLocalData>,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>) -> Self {
Self {
mir,
source_scope_local_data,
violations: vec![],
source_info: SourceInfo {
span: mir.span,
scope: OUTERMOST_SOURCE_SCOPE
},
tcx,
param_env,
used_unsafe: FxHashSet(),
inherited_blocks: vec![],
}
}
}
impl<'a, 'tcx> Visitor<'tcx> for UnsafetyChecker<'a, 'tcx> {
fn visit_terminator(&mut self,
block: BasicBlock,
terminator: &Terminator<'tcx>,
location: Location)
{
self.source_info = terminator.source_info;
match terminator.kind {
TerminatorKind::Goto { .. } |
TerminatorKind::SwitchInt { .. } |
TerminatorKind::Drop { .. } |
TerminatorKind::Yield { .. } |
TerminatorKind::Assert { .. } |
TerminatorKind::DropAndReplace { .. } |
TerminatorKind::GeneratorDrop |
TerminatorKind::Resume |
TerminatorKind::Abort |
TerminatorKind::Return |
TerminatorKind::Unreachable |
TerminatorKind::FalseEdges { .. } |
TerminatorKind::FalseUnwind { .. } => {
// safe (at least as emitted during MIR construction)
}
TerminatorKind::Call { ref func, .. } => {
let func_ty = func.ty(self.mir, self.tcx);
let sig = func_ty.fn_sig(self.tcx);
if let hir::Unsafety::Unsafe = sig.unsafety() {
self.require_unsafe("call to unsafe function",
"consult the function's documentation for information on how to avoid \
undefined behavior")
}
}
}
self.super_terminator(block, terminator, location);
}
fn visit_statement(&mut self,
block: BasicBlock,
statement: &Statement<'tcx>,
location: Location)
{
self.source_info = statement.source_info;
match statement.kind {
StatementKind::Assign(..) |
StatementKind::ReadForMatch(..) |
StatementKind::SetDiscriminant { .. } |
StatementKind::StorageLive(..) |
StatementKind::StorageDead(..) |
StatementKind::EndRegion(..) |
StatementKind::Validate(..) |
StatementKind::UserAssertTy(..) |
StatementKind::Nop => {
// safe (at least as emitted during MIR construction)
}
StatementKind::InlineAsm { .. } => {
self.require_unsafe("use of inline assembly",
"inline assembly is entirely unchecked and can cause undefined behavior")
},
}
self.super_statement(block, statement, location);
}
fn visit_rvalue(&mut self,
rvalue: &Rvalue<'tcx>,
location: Location)
{
if let &Rvalue::Aggregate(box ref aggregate, _) = rvalue {
match aggregate {
&AggregateKind::Array(..) |
&AggregateKind::Tuple |
&AggregateKind::Adt(..) => {}
&AggregateKind::Closure(def_id, _) |
&AggregateKind::Generator(def_id, _, _) => {
let UnsafetyCheckResult {
violations, unsafe_blocks
} = self.tcx.unsafety_check_result(def_id);
self.register_violations(&violations, &unsafe_blocks);
}
}
}
self.super_rvalue(rvalue, location);
}
fn visit_place(&mut self,
place: &Place<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
if let PlaceContext::Borrow { .. } = context {
if util::is_disaligned(self.tcx, self.mir, self.param_env, place) {
let source_info = self.source_info;
let lint_root =
self.source_scope_local_data[source_info.scope].lint_root;
self.register_violations(&[UnsafetyViolation {
source_info,
description: Symbol::intern("borrow of packed field").as_interned_str(),
details:
Symbol::intern("fields of packed structs might be misaligned: \
dereferencing a misaligned pointer or even just creating a \
misaligned reference is undefined behavior")
.as_interned_str(),
kind: UnsafetyViolationKind::BorrowPacked(lint_root)
}], &[]);
}
}
match place {
&Place::Projection(box Projection {
ref base, ref elem
}) => {
let old_source_info = self.source_info;
if let &Place::Local(local) = base {
if self.mir.local_decls[local].internal {
// Internal locals are used in the `move_val_init` desugaring.
// We want to check unsafety against the source info of the
// desugaring, rather than the source info of the RHS.
self.source_info = self.mir.local_decls[local].source_info;
}
}
let base_ty = base.ty(self.mir, self.tcx).to_ty(self.tcx);
match base_ty.sty {
ty::RawPtr(..) => {
self.require_unsafe("dereference of raw pointer",
"raw pointers may be NULL, dangling or unaligned; they can violate \
aliasing rules and cause data races: all of these are undefined \
behavior")
}
ty::Adt(adt, _) => {
if adt.is_union() {
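                            // Writing to or dropping a union field is unsafe only
                            // when the previous value would be dropped (i.e. the
                            // field type is not `Copy`); reading one is always unsafe.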
if context == PlaceContext::Store ||
context == PlaceContext::AsmOutput ||
context == PlaceContext::Drop
{
let elem_ty = match elem {
&ProjectionElem::Field(_, ty) => ty,
_ => span_bug!(
self.source_info.span,
"non-field projection {:?} from union?",
place)
};
if elem_ty.moves_by_default(self.tcx, self.param_env,
self.source_info.span) {
self.require_unsafe(
"assignment to non-`Copy` union field",
"the previous content of the field will be dropped, which \
causes undefined behavior if the field was not properly \
initialized")
} else {
// write to non-move union, safe
}
} else {
self.require_unsafe("access to union field",
"the field may not be properly initialized: using \
uninitialized data will cause undefined behavior")
}
}
}
_ => {}
}
self.source_info = old_source_info;
}
&Place::Local(..) => {
// locals are safe
}
&Place::Promoted(_) => {
bug!("unsafety checking should happen before promotion")
}
&Place::Static(box Static { def_id, ty: _ }) => {
if self.tcx.is_static(def_id) == Some(hir::Mutability::MutMutable) {
self.require_unsafe("use of mutable static",
"mutable statics can be mutated by multiple threads: aliasing violations \
or data races will cause undefined behavior");
} else if self.tcx.is_foreign_item(def_id) {
let source_info = self.source_info;
let lint_root =
self.source_scope_local_data[source_info.scope].lint_root;
self.register_violations(&[UnsafetyViolation {
source_info,
description: Symbol::intern("use of extern static").as_interned_str(),
details:
Symbol::intern("extern statics are not controlled by the Rust type \
system: invalid data, aliasing violations or data \
races will cause undefined behavior")
.as_interned_str(),
kind: UnsafetyViolationKind::ExternStatic(lint_root)
}], &[]);
}
}
};
self.super_place(place, context, location);
}
}
impl<'a, 'tcx> UnsafetyChecker<'a, 'tcx> {
fn require_unsafe(&mut self,
description: &'static str,
details: &'static str)
{
let source_info = self.source_info;
self.register_violations(&[UnsafetyViolation {
source_info,
description: Symbol::intern(description).as_interned_str(),
details: Symbol::intern(details).as_interned_str(),
kind: UnsafetyViolationKind::General,
}], &[]);
}
fn register_violations(&mut self,
violations: &[UnsafetyViolation],
unsafe_blocks: &[(ast::NodeId, bool)]) {
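        // The safety context of the current scope decides what to do with the
        // violations: in safe code they are recorded (and later reported as
        // errors), while an explicit `unsafe` block is merely marked as used.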
let within_unsafe = match self.source_scope_local_data[self.source_info.scope].safety {
Safety::Safe => {
for violation in violations {
if !self.violations.contains(violation) {
self.violations.push(violation.clone())
}
}
false
}
Safety::BuiltinUnsafe | Safety::FnUnsafe => true,
Safety::ExplicitUnsafe(node_id) => {
if !violations.is_empty() {
self.used_unsafe.insert(node_id);
}
true
}
};
self.inherited_blocks.extend(unsafe_blocks.iter().map(|&(node_id, is_used)| {
(node_id, is_used && !within_unsafe)
}));
}
}
pub(crate) fn provide(providers: &mut Providers) {
*providers = Providers {
unsafety_check_result,
unsafe_derive_on_repr_packed,
..*providers
};
}
struct UnusedUnsafeVisitor<'a> {
used_unsafe: &'a FxHashSet<ast::NodeId>,
unsafe_blocks: &'a mut Vec<(ast::NodeId, bool)>,
}
impl<'a, 'tcx> hir::intravisit::Visitor<'tcx> for UnusedUnsafeVisitor<'a> {
fn nested_visit_map<'this>(&'this mut self) ->
hir::intravisit::NestedVisitorMap<'this, 'tcx>
{
hir::intravisit::NestedVisitorMap::None
}
fn visit_block(&mut self, block: &'tcx hir::Block) {
hir::intravisit::walk_block(self, block);
if let hir::UnsafeBlock(hir::UserProvided) = block.rules {
self.unsafe_blocks.push((block.id, self.used_unsafe.contains(&block.id)));
}
}
}
fn check_unused_unsafe<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
used_unsafe: &FxHashSet<ast::NodeId>,
unsafe_blocks: &'a mut Vec<(ast::NodeId, bool)>)
{
let body_id =
tcx.hir.as_local_node_id(def_id).and_then(|node_id| {
tcx.hir.maybe_body_owned_by(node_id)
});
let body_id = match body_id {
Some(body) => body,
None => {
debug!("check_unused_unsafe({:?}) - no body found", def_id);
return
}
};
let body = tcx.hir.body(body_id);
debug!("check_unused_unsafe({:?}, body={:?}, used_unsafe={:?})",
def_id, body, used_unsafe);
let mut visitor = UnusedUnsafeVisitor { used_unsafe, unsafe_blocks };
hir::intravisit::Visitor::visit_body(&mut visitor, body);
}
fn unsafety_check_result<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
-> UnsafetyCheckResult
{
debug!("unsafety_violations({:?})", def_id);
// NB: this borrow is valid because all the consumers of
// `mir_built` force this.
let mir = &tcx.mir_built(def_id).borrow();
let source_scope_local_data = match mir.source_scope_local_data {
ClearCrossCrate::Set(ref data) => data,
ClearCrossCrate::Clear => {
debug!("unsafety_violations: {:?} - remote, skipping", def_id);
return UnsafetyCheckResult {
violations: Lrc::new([]),
unsafe_blocks: Lrc::new([])
}
}
};
let param_env = tcx.param_env(def_id);
let mut checker = UnsafetyChecker::new(
mir, source_scope_local_data, tcx, param_env);
checker.visit_mir(mir);
check_unused_unsafe(tcx, def_id, &checker.used_unsafe, &mut checker.inherited_blocks);
UnsafetyCheckResult {
violations: checker.violations.into(),
unsafe_blocks: checker.inherited_blocks.into()
}
}
fn unsafe_derive_on_repr_packed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
let lint_node_id = match tcx.hir.as_local_node_id(def_id) {
Some(node_id) => node_id,
None => bug!("checking unsafety for non-local def id {:?}", def_id)
};
// FIXME: when we make this a hard error, this should have its
// own error code.
let message = if tcx.generics_of(def_id).own_counts().types != 0 {
"#[derive] can't be used on a #[repr(packed)] struct with \
type parameters (error E0133)".to_string()
} else {
"#[derive] can't be used on a #[repr(packed)] struct that \
does not derive Copy (error E0133)".to_string()
};
tcx.lint_node(SAFE_PACKED_BORROWS,
lint_node_id,
tcx.def_span(def_id),
&message);
}
/// Return the kind ("block" or "fn") and `NodeId` of an enclosing scope that is also `unsafe`, if any
fn is_enclosed(tcx: TyCtxt,
used_unsafe: &FxHashSet<ast::NodeId>,
id: ast::NodeId) -> Option<(String, ast::NodeId)> {
let parent_id = tcx.hir.get_parent_node(id);
if parent_id != id {
if used_unsafe.contains(&parent_id) {
Some(("block".to_string(), parent_id))
} else if let Some(hir::map::NodeItem(&hir::Item {
node: hir::ItemKind::Fn(_, header, _, _),
..
})) = tcx.hir.find(parent_id) {
match header.unsafety {
hir::Unsafety::Unsafe => Some(("fn".to_string(), parent_id)),
hir::Unsafety::Normal => None,
}
} else {
is_enclosed(tcx, used_unsafe, parent_id)
}
} else {
None
}
}
fn report_unused_unsafe(tcx: TyCtxt, used_unsafe: &FxHashSet<ast::NodeId>, id: ast::NodeId) {
let span = tcx.sess.source_map().def_span(tcx.hir.span(id));
let msg = "unnecessary `unsafe` block";
let mut db = tcx.struct_span_lint_node(UNUSED_UNSAFE, id, span, msg);
db.span_label(span, msg);
if let Some((kind, id)) = is_enclosed(tcx, used_unsafe, id) {
db.span_label(tcx.sess.source_map().def_span(tcx.hir.span(id)),
format!("because it's nested under this `unsafe` {}", kind));
}
db.emit();
}
fn builtin_derive_def_id<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option<DefId> {
debug!("builtin_derive_def_id({:?})", def_id);
if let Some(impl_def_id) = tcx.impl_of_method(def_id) {
if tcx.has_attr(impl_def_id, "automatically_derived") {
debug!("builtin_derive_def_id({:?}) - is {:?}", def_id, impl_def_id);
Some(impl_def_id)
} else {
debug!("builtin_derive_def_id({:?}) - not automatically derived", def_id);
None
}
} else {
debug!("builtin_derive_def_id({:?}) - not a method", def_id);
None
}
}
pub fn check_unsafety<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
debug!("check_unsafety({:?})", def_id);
// closures are handled by their parent fn.
if tcx.is_closure(def_id) {
return;
}
let UnsafetyCheckResult {
violations,
unsafe_blocks
} = tcx.unsafety_check_result(def_id);
for &UnsafetyViolation {
source_info, description, details, kind
} in violations.iter() {
// Report an error.
match kind {
UnsafetyViolationKind::General => {
struct_span_err!(
tcx.sess, source_info.span, E0133,
"{} is unsafe and requires unsafe function or block", description)
.span_label(source_info.span, &description.as_str()[..])
.note(&details.as_str()[..])
.emit();
}
UnsafetyViolationKind::ExternStatic(lint_node_id) => {
tcx.lint_node_note(SAFE_EXTERN_STATICS,
lint_node_id,
source_info.span,
&format!("{} is unsafe and requires unsafe function or block \
(error E0133)", &description.as_str()[..]),
&details.as_str()[..]);
}
UnsafetyViolationKind::BorrowPacked(lint_node_id) => {
if let Some(impl_def_id) = builtin_derive_def_id(tcx, def_id) {
tcx.unsafe_derive_on_repr_packed(impl_def_id);
} else {
tcx.lint_node_note(SAFE_PACKED_BORROWS,
lint_node_id,
source_info.span,
&format!("{} is unsafe and requires unsafe function or block \
(error E0133)", &description.as_str()[..]),
&details.as_str()[..]);
}
}
}
}
let mut unsafe_blocks: Vec<_> = unsafe_blocks.into_iter().collect();
unsafe_blocks.sort();
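    // Collect the blocks whose `unsafe` was actually needed; the remaining
    // ones are reported as unnecessary below.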
let used_unsafe: FxHashSet<_> = unsafe_blocks.iter()
.flat_map(|&&(id, used)| if used { Some(id) } else { None })
.collect();
for &(block_id, is_used) in unsafe_blocks {
if !is_used {
report_unused_unsafe(tcx, &used_unsafe, block_id);
}
}
}
| 40.192607 | 100 | 0.516579 |
09b6eb7f352c4d79ca84dfd7413a67d026e00b8f | 24,455 | use super::*;
use etherparse::packet_filter::*;
use proptest::*;
#[test]
fn default() {
let value: ElementFilter<IpFilter> = Default::default();
assert_eq!(ElementFilter::Any, value);
}
///The packet filter test generates all permutations of packet combinations & filter configurations
///and tests that all of them return the correct result.
#[derive(Debug, Clone, Default)]
struct PacketFilterTest {
link: Option<Ethernet2Header>,
vlan: Option<VlanHeader>,
ip: Option<IpHeader>,
transport: Option<TransportHeader>,
filter: Filter
}
impl PacketFilterTest {
///Add all permutations of vlan data types to the test (none, single, double)
    ///and then proceeds by calling "add_ip_data" with each permutation.
fn add_vlan_data(&self,
outer_vlan: &SingleVlanHeader,
inner_vlan: &SingleVlanHeader,
ipv4: &(Ipv4Header, Vec<u8>),
ipv6: &Ipv6Header,
udp: &UdpHeader,
tcp: &TcpHeader)
{
//none
{
let mut t = self.clone();
t.vlan = None;
t.add_transport_data(udp, tcp);
}
//single
{
let mut t = self.clone();
t.vlan = Some(VlanHeader::Single(inner_vlan.clone()));
t.add_ip_data(ipv4, ipv6, udp, tcp);
}
//double
{
let mut t = self.clone();
t.vlan = Some(VlanHeader::Double(DoubleVlanHeader {
outer: outer_vlan.clone(),
inner: inner_vlan.clone()
}));
t.add_ip_data(ipv4, ipv6, udp, tcp);
}
}
///Add all permutations of ip data types to the test (none, v4, v6)
    ///and then proceeds by calling "add_transport_data" with each permutation.
fn add_ip_data(&self,
ipv4: &(Ipv4Header, Vec<u8>),
ipv6: &Ipv6Header,
udp: &UdpHeader,
tcp: &TcpHeader)
{
//none
{
let mut t = self.clone();
t.ip = None;
t.add_transport_data(udp, tcp);
}
//ipv4
{
let mut t = self.clone();
t.ip = Some(IpHeader::Version4(ipv4.0.clone()));
t.add_transport_data(udp, tcp);
}
//ipv6
{
let mut t = self.clone();
t.ip = Some(IpHeader::Version6(ipv6.clone()));
t.add_transport_data(udp, tcp);
}
}
///Add all permutations of transport data types to the test (none, udp, tcp)
    ///and then proceeds by calling "add_link_filter" with each permutation.
fn add_transport_data(&self, udp: &UdpHeader, tcp: &TcpHeader) {
//none
{
let mut t = self.clone();
t.transport = None;
t.add_link_filter(true);
}
//tcp
{
let mut t = self.clone();
t.transport = Some(TransportHeader::Tcp(tcp.clone()));
t.add_link_filter(true);
}
//udp
{
let mut t = self.clone();
t.transport = Some(TransportHeader::Udp(udp.clone()));
t.add_link_filter(true);
}
}
fn add_link_filter(&self, expected_result: bool) {
//any
{
let mut t = self.clone();
t.filter.link = ElementFilter::Any;
t.add_vlan_filter(expected_result);
}
//none
{
let mut t = self.clone();
t.filter.link = ElementFilter::No;
t.add_vlan_filter(match &t.link {
None => expected_result,
_ => false
});
}
//some
match &self.link {
Some(_) => {
let mut t = self.clone();
t.filter.link = ElementFilter::Some(
LinkFilter::Ethernet2 {
source: None,
destination: None
}
);
t.add_vlan_filter(expected_result);
},
_ => {
//test that the filter results in a negative match
let mut t = self.clone();
t.filter.link = ElementFilter::Some(
LinkFilter::Ethernet2 {
source: None,
destination: None
}
);
t.add_vlan_filter(false);
}
}
}
fn add_vlan_filter(&self, expected_result: bool) {
//any
{
let mut t = self.clone();
t.filter.vlan = ElementFilter::Any;
t.add_ip_filter(expected_result);
}
//none
{
let mut t = self.clone();
t.filter.vlan = ElementFilter::No;
t.add_ip_filter(match &t.vlan {
None => expected_result,
_ => false
});
}
//single
match &self.vlan {
Some(VlanHeader::Single(_)) => {
let mut t = self.clone();
t.filter.vlan = ElementFilter::Some(
VlanFilter::Single(None)
);
t.add_ip_filter(expected_result);
},
Some(VlanHeader::Double(_)) => {
let mut t = self.clone();
t.filter.vlan = ElementFilter::Some(
VlanFilter::Double{
outer_identifier: None,
inner_identifier: None,
}
);
t.add_ip_filter(expected_result);
},
_ => {
//test that the filter results in a negative match
let mut t = self.clone();
t.filter.vlan = ElementFilter::Some(
VlanFilter::Single(None)
);
t.add_ip_filter(false);
}
}
}
fn add_ip_filter(&self, expected_result: bool) {
//any
{
let mut t = self.clone();
t.filter.ip = ElementFilter::Any;
t.add_transport_filter(expected_result);
}
//none
{
let mut t = self.clone();
t.filter.ip = ElementFilter::No;
t.add_transport_filter(match &t.ip {
None => expected_result,
_ => false
});
}
//some
match &self.ip {
Some(IpHeader::Version4(_)) => {
let mut t = self.clone();
t.filter.ip = ElementFilter::Some(
IpFilter::Ipv4 {
source: None,
destination: None
}
);
t.add_transport_filter(expected_result);
},
Some(IpHeader::Version6(_)) => {
let mut t = self.clone();
t.filter.ip = ElementFilter::Some(
IpFilter::Ipv6 {
source: None,
destination: None
}
);
t.add_transport_filter(expected_result);
},
_ => {
//test that the filter results in a negative match
let mut t = self.clone();
t.filter.ip = ElementFilter::Some(
IpFilter::Ipv4 {
source: None,
destination: None
}
);
t.add_transport_filter(false);
}
}
}
fn add_transport_filter(&self, expected_result: bool) {
//any
{
let mut t = self.clone();
t.filter.transport = ElementFilter::Any;
t.run(expected_result);
}
//none
{
let mut t = self.clone();
t.filter.transport = ElementFilter::No;
t.run(match &t.transport {
None => expected_result,
_ => false
});
}
//some
match &self.transport {
Some(TransportHeader::Udp(_)) => {
let mut t = self.clone();
t.filter.transport = ElementFilter::Some(
TransportFilter::Udp {
source_port: None,
destination_port: None
}
);
t.run(expected_result);
},
Some(TransportHeader::Tcp(_)) => {
let mut t = self.clone();
t.filter.transport = ElementFilter::Some(
TransportFilter::Tcp {
source_port: None,
destination_port: None
}
);
t.run(expected_result);
},
_ => {
//test that the filter results in a negative match
let mut t = self.clone();
t.filter.transport = ElementFilter::Some(
TransportFilter::Udp {
source_port: None,
destination_port: None
}
);
t.run(false);
}
}
}
    ///Gives self.filter the headers in self as input and asserts that the filter result matches the given parameter.
fn run(&self, expected_result: bool) {
//generate a slice containing the headers
let mut link_data = Vec::new();
let mut vlan_data = Vec::new();
let mut ip_data = Vec::new();
let mut transport_data = Vec::new();
let payload = Vec::new();
let slice = SlicedPacket {
link: match &self.link {
Some(header) => {
header.write(&mut link_data).unwrap();
Some(LinkSlice::Ethernet2(Ethernet2HeaderSlice::from_slice(&link_data[..]).unwrap()))
},
None => None
},
vlan: match &self.vlan {
Some(VlanHeader::Single(header)) => {
header.write(&mut vlan_data).unwrap();
Some(VlanSlice::SingleVlan(SingleVlanHeaderSlice::from_slice(&vlan_data[..]).unwrap()))
},
Some(VlanHeader::Double(header)) => {
header.write(&mut vlan_data).unwrap();
Some(VlanSlice::DoubleVlan(DoubleVlanHeaderSlice::from_slice(&vlan_data[..]).unwrap()))
},
None => None
},
ip: match &self.ip {
Some(IpHeader::Version4(header)) => {
header.write(&mut ip_data).unwrap();
Some(InternetSlice::Ipv4(Ipv4HeaderSlice::from_slice(&ip_data[..]).unwrap()))
},
Some(IpHeader::Version6(header)) => {
header.write(&mut ip_data).unwrap();
Some(
InternetSlice::Ipv6(Ipv6HeaderSlice::from_slice(&ip_data[..]).unwrap(),
[None, None, None, None, None,
None, None, None, None, None,
None, None]))
},
None => None
},
transport: match &self.transport {
Some(TransportHeader::Udp(header)) => {
header.write(&mut transport_data).unwrap();
Some(TransportSlice::Udp(UdpHeaderSlice::from_slice(&transport_data[..]).unwrap()))
},
Some(TransportHeader::Tcp(header)) => {
header.write(&mut transport_data).unwrap();
Some(TransportSlice::Tcp(TcpHeaderSlice::from_slice(&transport_data[..]).unwrap()))
},
None => None
},
payload: &payload[..]
};
assert_eq!(expected_result, self.filter.applies_to_slice(&slice));
}
}
///Test that all known packet compositions are parsed correctly.
#[test]
fn test_compositions()
{
//test without link
{
let test: PacketFilterTest = Default::default();
test.add_vlan_data(
&{
//explicitly set the outer vlan ether_type id
let mut re : SingleVlanHeader = Default::default();
re.ether_type = EtherType::VlanTaggedFrame as u16;
re
},
&Default::default(),
&Default::default(),
&Default::default(),
&Default::default(),
&Default::default()
);
}
//test with ethernet2 link
{
let mut test: PacketFilterTest = Default::default();
test.link = Some(Default::default());
test.add_vlan_data(
&{
//explicitly set the outer vlan ether_type id
let mut re : SingleVlanHeader = Default::default();
re.ether_type = EtherType::VlanTaggedFrame as u16;
re
},
&Default::default(),
&Default::default(),
&Default::default(),
&Default::default(),
&Default::default()
);
}
}
#[cfg(test)]
mod link_filter {
use super::*;
proptest! {
#[test]
fn applies_to_slice(ref eth in ethernet_2_unknown())
{
use self::LinkFilter::*;
//create the slice the filter can be checked against
let eth_data = {
let mut eth_data = Vec::new();
eth.write(&mut eth_data).unwrap();
eth_data };
let eth_slice = LinkSlice::Ethernet2(
Ethernet2HeaderSlice::from_slice(ð_data[..]).unwrap()
);
//test ethernet 2 filter with wildcards
{
let wildcard = Ethernet2 {
source: Some(eth.source),
destination: Some(eth.destination)
};
assert_eq!(true, wildcard.applies_to_slice(ð_slice));
}
//matching
assert_eq!(true, Ethernet2 {
source: Some(eth.source),
destination: Some(eth.destination)
}.applies_to_slice(ð_slice));
//non matching
assert_eq!(false, Ethernet2 {
source: Some({
let mut value = eth.source;
value[0] = !value[0];
value
}),
destination: Some(eth.destination)
}.applies_to_slice(ð_slice));
assert_eq!(false, Ethernet2 {
source: Some(eth.source),
destination: Some({
let mut value = eth.destination;
value[0] = !value[0];
value
})
}.applies_to_slice(ð_slice));
}
}
}
#[cfg(test)]
mod vlan_filter {
use super::*;
proptest! {
#[test]
fn applies_to_slice(ref vlan_outer in vlan_single_with(EtherType::VlanTaggedFrame as u16),
ref vlan_inner in vlan_single_unknown())
{
use self::VlanFilter::*;
//create the slices the filters can be checked against
let single_data = {
let mut single_data = Vec::new();
vlan_inner.write(&mut single_data).unwrap();
single_data };
let single_slice = VlanSlice::SingleVlan(
SingleVlanHeaderSlice::from_slice(&single_data[..]).unwrap()
);
let double_data = {
let mut double_data = Vec::new();
DoubleVlanHeader {
outer: vlan_outer.clone(),
inner: vlan_inner.clone()
}.write(&mut double_data).unwrap();
double_data };
let double_slice = VlanSlice::DoubleVlan(
DoubleVlanHeaderSlice::from_slice(&double_data[..]).unwrap()
);
//test single vlan filter with wildcards
{
let wildcard = Single(None);
assert_eq!(true, wildcard.applies_to_slice(&single_slice));
assert_eq!(false, wildcard.applies_to_slice(&double_slice));
}
//matching
assert_eq!(true, Single(
Some(vlan_inner.vlan_identifier)
).applies_to_slice(&single_slice));
//non matching
assert_eq!(false, Single(
Some(!vlan_inner.vlan_identifier)
).applies_to_slice(&single_slice));
//test double vlan filter with wildcards
{
let wildcard = Double {
outer_identifier: None,
inner_identifier: None
};
assert_eq!(true, wildcard.applies_to_slice(&double_slice));
assert_eq!(false, wildcard.applies_to_slice(&single_slice));
}
//matching
assert_eq!(true, Double {
outer_identifier: Some(vlan_outer.vlan_identifier),
inner_identifier: Some(vlan_inner.vlan_identifier)
}.applies_to_slice(&double_slice));
//non matching
assert_eq!(false, Double {
outer_identifier: Some(!vlan_outer.vlan_identifier),
inner_identifier: Some(vlan_inner.vlan_identifier)
}.applies_to_slice(&double_slice));
assert_eq!(false, Double {
outer_identifier: Some(vlan_outer.vlan_identifier),
inner_identifier: Some(!vlan_inner.vlan_identifier)
}.applies_to_slice(&double_slice));
}
}
}
#[cfg(test)]
mod ip_filter {
use super::*;
proptest! {
#[test]
fn applies_to_slice(ref ipv4 in ipv4_unknown(),
ref ipv6 in ipv6_unknown())
{
use self::IpFilter::*;
//create the slices the filters can be checked against
let ipv4_data = {
let mut ipv4_data = Vec::new();
ipv4.write(&mut ipv4_data).unwrap();
ipv4_data };
let ipv4_slice = InternetSlice::Ipv4(
Ipv4HeaderSlice::from_slice(&ipv4_data[..]).unwrap()
);
let ipv6_data = {
let mut ipv6_data = Vec::new();
ipv6.write(&mut ipv6_data).unwrap();
ipv6_data };
let ipv6_slice = InternetSlice::Ipv6(
Ipv6HeaderSlice::from_slice(&ipv6_data[..]).unwrap(),
[None, None, None, None, None,
None, None, None, None, None,
None, None]
);
//test ipv4 filter with wildcards
{
let wildcard = Ipv4 {
source: None,
destination:None
};
assert_eq!(true, wildcard.applies_to_slice(&ipv4_slice));
assert_eq!(false, wildcard.applies_to_slice(&ipv6_slice));
}
//matching
assert_eq!(true, Ipv4 {
source: Some(ipv4.source),
destination: Some(ipv4.destination)
}.applies_to_slice(&ipv4_slice));
//non matching
assert_eq!(false, Ipv4 {
source: Some({
let mut value = ipv4.source;
value[0] = !value[0];
value
}),
destination: Some(ipv4.destination)
}.applies_to_slice(&ipv4_slice));
assert_eq!(false, Ipv4 {
source: Some(ipv4.source),
destination: Some({
let mut value = ipv4.destination;
value[0] = !value[0];
value
})
}.applies_to_slice(&ipv4_slice));
//test ipv6 filter with wildcards
{
let wildcard = Ipv6 {
source: None,
destination:None
};
assert_eq!(true, wildcard.applies_to_slice(&ipv6_slice));
assert_eq!(false, wildcard.applies_to_slice(&ipv4_slice));
}
//matching
assert_eq!(true, Ipv6 {
source: Some(ipv6.source),
destination: Some(ipv6.destination)
}.applies_to_slice(&ipv6_slice));
//non matching
assert_eq!(false, Ipv6 {
source: Some({
let mut value = ipv6.source;
value[0] = !value[0];
value }),
destination: Some(ipv6.destination)
}.applies_to_slice(&ipv6_slice));
assert_eq!(false, Ipv6 {
source: Some(ipv6.source),
destination: Some({
let mut value = ipv6.destination;
value[0] = !value[0];
value })
}.applies_to_slice(&ipv6_slice));
}
}
}
#[cfg(test)]
mod transport_filter {
use super::*;
proptest! {
#[test]
fn applies_to_slice(ref udp in udp_any(),
ref tcp in tcp_any())
{
use self::TransportFilter::*;
//create the slices the filters can be checked against
let udp_data = {
let mut udp_data = Vec::new();
udp.write(&mut udp_data).unwrap();
udp_data };
let udp_slice = TransportSlice::Udp(
UdpHeaderSlice::from_slice(&udp_data[..]).unwrap()
);
let tcp_data = {
let mut tcp_data = Vec::new();
tcp.write(&mut tcp_data).unwrap();
tcp_data };
let tcp_slice = TransportSlice::Tcp(
TcpHeaderSlice::from_slice(&tcp_data[..]).unwrap()
);
//test udp filter with wildcards
{
let wildcard = Udp {
source_port: None,
destination_port:None
};
assert_eq!(true, wildcard.applies_to_slice(&udp_slice));
assert_eq!(false, wildcard.applies_to_slice(&tcp_slice));
}
//matching
assert_eq!(true, Udp {
source_port: Some(udp.source_port),
destination_port: Some(udp.destination_port)
}.applies_to_slice(&udp_slice));
//non matching
assert_eq!(false, Udp {
source_port: Some(!udp.source_port), //inverted port
destination_port: Some(udp.destination_port)
}.applies_to_slice(&udp_slice));
assert_eq!(false, Udp {
source_port: Some(udp.source_port),
destination_port: Some(!udp.destination_port) //inverted port
}.applies_to_slice(&udp_slice));
//test tcp filter with wildcards
{
let wildcard = Tcp {
source_port: None,
destination_port:None
};
assert_eq!(true, wildcard.applies_to_slice(&tcp_slice));
assert_eq!(false, wildcard.applies_to_slice(&udp_slice));
}
//matching
assert_eq!(true, Tcp {
source_port: Some(tcp.source_port),
destination_port: Some(tcp.destination_port)
}.applies_to_slice(&tcp_slice));
//non matching
assert_eq!(false, Tcp {
source_port: Some(!tcp.source_port), //inverted port
destination_port: Some(tcp.destination_port)
}.applies_to_slice(&tcp_slice));
assert_eq!(false, Tcp {
source_port: Some(tcp.source_port),
destination_port: Some(!tcp.destination_port) //inverted port
}.applies_to_slice(&tcp_slice));
}
}
}
#[test]
fn type_derives() {
println!("{:?}", TransportFilter::Udp{
source_port: None,
destination_port: None
});
println!("{:?}", TransportFilter::Tcp{
source_port: None,
destination_port: None
});
}
| 34.202797 | 107 | 0.472828 |
0e9a229a5e59207553cf03cda8c25722fcaf85cd | 673 | // run-pass
// Test equality constraints on associated types in a where clause.
pub trait ToI32 {
fn to_i32(&self) -> i32;
}
impl ToI32 for i32 {
fn to_i32(&self) -> i32 { *self }
}
impl ToI32 for u32 {
fn to_i32(&self) -> i32 { *self as i32 }
}
pub trait GetToI32
{
type R : ToI32;
fn get(&self) -> <Self as GetToI32>::R;
}
impl GetToI32 for i32 {
type R = i32;
fn get(&self) -> i32 { *self }
}
impl GetToI32 for u32 {
type R = u32;
fn get(&self) -> u32 { *self }
}
fn foo<G>(g: G) -> i32
where G : GetToI32
{
ToI32::to_i32(&g.get())
}
pub fn main() {
assert_eq!(foo(22i32), 22);
assert_eq!(foo(22u32), 22);
}
| 15.295455 | 67 | 0.569094 |
3a244d4ddff0302918ce487d70bd38c01da7d957 | 13,653 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#[cfg(test)]
mod mock_vm_test;
use diem_crypto::{ed25519::Ed25519PrivateKey, PrivateKey, Uniform};
use diem_state_view::StateView;
use diem_types::{
access_path::AccessPath,
account_address::AccountAddress,
account_config::{diem_root_address, validator_set_address, XUS_NAME},
chain_id::ChainId,
contract_event::ContractEvent,
event::EventKey,
on_chain_config::{
config_address, new_epoch_event_key, ConfigurationResource, OnChainConfig, ValidatorSet,
},
transaction::{
RawTransaction, Script, SignedTransaction, Transaction, TransactionArgument,
TransactionOutput, TransactionPayload, TransactionStatus,
},
vm_status::{KeptVMStatus, StatusCode, VMStatus},
write_set::{WriteOp, WriteSet, WriteSetMut},
};
use diem_vm::VMExecutor;
use move_core_types::{language_storage::TypeTag, move_resource::MoveResource};
use once_cell::sync::Lazy;
use std::collections::HashMap;
#[derive(Debug)]
enum MockVMTransaction {
Mint {
sender: AccountAddress,
amount: u64,
},
Payment {
sender: AccountAddress,
recipient: AccountAddress,
amount: u64,
},
Reconfiguration,
}
pub static KEEP_STATUS: Lazy<TransactionStatus> =
Lazy::new(|| TransactionStatus::Keep(KeptVMStatus::Executed));
// Transactions whose sender cannot cover the transfer are discarded with the
// INSUFFICIENT_BALANCE_FOR_TRANSACTION_FEE status code.
pub static DISCARD_STATUS: Lazy<TransactionStatus> =
Lazy::new(|| TransactionStatus::Discard(StatusCode::INSUFFICIENT_BALANCE_FOR_TRANSACTION_FEE));
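/// A minimal VM used for executor tests: it understands mint and payment
/// scripts plus a write-set based "reconfiguration" transaction, and its
/// state is little more than per-account balances and sequence numbers.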
pub struct MockVM;
impl VMExecutor for MockVM {
fn execute_block(
transactions: Vec<Transaction>,
state_view: &impl StateView,
) -> Result<Vec<TransactionOutput>, VMStatus> {
if state_view.is_genesis() {
assert_eq!(
transactions.len(),
1,
"Genesis block should have only one transaction."
);
let output = TransactionOutput::new(
gen_genesis_writeset(),
// mock the validator set event
vec![ContractEvent::new(
new_epoch_event_key(),
0,
TypeTag::Bool,
bcs::to_bytes(&0).unwrap(),
)],
0,
KEEP_STATUS.clone(),
);
return Ok(vec![output]);
}
// output_cache is used to store the output of transactions so they are visible to later
// transactions.
let mut output_cache = HashMap::new();
let mut outputs = vec![];
for txn in transactions {
match decode_transaction(txn.as_signed_user_txn().unwrap()) {
MockVMTransaction::Mint { sender, amount } => {
let old_balance = read_balance(&output_cache, state_view, sender);
let new_balance = old_balance + amount;
let old_seqnum = read_seqnum(&output_cache, state_view, sender);
let new_seqnum = old_seqnum + 1;
output_cache.insert(balance_ap(sender), new_balance);
output_cache.insert(seqnum_ap(sender), new_seqnum);
let write_set = gen_mint_writeset(sender, new_balance, new_seqnum);
let events = gen_events(sender);
outputs.push(TransactionOutput::new(
write_set,
events,
0,
KEEP_STATUS.clone(),
));
}
MockVMTransaction::Payment {
sender,
recipient,
amount,
} => {
let sender_old_balance = read_balance(&output_cache, state_view, sender);
let recipient_old_balance = read_balance(&output_cache, state_view, recipient);
if sender_old_balance < amount {
outputs.push(TransactionOutput::new(
WriteSet::default(),
vec![],
0,
DISCARD_STATUS.clone(),
));
continue;
}
let sender_old_seqnum = read_seqnum(&output_cache, state_view, sender);
let sender_new_seqnum = sender_old_seqnum + 1;
let sender_new_balance = sender_old_balance - amount;
let recipient_new_balance = recipient_old_balance + amount;
output_cache.insert(balance_ap(sender), sender_new_balance);
output_cache.insert(seqnum_ap(sender), sender_new_seqnum);
output_cache.insert(balance_ap(recipient), recipient_new_balance);
let write_set = gen_payment_writeset(
sender,
sender_new_balance,
sender_new_seqnum,
recipient,
recipient_new_balance,
);
let events = gen_events(sender);
outputs.push(TransactionOutput::new(
write_set,
events,
0,
TransactionStatus::Keep(KeptVMStatus::Executed),
));
}
MockVMTransaction::Reconfiguration => {
read_balance_from_storage(state_view, &balance_ap(validator_set_address()));
read_balance_from_storage(state_view, &balance_ap(diem_root_address()));
outputs.push(TransactionOutput::new(
// WriteSet cannot be empty so use genesis writeset only for testing.
gen_genesis_writeset(),
// mock the validator set event
vec![ContractEvent::new(
new_epoch_event_key(),
0,
TypeTag::Bool,
bcs::to_bytes(&0).unwrap(),
)],
0,
KEEP_STATUS.clone(),
));
}
}
}
Ok(outputs)
}
}
fn read_balance(
output_cache: &HashMap<AccessPath, u64>,
state_view: &impl StateView,
account: AccountAddress,
) -> u64 {
let balance_access_path = balance_ap(account);
match output_cache.get(&balance_access_path) {
Some(balance) => *balance,
None => read_balance_from_storage(state_view, &balance_access_path),
}
}
fn read_seqnum(
output_cache: &HashMap<AccessPath, u64>,
state_view: &impl StateView,
account: AccountAddress,
) -> u64 {
let seqnum_access_path = seqnum_ap(account);
match output_cache.get(&seqnum_access_path) {
Some(seqnum) => *seqnum,
None => read_seqnum_from_storage(state_view, &seqnum_access_path),
}
}
fn read_balance_from_storage(state_view: &impl StateView, balance_access_path: &AccessPath) -> u64 {
read_u64_from_storage(state_view, balance_access_path)
}
fn read_seqnum_from_storage(state_view: &impl StateView, seqnum_access_path: &AccessPath) -> u64 {
read_u64_from_storage(state_view, seqnum_access_path)
}
fn read_u64_from_storage(state_view: &impl StateView, access_path: &AccessPath) -> u64 {
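    // Entries that are absent from storage are read as zero, which doubles as
    // the initial balance / sequence number for untouched accounts.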
state_view
.get(access_path)
.expect("Failed to query storage.")
.map_or(0, |bytes| decode_bytes(&bytes))
}
fn decode_bytes(bytes: &[u8]) -> u64 {
let mut buf = [0; 8];
buf.copy_from_slice(bytes);
u64::from_le_bytes(buf)
}
fn balance_ap(account: AccountAddress) -> AccessPath {
AccessPath::new(account, b"balance".to_vec())
}
fn seqnum_ap(account: AccountAddress) -> AccessPath {
AccessPath::new(account, b"seqnum".to_vec())
}
fn gen_genesis_writeset() -> WriteSet {
let mut write_set = WriteSetMut::default();
let validator_set_ap = ValidatorSet::CONFIG_ID.access_path();
write_set.push((
validator_set_ap,
WriteOp::Value(bcs::to_bytes(&ValidatorSet::new(vec![])).unwrap()),
));
write_set.push((
AccessPath::new(config_address(), ConfigurationResource::resource_path()),
WriteOp::Value(bcs::to_bytes(&ConfigurationResource::default()).unwrap()),
));
write_set
.freeze()
.expect("genesis writeset should be valid")
}
fn gen_mint_writeset(sender: AccountAddress, balance: u64, seqnum: u64) -> WriteSet {
let mut write_set = WriteSetMut::default();
write_set.push((
balance_ap(sender),
WriteOp::Value(balance.to_le_bytes().to_vec()),
));
write_set.push((
seqnum_ap(sender),
WriteOp::Value(seqnum.to_le_bytes().to_vec()),
));
write_set.freeze().expect("mint writeset should be valid")
}
fn gen_payment_writeset(
sender: AccountAddress,
sender_balance: u64,
sender_seqnum: u64,
recipient: AccountAddress,
recipient_balance: u64,
) -> WriteSet {
let mut write_set = WriteSetMut::default();
write_set.push((
balance_ap(sender),
WriteOp::Value(sender_balance.to_le_bytes().to_vec()),
));
write_set.push((
seqnum_ap(sender),
WriteOp::Value(sender_seqnum.to_le_bytes().to_vec()),
));
write_set.push((
balance_ap(recipient),
WriteOp::Value(recipient_balance.to_le_bytes().to_vec()),
));
write_set
.freeze()
.expect("payment write set should be valid")
}
fn gen_events(sender: AccountAddress) -> Vec<ContractEvent> {
vec![ContractEvent::new(
EventKey::new_from_address(&sender, 0),
0,
TypeTag::Vector(Box::new(TypeTag::U8)),
b"event_data".to_vec(),
)]
}
pub fn encode_mint_program(amount: u64) -> Script {
let argument = TransactionArgument::U64(amount);
Script::new(vec![], vec![], vec![argument])
}
pub fn encode_transfer_program(recipient: AccountAddress, amount: u64) -> Script {
let argument1 = TransactionArgument::Address(recipient);
let argument2 = TransactionArgument::U64(amount);
Script::new(vec![], vec![], vec![argument1, argument2])
}
pub fn encode_mint_transaction(sender: AccountAddress, amount: u64) -> Transaction {
encode_transaction(sender, encode_mint_program(amount))
}
pub fn encode_transfer_transaction(
sender: AccountAddress,
recipient: AccountAddress,
amount: u64,
) -> Transaction {
encode_transaction(sender, encode_transfer_program(recipient, amount))
}
fn encode_transaction(sender: AccountAddress, program: Script) -> Transaction {
let raw_transaction = RawTransaction::new_script(
sender,
0,
program,
0,
0,
XUS_NAME.to_owned(),
0,
ChainId::test(),
);
let privkey = Ed25519PrivateKey::generate_for_testing();
Transaction::UserTransaction(
raw_transaction
.sign(&privkey, privkey.public_key())
.expect("Failed to sign raw transaction.")
.into_inner(),
)
}
pub fn encode_reconfiguration_transaction(sender: AccountAddress) -> Transaction {
let raw_transaction =
RawTransaction::new_write_set(sender, 0, WriteSet::default(), ChainId::test());
let privkey = Ed25519PrivateKey::generate_for_testing();
Transaction::UserTransaction(
raw_transaction
.sign(&privkey, privkey.public_key())
.expect("Failed to sign raw transaction.")
.into_inner(),
)
}
fn decode_transaction(txn: &SignedTransaction) -> MockVMTransaction {
let sender = txn.sender();
match txn.payload() {
TransactionPayload::Script(script) => {
assert!(script.code().is_empty(), "Code should be empty.");
match script.args().len() {
1 => match script.args()[0] {
TransactionArgument::U64(amount) => MockVMTransaction::Mint { sender, amount },
_ => unimplemented!(
"Only one integer argument is allowed for mint transactions."
),
},
2 => match (&script.args()[0], &script.args()[1]) {
(TransactionArgument::Address(recipient), TransactionArgument::U64(amount)) => {
MockVMTransaction::Payment {
sender,
recipient: *recipient,
amount: *amount,
}
}
_ => unimplemented!(
"The first argument for payment transaction must be recipient address \
and the second argument must be amount."
),
},
_ => unimplemented!("Transaction must have one or two arguments."),
}
}
TransactionPayload::ScriptFunction(_) => {
// TODO: we need to migrate Script to ScriptFunction later
unimplemented!("MockVM does not support script function transaction payload.")
}
TransactionPayload::WriteSet(_) => {
// Use WriteSet for reconfig only for testing.
MockVMTransaction::Reconfiguration
}
TransactionPayload::Module(_) => {
unimplemented!("MockVM does not support Module transaction payload.")
}
}
}
| 35.554688 | 100 | 0.580166 |
ab4fe21cb627890f2146b02b5b2e2318ca4287eb | 13,890 | // Copyright 2019 The Druid Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Traits for handling value types.
use std::rc::Rc;
use std::sync::Arc;
use crate::kurbo::{self, ParamCurve};
use crate::piet;
use crate::shell::Scale;
pub use druid_derive::Data;
/// A trait used to represent value types.
///
/// These should be cheap to compare and cheap to clone.
///
/// See <https://sinusoid.es/lager/model.html#id2> for a well-written
/// explanation of value types (albeit within a C++ context).
///
/// ## Derive macro
///
/// In general, you can use `derive` to generate a `Data` impl for your types.
///
/// ```
/// # use std::sync::Arc;
/// # use druid::Data;
/// #[derive(Clone, Data)]
/// enum Foo {
/// Case1(i32, f32),
/// Case2 { a: String, b: Arc<i32> }
/// }
/// ```
///
/// ### Derive macro attributes
///
/// There are a number of field attributes available for use with `derive(Data)`.
///
/// - **`#[data(ignore)]`**
///
/// Skip this field when computing `same`ness.
///
/// If the type you are implementing `Data` on contains some fields that are
/// not relevant to the `Data` impl, you can ignore them with this attribute.
///
/// - **`#[data(same_fn = "path")]`**
///
/// Use a specific function to compute `same`ness.
///
/// By default, derived implementations of `Data` just call [`Data::same`]
/// recursively on each field. With this attribute, you can specify a
/// custom function that will be used instead.
///
/// This function must have a signature in the form, `fn<T>(&T, &T) -> bool`,
/// where `T` is the type of the field.
///
/// ## Collection types
///
/// `Data` is not implemented for `std` collection types, because comparing them
/// can be expensive. To use collection types with druid, there are two easy options:
/// either wrap the collection in an `Arc`, or build druid with the `im` feature,
/// which adds `Data` implementations to the collections from the [`im` crate],
/// a set of immutable data structures that fit nicely with druid.
///
/// If the `im` feature is used, the `im` crate is reexported from the root
/// of the druid crate.
///
/// ### Example:
///
/// ```
/// # use std::path::PathBuf;
/// # use std::time::Instant;
/// # use druid::Data;
/// #[derive(Clone, Data)]
/// struct PathEntry {
/// // There's no Data impl for PathBuf, but no problem
/// #[data(same_fn = "PartialEq::eq")]
/// path: PathBuf,
/// priority: usize,
/// // This field is not part of our data model.
/// #[data(ignore)]
/// last_read: Instant,
/// }
/// ```
///
/// ## C-style enums
///
/// In the case of a "c-style" enum (one that only contains unit variants,
/// that is where no variant has fields), the implementation that is generated
/// checks for equality. Therefore, such types must also implement `PartialEq`.
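///
/// For example, a minimal sketch of such an enum:
///
/// ```
/// # use druid::Data;
/// #[derive(Clone, PartialEq, Data)]
/// enum Mode { Read, Write, Append }
/// ```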
///
/// [`Data::same`]: trait.Data.html#tymethod.same
/// [`im` crate]: https://docs.rs/im
pub trait Data: Clone + 'static {
//// ANCHOR: same_fn
/// Determine whether two values are the same.
///
/// This is intended to always be a fast operation. If it returns
/// `true`, the two values *must* be equal, but two equal values
/// need not be considered the same here, as will often be the
/// case when two copies are separately allocated.
///
/// Note that "equal" above has a slightly different meaning than
/// `PartialEq`, for example two floating point NaN values should
/// be considered equal when they have the same bit representation.
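    ///
    /// ```
    /// # use druid::Data;
    /// // Illustrative example (added; not in the original source): the
    /// // comparison is bitwise, so NaN is `same` as itself.
    /// assert!(std::f64::NAN.same(&std::f64::NAN));
    /// assert!(!1.0_f64.same(&2.0_f64));
    /// ```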
fn same(&self, other: &Self) -> bool;
//// ANCHOR_END: same_fn
}
/// A reference counted string slice.
///
/// This is a data-friendly way to represent strings in druid. Unlike `String`
/// it cannot be mutated, but unlike `String` it can be cheaply cloned.
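///
/// ```
/// # use std::sync::Arc;
/// // Illustrative sketch (added; not in the original source): `ArcStr` is
/// // just `Arc<str>`, so cloning only bumps a reference count.
/// let s: Arc<str> = "hello".into();
/// let t = s.clone();
/// assert_eq!(s, t);
/// ```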
pub type ArcStr = Arc<str>;
/// An impl of `Data` suitable for simple types.
///
/// The `same` method is implemented with equality, so the type should
/// implement `Eq` at least.
macro_rules! impl_data_simple {
($t:ty) => {
impl Data for $t {
fn same(&self, other: &Self) -> bool {
self == other
}
}
};
}
impl_data_simple!(i8);
impl_data_simple!(i16);
impl_data_simple!(i32);
impl_data_simple!(i64);
impl_data_simple!(isize);
impl_data_simple!(u8);
impl_data_simple!(u16);
impl_data_simple!(u32);
impl_data_simple!(u64);
impl_data_simple!(usize);
impl_data_simple!(char);
impl_data_simple!(bool);
//TODO: remove me!?
impl_data_simple!(String);
impl Data for f32 {
fn same(&self, other: &Self) -> bool {
self.to_bits() == other.to_bits()
}
}
impl Data for f64 {
fn same(&self, other: &Self) -> bool {
self.to_bits() == other.to_bits()
}
}
impl<T: ?Sized + 'static> Data for Arc<T> {
fn same(&self, other: &Self) -> bool {
Arc::ptr_eq(self, other)
}
}
impl<T: ?Sized + 'static> Data for Rc<T> {
fn same(&self, other: &Self) -> bool {
Rc::ptr_eq(self, other)
}
}
impl<T: Data> Data for Option<T> {
fn same(&self, other: &Self) -> bool {
match (self, other) {
(Some(a), Some(b)) => a.same(b),
(None, None) => true,
_ => false,
}
}
}
impl<T: Data, U: Data> Data for Result<T, U> {
fn same(&self, other: &Self) -> bool {
match (self, other) {
(Ok(a), Ok(b)) => a.same(b),
(Err(a), Err(b)) => a.same(b),
_ => false,
}
}
}
impl Data for () {
fn same(&self, _other: &Self) -> bool {
true
}
}
impl<T0: Data> Data for (T0,) {
fn same(&self, other: &Self) -> bool {
self.0.same(&other.0)
}
}
impl<T0: Data, T1: Data> Data for (T0, T1) {
fn same(&self, other: &Self) -> bool {
self.0.same(&other.0) && self.1.same(&other.1)
}
}
impl<T0: Data, T1: Data, T2: Data> Data for (T0, T1, T2) {
fn same(&self, other: &Self) -> bool {
self.0.same(&other.0) && self.1.same(&other.1) && self.2.same(&other.2)
}
}
impl<T0: Data, T1: Data, T2: Data, T3: Data> Data for (T0, T1, T2, T3) {
fn same(&self, other: &Self) -> bool {
self.0.same(&other.0)
&& self.1.same(&other.1)
&& self.2.same(&other.2)
&& self.3.same(&other.3)
}
}
impl<T0: Data, T1: Data, T2: Data, T3: Data, T4: Data> Data for (T0, T1, T2, T3, T4) {
fn same(&self, other: &Self) -> bool {
self.0.same(&other.0)
&& self.1.same(&other.1)
&& self.2.same(&other.2)
&& self.3.same(&other.3)
&& self.4.same(&other.4)
}
}
impl<T0: Data, T1: Data, T2: Data, T3: Data, T4: Data, T5: Data> Data for (T0, T1, T2, T3, T4, T5) {
fn same(&self, other: &Self) -> bool {
self.0.same(&other.0)
&& self.1.same(&other.1)
&& self.2.same(&other.2)
&& self.3.same(&other.3)
&& self.4.same(&other.4)
&& self.5.same(&other.5)
}
}
impl Data for Scale {
fn same(&self, other: &Self) -> bool {
self == other
}
}
impl Data for kurbo::Point {
fn same(&self, other: &Self) -> bool {
self.x.same(&other.x) && self.y.same(&other.y)
}
}
impl Data for kurbo::Vec2 {
fn same(&self, other: &Self) -> bool {
self.x.same(&other.x) && self.y.same(&other.y)
}
}
impl Data for kurbo::Size {
fn same(&self, other: &Self) -> bool {
self.width.same(&other.width) && self.height.same(&other.height)
}
}
impl Data for kurbo::Affine {
fn same(&self, other: &Self) -> bool {
let rhs = self.as_coeffs();
let lhs = other.as_coeffs();
rhs.iter().zip(lhs.iter()).all(|(r, l)| r.same(l))
}
}
impl Data for kurbo::Insets {
fn same(&self, other: &Self) -> bool {
self.x0.same(&other.x0)
&& self.y0.same(&other.y0)
&& self.x1.same(&other.x1)
&& self.y1.same(&other.y1)
}
}
impl Data for kurbo::Rect {
fn same(&self, other: &Self) -> bool {
self.x0.same(&other.x0)
&& self.y0.same(&other.y0)
&& self.x1.same(&other.x1)
&& self.y1.same(&other.y1)
}
}
impl Data for kurbo::RoundedRect {
fn same(&self, other: &Self) -> bool {
        self.rect().same(&other.rect()) && self.radius().same(&other.radius())
}
}
impl Data for kurbo::Arc {
fn same(&self, other: &Self) -> bool {
self.center.same(&other.center)
&& self.radii.same(&other.radii)
&& self.start_angle.same(&other.start_angle)
&& self.sweep_angle.same(&other.sweep_angle)
&& self.x_rotation.same(&other.x_rotation)
}
}
impl Data for kurbo::PathEl {
fn same(&self, other: &Self) -> bool {
use kurbo::PathEl::*;
match (self, other) {
(MoveTo(p1), MoveTo(p2)) => p1.same(p2),
(LineTo(p1), LineTo(p2)) => p1.same(p2),
(QuadTo(x1, y1), QuadTo(x2, y2)) => x1.same(x2) && y1.same(y2),
(CurveTo(x1, y1, z1), CurveTo(x2, y2, z2)) => x1.same(x2) && y1.same(y2) && z1.same(z2),
(ClosePath, ClosePath) => true,
_ => false,
}
}
}
impl Data for kurbo::PathSeg {
fn same(&self, other: &Self) -> bool {
use kurbo::PathSeg;
match (self, other) {
(PathSeg::Line(l1), PathSeg::Line(l2)) => l1.same(l2),
(PathSeg::Quad(q1), PathSeg::Quad(q2)) => q1.same(q2),
(PathSeg::Cubic(c1), PathSeg::Cubic(c2)) => c1.same(c2),
_ => false,
}
}
}
impl Data for kurbo::BezPath {
fn same(&self, other: &Self) -> bool {
let rhs = self.elements();
let lhs = other.elements();
if rhs.len() == lhs.len() {
rhs.iter().zip(lhs.iter()).all(|(x, y)| x.same(y))
} else {
false
}
}
}
impl Data for kurbo::Circle {
fn same(&self, other: &Self) -> bool {
self.center.same(&other.center) && self.radius.same(&other.radius)
}
}
impl Data for kurbo::CubicBez {
fn same(&self, other: &Self) -> bool {
self.p0.same(&other.p0)
&& self.p1.same(&other.p1)
&& self.p2.same(&other.p2)
&& self.p3.same(&other.p3)
}
}
impl Data for kurbo::Line {
fn same(&self, other: &Self) -> bool {
self.p0.same(&other.p0) && self.p1.same(&other.p1)
}
}
impl Data for kurbo::ConstPoint {
fn same(&self, other: &Self) -> bool {
self.eval(0.).same(&other.eval(0.))
}
}
impl Data for kurbo::QuadBez {
fn same(&self, other: &Self) -> bool {
self.p0.same(&other.p0) && self.p1.same(&other.p1) && self.p2.same(&other.p2)
}
}
impl Data for piet::Color {
fn same(&self, other: &Self) -> bool {
self.as_rgba_u32().same(&other.as_rgba_u32())
}
}
impl Data for piet::FontFamily {
fn same(&self, other: &Self) -> bool {
self == other
}
}
impl Data for piet::FontWeight {
fn same(&self, other: &Self) -> bool {
self == other
}
}
impl Data for piet::FontStyle {
fn same(&self, other: &Self) -> bool {
self == other
}
}
#[cfg(feature = "im")]
impl<T: Data> Data for im::Vector<T> {
fn same(&self, other: &Self) -> bool {
// if a vec is small enough that it doesn't require an allocation
// it is 'inline'; in this case a pointer comparison is meaningless.
if self.is_inline() {
self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a.same(b))
} else {
self.ptr_eq(other)
}
}
}
#[cfg(feature = "im")]
impl<K: Clone + 'static, V: Data> Data for im::HashMap<K, V> {
fn same(&self, other: &Self) -> bool {
self.ptr_eq(other)
}
}
#[cfg(feature = "im")]
impl<T: Data> Data for im::HashSet<T> {
fn same(&self, other: &Self) -> bool {
self.ptr_eq(other)
}
}
#[cfg(feature = "im")]
impl<K: Clone + 'static, V: Data> Data for im::OrdMap<K, V> {
fn same(&self, other: &Self) -> bool {
self.ptr_eq(other)
}
}
#[cfg(feature = "im")]
impl<T: Data> Data for im::OrdSet<T> {
fn same(&self, other: &Self) -> bool {
self.ptr_eq(other)
}
}
macro_rules! impl_data_for_array {
() => {};
($this:tt $($rest:tt)*) => {
impl<T: Data> Data for [T; $this] {
fn same(&self, other: &Self) -> bool {
self.iter().zip(other.iter()).all(|(a, b)| a.same(b))
}
}
impl_data_for_array!($($rest)*);
}
}
impl_data_for_array! { 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 }
#[cfg(test)]
mod test {
use super::Data;
#[test]
fn array_data() {
let input = [1u8, 0, 0, 1, 0];
assert!(input.same(&[1u8, 0, 0, 1, 0]));
assert!(!input.same(&[1u8, 1, 0, 1, 0]));
}
#[test]
#[cfg(feature = "im")]
fn im_data() {
for len in 8..256 {
let input = std::iter::repeat(0_u8).take(len).collect::<im::Vector<_>>();
let mut inp2 = input.clone();
assert!(input.same(&inp2));
inp2.set(len - 1, 98);
assert!(!input.same(&inp2));
}
}
#[test]
#[cfg(feature = "im")]
fn im_vec_different_length() {
let one = std::iter::repeat(0_u8).take(9).collect::<im::Vector<_>>();
let two = std::iter::repeat(0_u8).take(10).collect::<im::Vector<_>>();
assert!(!one.same(&two));
}
}
| 27.669323 | 100 | 0.559323 |
2118e492a3341b23b1c8e44ab02a4a5b2386b2b8 | 11,939 | extern crate alloc;
use core::fmt;
use std::collections::HashMap;
use std::fmt::Debug;
use log::info;
use serde::{de, Deserialize, Deserializer, Serialize};
use serde::de::{SeqAccess, Visitor};
use serde::ser::{Serializer, SerializeSeq};
/// Struct representing the ServiceBlock used in Beacons to advertise additional services
///
/// Contains a vector reserved for ConvergencyLayerAgents
///
/// and a map for user defined services
#[derive(Debug, Clone, PartialEq)]
pub struct ServiceBlock {
clas: Vec<(String, Option<u16>)>,
services: HashMap<u8, Vec<u8>>,
}
impl Default for ServiceBlock {
fn default() -> Self {
Self::new()
}
}
impl ServiceBlock {
/// Creates a new ServiceBlock without any services or clas
pub fn new() -> ServiceBlock {
ServiceBlock {
clas: Vec::new(),
services: HashMap::new(),
}
}
/// Returns the vector of ConvergencyLayerAgents
pub fn clas(&self) -> &Vec<(String, Option<u16>)> {
&self.clas
}
/// Converts services into the format used by IPND
pub fn convert_services(&self) -> HashMap<u8, String> {
let mut convert: HashMap<u8, String> = HashMap::new();
for (tag, payload) in &self.services {
match *tag {
Service::CUSTOM_STRING => {
convert.insert(
*tag,
String::from_utf8(payload.clone())
.expect("Error parsing string from bytebuffer"),
);
}
Service::GEO_LOCATION => {
let latitude: f32 =
f32::from_be_bytes([payload[0], payload[1], payload[2], payload[3]]);
let longitude: f32 =
f32::from_be_bytes([payload[4], payload[5], payload[6], payload[7]]);
convert.insert(*tag, format!("{} {}", latitude, longitude));
}
Service::BATTERY => {
let int: i8 = i8::from_be_bytes([payload[0]]);
convert.insert(*tag, format!("{}", int));
}
Service::ADDRESS => {
let message = String::from_utf8(payload.clone())
.expect("Couldn't parse byte array into string");
convert.insert(*tag, message);
}
_ => {
info!("Unknown Service encountered. Compare senders IPND version with this one to check for incompatibilities.");
}
}
}
convert
}
    /// Returns the map of user defined services
pub fn services(&self) -> &HashMap<u8, Vec<u8>> {
&self.services
}
/// This method adds a cla to the corresponding vector of a ServiceBlock
pub fn add_cla(&mut self, name: &str, port: &Option<u16>) {
self.clas.push((name.to_owned(), *port))
}
/// This method adds a custom service to the HashMap of a ServiceBlock
pub fn add_custom_service(&mut self, tag: u8, service: &[u8]) {
self.services.insert(tag, service.to_owned());
}
/// This method sets the clas vector of a ServiceBlock to the one provided
pub fn set_clas(&mut self, clas: Vec<(String, Option<u16>)>) {
self.clas = clas;
}
/// This method sets the services hashmap of a ServiceBlock to the one provided
pub fn set_services(&mut self, services: HashMap<u8, Vec<u8>>) {
self.services = services;
}
/// Method to build custom services
///
/// Performs checks on tag and payload combinations
///
/// to make sure that the tag and payload content match
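    ///
    /// ```ignore
    /// // Illustrative sketch (added; not in the original source): tag 191 is
    /// // the battery service, so the payload must be a percentage string.
    /// let (tag, payload) = ServiceBlock::build_custom_service(191, "85").unwrap();
    /// assert_eq!(payload, 85i8.to_be_bytes().to_vec());
    /// ```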
pub fn build_custom_service(tag: u8, payload: &str) -> Result<(u8, Vec<u8>), String> {
match tag {
            // CustomString allows an arbitrary unformatted string
Service::CUSTOM_STRING => {
if payload.as_bytes().len() > 64 {
Err(String::from(
"The provided custom message is to big. Aim for less than 64 characters",
))
} else {
Ok((tag, payload.as_bytes().to_vec()))
}
}
// GeoLocation expects two floats to represent geographical location (Latitude/Longitude)
Service::GEO_LOCATION => {
let input: Vec<&str> = payload.split_whitespace().collect();
if input.len() < 2 {
Err(String::from(
"Not enough arguments provided to represent geographical location",
))
} else {
let first: f32 = input[0].parse().expect("Couldn't parse latitude");
let second: f32 = input[1].parse().expect("Couldn't parse longitude");
let mut bytes = first.to_be_bytes().to_vec();
bytes.extend(second.to_be_bytes().to_vec().iter());
Ok((tag, bytes))
}
}
            // Battery expects an integer between 0 and 100 representing the battery level in %
Service::BATTERY => {
let res = payload.parse::<i8>();
if let Ok(input) = res {
if !(0..=100).contains(&input) {
Err(String::from("Provided number can not be used to represent battery level. Please provide a number between 0 and 100"))
} else {
Ok((tag, input.to_be_bytes().to_vec()))
}
} else {
Err(format!(
"Could not parse provided argument into an integer. {}",
res.expect_err("")
))
}
}
// TODO: refactor this to geolocation
// Address expects 5 arguments String Int Int String String to represent an address
Service::ADDRESS => {
//let input: Vec<&str> = payload.split_whitespace().collect();
if payload.split_whitespace().count() == 5 {
Ok((tag, payload.as_bytes().to_vec()))
} else {
Err(String::from("Can not derive address from provided arguments. Argument order is: Street HouseNumber PostalNumber City CountryCode"))
}
}
// Undefined tags
_ => Err(String::from(
"This custom tag is not yet defined. Please refrain from using it until added.",
)),
}
}
/// Check if the ServiceBlock contains no CLAs and no other Services
pub fn is_empty(&self) -> bool {
self.clas.len() + self.services.len() == 0
}
}
// Implementation of the Display trait for ServiceBlocks for proper formatting
impl std::fmt::Display for ServiceBlock {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut output = String::new();
output.push_str("ConvergencyLayerAgents:\n");
let mut counter = 0;
for (name, port) in self.clas() {
let str = if port.is_some() {
format!(
"{}. CLA: Name = {} Port = {}\n",
counter,
name,
port.unwrap()
)
} else {
format!("{}. CLA: Name = {}\n", counter, name)
};
output.push_str(str.as_str());
counter += 1;
}
counter = 0;
output.push_str("Other services:\n");
for (tag, payload) in self.services() {
let str = match *tag {
Service::CUSTOM_STRING => format!(
"{}. Tag = {} Custom String Message: {}\n",
counter,
tag,
String::from_utf8(payload.clone())
.expect("Error parsing string from bytebuffer")
),
Service::GEO_LOCATION => {
let latitude: f32 =
f32::from_be_bytes([payload[0], payload[1], payload[2], payload[3]]);
let longitude: f32 =
f32::from_be_bytes([payload[4], payload[5], payload[6], payload[7]]);
format!("{}. Tag = {} Geographic location service. Current location at: Latitude {} Longitude {}\n",
counter, tag, latitude, longitude)
}
Service::BATTERY => {
let int: i8 = i8::from_be_bytes([payload[0]]);
format!(
"{}. Tag = {} Battery service. Battery level at {}%\n",
counter, tag, int
)
}
Service::ADDRESS => {
let message = String::from_utf8(payload.clone())
.expect("Couldn't parse byte array into string");
let address: Vec<&str> = message.split_whitespace().collect();
format!("{}. Tag = {} Address service. Street {}; House Number {}; Postal Number {}; City {}; Country Code {}\n",
counter, tag, address[0], address[1], address[2], address[3], address[4])
}
_ => {
info!("Unknown Service encountered. Compare senders IPND version with this one to check for incompatibilities.");
format!("")
}
};
output.push_str(str.as_str());
counter += 1;
}
output.pop();
write!(f, "{}", output)
}
}
impl Serialize for ServiceBlock {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
        // If the ServiceBlock is empty there is nothing to serialize; otherwise
        // exactly two elements are serialized: the CLA vector and the services map.
let num_elems = if self.is_empty() { 0 } else { 2 };
let mut seq = serializer.serialize_seq(Some(num_elems))?;
if num_elems > 0 {
seq.serialize_element(&self.clas)?;
seq.serialize_element(&self.services)?;
}
seq.end()
}
}
impl<'de> Deserialize<'de> for ServiceBlock {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct ServiceBlockVisitor;
impl<'de> Visitor<'de> for ServiceBlockVisitor {
type Value = ServiceBlock;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("ServiceBlock")
}
fn visit_seq<V>(self, mut seq: V) -> Result<Self::Value, V::Error>
where
V: SeqAccess<'de>,
{
if seq.size_hint().unwrap() < 1 {
Ok(ServiceBlock::new())
} else {
let clas = seq
.next_element()?
.ok_or_else(|| de::Error::invalid_length(0, &self))?;
let services = seq
.next_element()?
.ok_or_else(|| de::Error::invalid_length(0, &self))?;
let mut service_block = ServiceBlock::new();
service_block.set_clas(clas);
service_block.set_services(services);
Ok(service_block)
}
}
}
deserializer.deserialize_any(ServiceBlockVisitor)
}
}
/// Enum struct for defining services
struct Service;
impl Service {
pub const CUSTOM_STRING: u8 = 63;
pub const GEO_LOCATION: u8 = 127;
pub const BATTERY: u8 = 191;
pub const ADDRESS: u8 = 255;
}
| 39.144262 | 156 | 0.504816 |
87f64dfd2d5a518b10f0a6d93640ef3a25ff8331 | 2,799 | use super::ClickCommand;
use crate::slashies::interaction::Interaction;
use std::num::{ParseFloatError, ParseIntError};
use thiserror::Error;
use twilight_model::application::component::Button;
#[derive(Debug, Error, Clone)]
pub enum ParseError {
#[error(transparent)]
Int(#[from] ParseIntError),
#[error(transparent)]
Float(#[from] ParseFloatError),
#[error("{0}")]
Custom(&'static str),
}
pub trait ParseCommand {
type Output;
fn parse(interaction: Interaction, input: &str) -> Result<Self::Output, ParseError>;
fn parse_u8(input: &str) -> Result<u8, ParseError> {
Ok(input.parse()?)
}
fn parse_u16(input: &str) -> Result<u16, ParseError> {
Ok(input.parse()?)
}
fn parse_u32(input: &str) -> Result<u32, ParseError> {
Ok(input.parse()?)
}
fn parse_u64(input: &str) -> Result<u64, ParseError> {
Ok(input.parse()?)
}
fn parse_u128(input: &str) -> Result<u128, ParseError> {
Ok(input.parse()?)
}
fn parse_usize(input: &str) -> Result<usize, ParseError> {
Ok(input.parse()?)
}
fn parse_i8(input: &str) -> Result<i8, ParseError> {
Ok(input.parse()?)
}
fn parse_i16(input: &str) -> Result<i16, ParseError> {
Ok(input.parse()?)
}
fn parse_i32(input: &str) -> Result<i32, ParseError> {
Ok(input.parse()?)
}
fn parse_i64(input: &str) -> Result<i64, ParseError> {
Ok(input.parse()?)
}
fn parse_i128(input: &str) -> Result<i128, ParseError> {
Ok(input.parse()?)
}
fn parse_isize(input: &str) -> Result<isize, ParseError> {
Ok(input.parse()?)
}
fn parse_f32(input: &str) -> Result<f32, ParseError> {
let parsed = input.parse::<f32>()?;
if parsed.is_infinite() {
return Err(ParseError::Custom("expected a finite number"));
}
if parsed.is_nan() {
return Err(ParseError::Custom("expected a valid number"));
}
Ok(parsed)
}
fn parse_f64(input: &str) -> Result<f64, ParseError> {
let parsed = input.parse::<f64>()?;
if parsed.is_infinite() {
return Err(ParseError::Custom("expected a finite number"));
}
if parsed.is_nan() {
return Err(ParseError::Custom("expected a valid number"));
}
Ok(parsed)
}
fn parse_many(
interaction: Interaction,
input: &[&str],
) -> Result<Vec<Self::Output>, ParseError> {
let mut output = Vec::with_capacity(input.len());
for item in input {
output.push(Self::parse(interaction, item)?);
}
Ok(output)
}
fn parse_button<const N: usize>(input: &str) -> Result<Button, ParseError>
where
Self: ClickCommand<N>,
{
let buttons = Self::define_buttons()
.map_err(|_| ParseError::Custom("could not get button components"))?;
buttons
.iter()
.cloned()
.find(|button| button.custom_id.as_deref() == Some(input))
.ok_or(ParseError::Custom(
"an error occurred while getting the button pressed (this shouldn't happen)",
))
}
}
| 22.03937 | 85 | 0.655591 |
f9c1219c9d17e3b283428c5b9b1d6252a45a4aab | 23,764 | // Copyright 2019 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! The implementation of the WinHandler trait (druid-shell integration).
use std::any::Any;
use std::cell::RefCell;
use std::collections::{HashMap, VecDeque};
use std::rc::Rc;
use crate::kurbo::{Size, Vec2};
use crate::piet::Piet;
use crate::shell::{
Application, FileDialogOptions, IdleToken, MouseEvent, WinHandler, WindowHandle,
};
use crate::app_delegate::{AppDelegate, DelegateCtx};
use crate::core::CommandQueue;
use crate::ext_event::ExtEventHost;
use crate::menu::ContextMenu;
use crate::window::Window;
use crate::{
Command, Data, Env, Event, KeyEvent, KeyModifiers, MenuDesc, Target, TimerToken, WheelEvent,
WindowDesc, WindowId,
};
use crate::command::sys as sys_cmd;
pub(crate) const RUN_COMMANDS_TOKEN: IdleToken = IdleToken::new(1);
/// A token we are called back with if an external event was submitted.
pub(crate) const EXT_EVENT_IDLE_TOKEN: IdleToken = IdleToken::new(2);
/// The struct implements the druid-shell `WinHandler` trait.
///
/// One `DruidHandler` exists per window.
///
/// This is something of an internal detail and possibly we don't want to surface
/// it publicly.
pub struct DruidHandler<T> {
/// The shared app state.
app_state: AppState<T>,
/// The id for the current window.
window_id: WindowId,
}
/// The top level event handler.
///
/// This corresponds to the `AppHandler` trait in druid-shell, which is only
/// used to handle events that are not associated with a window.
///
/// Currently, this means only menu items on macOS when no window is open.
pub(crate) struct AppHandler<T> {
app_state: AppState<T>,
}
/// State shared by all windows in the UI.
#[derive(Clone)]
pub(crate) struct AppState<T> {
inner: Rc<RefCell<Inner<T>>>,
}
struct Inner<T> {
delegate: Option<Box<dyn AppDelegate<T>>>,
command_queue: CommandQueue,
ext_event_host: ExtEventHost,
windows: Windows<T>,
/// the application-level menu, only set on macos and only if there
/// are no open windows.
root_menu: Option<MenuDesc<T>>,
pub(crate) env: Env,
pub(crate) data: T,
}
/// All active windows.
struct Windows<T> {
pending: HashMap<WindowId, WindowDesc<T>>,
windows: HashMap<WindowId, Window<T>>,
}
impl<T> Windows<T> {
fn connect(&mut self, id: WindowId, handle: WindowHandle) {
if let Some(pending) = self.pending.remove(&id) {
let win = Window::new(id, handle, pending);
assert!(self.windows.insert(id, win).is_none(), "duplicate window");
} else {
log::error!("no window for connecting handle {:?}", id);
}
}
fn add(&mut self, id: WindowId, win: WindowDesc<T>) {
assert!(self.pending.insert(id, win).is_none(), "duplicate pending");
}
fn remove(&mut self, id: WindowId) -> Option<Window<T>> {
self.windows.remove(&id)
}
fn iter_mut(&mut self) -> impl Iterator<Item = &'_ mut Window<T>> {
self.windows.values_mut()
}
fn get(&self, id: WindowId) -> Option<&Window<T>> {
self.windows.get(&id)
}
fn get_mut(&mut self, id: WindowId) -> Option<&mut Window<T>> {
self.windows.get_mut(&id)
}
}
impl<T> AppHandler<T> {
pub(crate) fn new(app_state: AppState<T>) -> Self {
Self { app_state }
}
}
impl<T> AppState<T> {
pub(crate) fn new(
data: T,
env: Env,
delegate: Option<Box<dyn AppDelegate<T>>>,
ext_event_host: ExtEventHost,
) -> Self {
let inner = Rc::new(RefCell::new(Inner {
delegate,
command_queue: VecDeque::new(),
root_menu: None,
ext_event_host,
data,
env,
windows: Windows::default(),
}));
AppState { inner }
}
}
impl<T: Data> Inner<T> {
fn get_menu_cmd(&self, window_id: Option<WindowId>, cmd_id: u32) -> Option<Command> {
match window_id {
Some(id) => self.windows.get(id).and_then(|w| w.get_menu_cmd(cmd_id)),
None => self
.root_menu
.as_ref()
.and_then(|m| m.command_for_id(cmd_id)),
}
}
fn append_command(&mut self, target: Target, cmd: Command) {
self.command_queue.push_back((target, cmd));
}
/// A helper fn for setting up the `DelegateCtx`. Takes a closure with
/// an arbitrary return type `R`, and returns `Some(R)` if an `AppDelegate`
/// is configured.
fn with_delegate<R, F>(&mut self, f: F) -> Option<R>
where
F: FnOnce(&mut Box<dyn AppDelegate<T>>, &mut T, &Env, &mut DelegateCtx) -> R,
{
let Inner {
ref mut delegate,
ref mut command_queue,
ref mut data,
ref env,
..
} = self;
let mut ctx = DelegateCtx { command_queue };
if let Some(delegate) = delegate {
Some(f(delegate, data, env, &mut ctx))
} else {
None
}
}
fn delegate_event(&mut self, id: WindowId, event: Event) -> Option<Event> {
if self.delegate.is_some() {
self.with_delegate(|del, data, env, ctx| del.event(ctx, id, event, data, env))
.unwrap()
} else {
Some(event)
}
}
fn delegate_cmd(&mut self, target: &Target, cmd: &Command) -> bool {
self.with_delegate(|del, data, env, ctx| del.command(ctx, target, cmd, data, env))
.unwrap_or(true)
}
fn connect(&mut self, id: WindowId, handle: WindowHandle) {
self.windows.connect(id, handle);
// If the external event host has no handle, it cannot wake us
// when an event arrives.
if self.ext_event_host.handle_window_id.is_none() {
self.set_ext_event_idle_handler(id);
}
self.with_delegate(|del, data, env, ctx| del.window_added(id, data, env, ctx));
}
/// Called after this window has been closed by the platform.
///
    /// We clean up resources and notify the delegate, if necessary.
fn remove_window(&mut self, window_id: WindowId) {
self.with_delegate(|del, data, env, ctx| del.window_removed(window_id, data, env, ctx));
// when closing the last window:
if let Some(mut win) = self.windows.remove(window_id) {
if self.windows.windows.is_empty() {
// on mac we need to keep the menu around
self.root_menu = win.menu.take();
//FIXME: on windows we need to shutdown the app here?
}
}
// if we are closing the window that is currently responsible for
// waking us when external events arrive, we want to pass that responsibility
// to another window.
if self.ext_event_host.handle_window_id == Some(window_id) {
self.ext_event_host.handle_window_id = None;
// find any other live window
let win_id = self.windows.windows.keys().find(|k| *k != &window_id);
if let Some(any_other_window) = win_id.cloned() {
self.set_ext_event_idle_handler(any_other_window);
}
}
}
/// Set the idle handle that will be used to wake us when external events arrive.
fn set_ext_event_idle_handler(&mut self, id: WindowId) {
if let Some(mut idle) = self
.windows
.get_mut(id)
.and_then(|win| win.handle.get_idle_handle())
{
if self.ext_event_host.has_pending_items() {
idle.schedule_idle(EXT_EVENT_IDLE_TOKEN);
}
self.ext_event_host.set_idle(idle, id);
}
}
/// triggered by a menu item or other command.
///
/// This doesn't close the window; it calls the close method on the platform
/// window handle; the platform should close the window, and then call
/// our handlers `destroy()` method, at which point we can do our cleanup.
fn request_close_window(&mut self, window_id: WindowId) {
if let Some(win) = self.windows.get_mut(window_id) {
win.handle.close();
}
}
fn show_window(&mut self, id: WindowId) {
if let Some(win) = self.windows.get_mut(id) {
win.handle.bring_to_front_and_focus();
}
}
/// Returns `true` if an animation frame was requested.
fn paint(&mut self, window_id: WindowId, piet: &mut Piet) -> bool {
if let Some(win) = self.windows.get_mut(window_id) {
win.do_paint(piet, &mut self.command_queue, &self.data, &self.env);
win.wants_animation_frame()
} else {
false
}
}
fn dispatch_cmd(&mut self, target: Target, cmd: Command) {
if !self.delegate_cmd(&target, &cmd) {
return;
}
match target {
Target::Window(id) => {
// first handle special window-level events
match cmd.selector {
sys_cmd::SET_MENU => return self.set_menu(id, &cmd),
sys_cmd::SHOW_CONTEXT_MENU => return self.show_context_menu(id, &cmd),
_ => (),
}
if let Some(w) = self.windows.get_mut(id) {
let event = Event::Command(cmd);
w.event(&mut self.command_queue, event, &mut self.data, &self.env);
}
}
// in this case we send it to every window that might contain
// this widget, breaking if the event is handled.
Target::Widget(id) => {
for w in self.windows.iter_mut().filter(|w| w.may_contain_widget(id)) {
let event = Event::TargetedCommand(id.into(), cmd.clone());
if w.event(&mut self.command_queue, event, &mut self.data, &self.env) {
break;
}
}
}
Target::Global => {
for w in self.windows.iter_mut() {
let event = Event::Command(cmd.clone());
if w.event(&mut self.command_queue, event, &mut self.data, &self.env) {
break;
}
}
}
}
}
fn do_window_event(&mut self, source_id: WindowId, event: Event) -> bool {
match event {
Event::Command(..) | Event::TargetedCommand(..) => {
panic!("commands should be dispatched via dispatch_cmd");
}
_ => (),
}
// if the event was swallowed by the delegate we consider it handled?
let event = match self.delegate_event(source_id, event) {
Some(event) => event,
None => return true,
};
if let Some(win) = self.windows.get_mut(source_id) {
win.event(&mut self.command_queue, event, &mut self.data, &self.env)
} else {
false
}
}
fn set_menu(&mut self, window_id: WindowId, cmd: &Command) {
if let Some(win) = self.windows.get_mut(window_id) {
match cmd.get_object::<MenuDesc<T>>() {
Ok(menu) => win.set_menu(menu.to_owned(), &self.data, &self.env),
Err(e) => log::warn!("set-menu object error: '{}'", e),
}
}
}
fn show_context_menu(&mut self, window_id: WindowId, cmd: &Command) {
if let Some(win) = self.windows.get_mut(window_id) {
match cmd.get_object::<ContextMenu<T>>() {
Ok(ContextMenu { menu, location }) => {
win.show_context_menu(menu.to_owned(), *location, &self.data, &self.env)
}
Err(e) => log::warn!("show-context-menu object error: '{}'", e),
}
}
}
fn do_update(&mut self) {
// we send `update` to all windows, not just the active one:
for window in self.windows.iter_mut() {
window.update(&self.data, &self.env);
}
self.invalidate_and_finalize();
}
/// invalidate any window handles that need it.
///
/// This should always be called at the end of an event update cycle,
/// including for lifecycle events.
fn invalidate_and_finalize(&mut self) {
for win in self.windows.iter_mut() {
win.invalidate_and_finalize(&mut self.command_queue, &self.data, &self.env);
}
}
#[cfg(target_os = "macos")]
fn window_got_focus(&mut self, window_id: WindowId) {
if let Some(win) = self.windows.get_mut(window_id) {
win.macos_update_app_menu(&self.data, &self.env)
}
}
#[cfg(not(target_os = "macos"))]
fn window_got_focus(&mut self, _: WindowId) {}
}
impl<T: Data> DruidHandler<T> {
/// Note: the root widget doesn't go in here, because it gets added to the
/// app state.
pub(crate) fn new_shared(app_state: AppState<T>, window_id: WindowId) -> DruidHandler<T> {
DruidHandler {
app_state,
window_id,
}
}
}
impl<T: Data> AppState<T> {
pub(crate) fn data(&self) -> T {
self.inner.borrow().data.clone()
}
pub(crate) fn env(&self) -> Env {
self.inner.borrow().env.clone()
}
pub(crate) fn add_window(&self, id: WindowId, window: WindowDesc<T>) {
self.inner.borrow_mut().windows.add(id, window);
}
fn connect_window(&mut self, window_id: WindowId, handle: WindowHandle) {
self.inner.borrow_mut().connect(window_id, handle)
}
fn remove_window(&mut self, window_id: WindowId) {
self.inner.borrow_mut().remove_window(window_id)
}
fn window_got_focus(&mut self, window_id: WindowId) {
self.inner.borrow_mut().window_got_focus(window_id)
}
/// Send an event to the widget hierarchy.
///
/// Returns `true` if the event produced an action.
///
/// This is principally because in certain cases (such as keydown on Windows)
/// the OS needs to know if an event was handled.
fn do_window_event(&mut self, event: Event, window_id: WindowId) -> bool {
let result = self.inner.borrow_mut().do_window_event(window_id, event);
self.process_commands();
self.inner.borrow_mut().do_update();
result
}
fn paint_window(&mut self, window_id: WindowId, piet: &mut Piet) -> bool {
self.inner.borrow_mut().paint(window_id, piet)
}
fn idle(&mut self, token: IdleToken) {
match token {
RUN_COMMANDS_TOKEN => {
self.process_commands();
self.inner.borrow_mut().invalidate_and_finalize();
}
EXT_EVENT_IDLE_TOKEN => {
self.process_ext_events();
self.process_commands();
self.inner.borrow_mut().do_update();
}
other => log::warn!("unexpected idle token {:?}", other),
}
}
fn process_commands(&mut self) {
loop {
let next_cmd = self.inner.borrow_mut().command_queue.pop_front();
match next_cmd {
Some((target, cmd)) => self.handle_cmd(target, cmd),
None => break,
}
}
}
fn process_ext_events(&mut self) {
loop {
let ext_cmd = self.inner.borrow_mut().ext_event_host.recv();
match ext_cmd {
Some((targ, cmd)) => self.handle_cmd(targ.unwrap_or(Target::Global), cmd),
None => break,
}
}
}
/// Handle a 'command' message from druid-shell. These map to an item
/// in an application, window, or context (right-click) menu.
///
/// If the menu is associated with a window (the general case) then
/// the `window_id` will be `Some(_)`, otherwise (such as if no window
/// is open but a menu exists, as on macOS) it will be `None`.
fn handle_system_cmd(&mut self, cmd_id: u32, window_id: Option<WindowId>) {
let cmd = self.inner.borrow().get_menu_cmd(window_id, cmd_id);
let target = window_id.map(Into::into).unwrap_or(Target::Global);
match cmd {
Some(cmd) => self.inner.borrow_mut().append_command(target, cmd),
None => log::warn!("No command for menu id {}", cmd_id),
}
self.process_commands();
self.inner.borrow_mut().do_update();
}
/// Handle a command. Top level commands (e.g. for creating and destroying
/// windows) have their logic here; other commands are passed to the window.
fn handle_cmd(&mut self, target: Target, cmd: Command) {
use Target as T;
match (target, &cmd.selector) {
// these are handled the same no matter where they come from
(_, &sys_cmd::QUIT_APP) => self.quit(),
(_, &sys_cmd::HIDE_APPLICATION) => self.hide_app(),
(_, &sys_cmd::HIDE_OTHERS) => self.hide_others(),
(_, &sys_cmd::NEW_WINDOW) => {
if let Err(e) = self.new_window(cmd) {
log::error!("failed to create window: '{}'", e);
}
}
// these should come from a window
// FIXME: we need to be able to open a file without a window handle
(T::Window(id), &sys_cmd::SHOW_OPEN_PANEL) => self.show_open_panel(cmd, id),
(T::Window(id), &sys_cmd::SHOW_SAVE_PANEL) => self.show_save_panel(cmd, id),
(T::Window(id), &sys_cmd::CLOSE_WINDOW) => self.request_close_window(cmd, id),
(T::Window(_), &sys_cmd::SHOW_WINDOW) => self.show_window(cmd),
(T::Window(id), &sys_cmd::PASTE) => self.do_paste(id),
_sel => self.inner.borrow_mut().dispatch_cmd(target, cmd),
}
}
fn show_open_panel(&mut self, cmd: Command, window_id: WindowId) {
let options = cmd
.get_object::<FileDialogOptions>()
.map(|opts| opts.to_owned())
.unwrap_or_default();
//FIXME: this is blocking; if we hold `borrow_mut` we are likely to cause
//a crash. as a workaround we take a clone of the window handle.
//it's less clear what the better solution would be.
let handle = self
.inner
.borrow_mut()
.windows
.get_mut(window_id)
.map(|w| w.handle.clone());
let result = handle.and_then(|mut handle| handle.open_file_sync(options));
if let Some(info) = result {
let cmd = Command::new(sys_cmd::OPEN_FILE, info);
self.inner.borrow_mut().dispatch_cmd(window_id.into(), cmd);
}
}
fn show_save_panel(&mut self, cmd: Command, window_id: WindowId) {
let options = cmd
.get_object::<FileDialogOptions>()
.map(|opts| opts.to_owned())
.unwrap_or_default();
let handle = self
.inner
.borrow_mut()
.windows
.get_mut(window_id)
.map(|w| w.handle.clone());
let result = handle.and_then(|mut handle| handle.save_as_sync(options));
if let Some(info) = result {
let cmd = Command::new(sys_cmd::SAVE_FILE, info);
self.inner.borrow_mut().dispatch_cmd(window_id.into(), cmd);
}
}
fn new_window(&mut self, cmd: Command) -> Result<(), Box<dyn std::error::Error>> {
let desc = cmd.take_object::<WindowDesc<T>>()?;
let window = desc.build_native(self)?;
window.show();
Ok(())
}
fn request_close_window(&mut self, cmd: Command, window_id: WindowId) {
let id = cmd.get_object().unwrap_or(&window_id);
self.inner.borrow_mut().request_close_window(*id);
}
fn show_window(&mut self, cmd: Command) {
let id: WindowId = *cmd
.get_object()
.expect("show window selector missing window id");
self.inner.borrow_mut().show_window(id);
}
fn do_paste(&mut self, window_id: WindowId) {
let event = Event::Paste(Application::clipboard());
self.inner.borrow_mut().do_window_event(window_id, event);
}
fn quit(&self) {
Application::quit()
}
fn hide_app(&self) {
#[cfg(all(target_os = "macos", not(feature = "use_gtk")))]
Application::hide()
}
fn hide_others(&mut self) {
#[cfg(all(target_os = "macos", not(feature = "use_gtk")))]
Application::hide_others()
}
}
impl<T: Data> crate::shell::AppHandler for AppHandler<T> {
fn command(&mut self, id: u32) {
self.app_state.handle_system_cmd(id, None)
}
}
impl<T: Data> WinHandler for DruidHandler<T> {
fn connect(&mut self, handle: &WindowHandle) {
self.app_state
.connect_window(self.window_id, handle.clone());
let event = Event::WindowConnected;
self.app_state.do_window_event(event, self.window_id);
}
fn paint(&mut self, piet: &mut Piet) -> bool {
self.app_state.paint_window(self.window_id, piet)
}
fn size(&mut self, width: u32, height: u32) {
let event = Event::Size(Size::new(f64::from(width), f64::from(height)));
self.app_state.do_window_event(event, self.window_id);
}
fn command(&mut self, id: u32) {
self.app_state.handle_system_cmd(id, Some(self.window_id));
}
fn mouse_down(&mut self, event: &MouseEvent) {
// TODO: double-click detection (or is this done in druid-shell?)
let event = Event::MouseDown(event.clone().into());
self.app_state.do_window_event(event, self.window_id);
}
fn mouse_up(&mut self, event: &MouseEvent) {
let event = Event::MouseUp(event.clone().into());
self.app_state.do_window_event(event, self.window_id);
}
fn mouse_move(&mut self, event: &MouseEvent) {
let event = Event::MouseMoved(event.clone().into());
self.app_state.do_window_event(event, self.window_id);
}
fn key_down(&mut self, event: KeyEvent) -> bool {
self.app_state
.do_window_event(Event::KeyDown(event), self.window_id)
}
fn key_up(&mut self, event: KeyEvent) {
self.app_state
.do_window_event(Event::KeyUp(event), self.window_id);
}
fn wheel(&mut self, delta: Vec2, mods: KeyModifiers) {
let event = Event::Wheel(WheelEvent { delta, mods });
self.app_state.do_window_event(event, self.window_id);
}
fn zoom(&mut self, delta: f64) {
let event = Event::Zoom(delta);
self.app_state.do_window_event(event, self.window_id);
}
fn got_focus(&mut self) {
self.app_state.window_got_focus(self.window_id);
}
fn timer(&mut self, token: TimerToken) {
self.app_state
.do_window_event(Event::Timer(token), self.window_id);
}
fn idle(&mut self, token: IdleToken) {
self.app_state.idle(token);
}
fn as_any(&mut self) -> &mut dyn Any {
self
}
fn destroy(&mut self) {
self.app_state.remove_window(self.window_id);
}
}
impl<T> Default for Windows<T> {
fn default() -> Self {
Windows {
windows: HashMap::new(),
pending: HashMap::new(),
}
}
}
| 34.34104 | 96 | 0.582057 |
287ba1629c3988e2298788383305399daea6507d | 1,014 | #![feature(test)]
extern crate racer_testutils;
extern crate test;
use test::Bencher;
use racer_testutils::*;
#[bench]
fn completes_rand(b: &mut Bencher) {
let src = "
extern crate rand;
use rand::{Rng, thread_rng};
fn main() {
let mut rng: Box<Rng> = Box::new(thread_rng());
rng.gen_rang~
}
";
let mut match_ = None;
b.iter(|| {
with_test_project(|dir| {
let src_dir = dir.nested_dir("test-crate3").nested_dir("src");
match_ = Some(get_only_completion(src, Some(src_dir)));
});
})
}
#[bench]
fn completes_rayon(b: &mut Bencher) {
let src = "
extern crate rayon;
extern crate rand;
use rand::{Rng, thread_rng};
fn main() {
rayon::colle~
}
";
let mut match_ = None;
b.iter(|| {
with_test_project(|dir| {
let src_dir = dir.nested_dir("test-crate3").nested_dir("src");
match_ = Some(get_only_completion(src, Some(src_dir)));
});
})
}
| 22.533333 | 74 | 0.563116 |
db3fa6247db26bff4b3d9b740751d9236eeee115 | 780 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct node_ {
a: Box<cycle>
}
enum cycle {
node(node_),
empty
}
fn main() {
let mut x = box cycle::node(node_ {a: box cycle::empty});
// Create a cycle!
match *x {
cycle::node(ref mut y) => {
y.a = x; //~ ERROR cannot move out of
}
cycle::empty => {}
};
}
| 26 | 68 | 0.64359 |
8f42c57178017fa84857dccdfcaa78b6bdb4c66c | 705 | // Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Modules common to all Epic server types
pub mod adapters;
pub mod hooks;
pub mod stats;
pub mod types;
| 33.571429 | 75 | 0.746099 |
d922749df36c45206f3023b1f0c97244bd746d50 | 1,220 | use std::{slice, str};
#[repr(C)]
pub struct Metadata {
name: *const u8,
description: *const u8,
name_len: u16,
description_len: u16,
}
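// SAFETY rationale (comment added for clarity; not in the original source):
// the raw pointers above are only ever set from `&'static str` values (see
// `new`, `set_name` and `set_description`), so the referenced bytes live for
// the whole program and can safely be shared and sent across threads.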
unsafe impl Send for Metadata {}
unsafe impl Sync for Metadata {}
impl Default for Metadata {
fn default() -> Self {
Self::new()
}
}
impl Metadata {
pub fn new() -> Metadata {
Metadata {
name: "".as_ptr(),
description: "".as_ptr(),
name_len: 0,
description_len: 0,
}
}
pub fn name(&self) -> &'static str {
unsafe {
str::from_utf8_unchecked(slice::from_raw_parts(self.name, self.name_len as usize))
}
}
pub fn description(&self) -> &'static str {
unsafe {
str::from_utf8_unchecked(slice::from_raw_parts(
self.description,
self.description_len as usize,
))
}
}
pub fn set_name(&mut self, name: &'static str) {
self.name = name.as_ptr();
self.name_len = name.len() as u16;
}
pub fn set_description(&mut self, desc: &'static str) {
self.description = desc.as_ptr();
self.description_len = desc.len() as u16;
}
}
| 22.181818 | 94 | 0.539344 |
e2217fdfac036becda935869bc0ff7c53cfba4d6 | 20,041 | //! See docs in `build/expr/mod.rs`.
use rustc_index::vec::Idx;
use crate::build::expr::category::{Category, RvalueFunc};
use crate::build::{BlockAnd, BlockAndExtension, Builder};
use crate::hair::*;
use rustc_middle::middle::region;
use rustc_middle::mir::AssertKind;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Ty, UpvarSubsts};
use rustc_span::Span;
impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Returns an rvalue suitable for use until the end of the current
/// scope expression.
///
/// The operand returned from this function will *not be valid* after
/// an ExprKind::Scope is passed, so please do *not* return it from
/// functions to avoid bad miscompiles.
crate fn as_local_rvalue<M>(&mut self, block: BasicBlock, expr: M) -> BlockAnd<Rvalue<'tcx>>
where
M: Mirror<'tcx, Output = Expr<'tcx>>,
{
let local_scope = self.local_scope();
self.as_rvalue(block, local_scope, expr)
}
/// Compile `expr`, yielding an rvalue.
fn as_rvalue<M>(
&mut self,
block: BasicBlock,
scope: Option<region::Scope>,
expr: M,
) -> BlockAnd<Rvalue<'tcx>>
where
M: Mirror<'tcx, Output = Expr<'tcx>>,
{
let expr = self.hir.mirror(expr);
self.expr_as_rvalue(block, scope, expr)
}
fn expr_as_rvalue(
&mut self,
mut block: BasicBlock,
scope: Option<region::Scope>,
expr: Expr<'tcx>,
) -> BlockAnd<Rvalue<'tcx>> {
debug!("expr_as_rvalue(block={:?}, scope={:?}, expr={:?})", block, scope, expr);
let this = self;
let expr_span = expr.span;
let source_info = this.source_info(expr_span);
match expr.kind {
ExprKind::ThreadLocalRef(did) => block.and(Rvalue::ThreadLocalRef(did)),
ExprKind::Scope { region_scope, lint_level, value } => {
let region_scope = (region_scope, source_info);
this.in_scope(region_scope, lint_level, |this| this.as_rvalue(block, scope, value))
}
ExprKind::Repeat { value, count } => {
let value_operand = unpack!(block = this.as_operand(block, scope, value));
block.and(Rvalue::Repeat(value_operand, count))
}
ExprKind::Binary { op, lhs, rhs } => {
let lhs = unpack!(block = this.as_operand(block, scope, lhs));
let rhs = unpack!(block = this.as_operand(block, scope, rhs));
this.build_binary_op(block, op, expr_span, expr.ty, lhs, rhs)
}
ExprKind::Unary { op, arg } => {
let arg = unpack!(block = this.as_operand(block, scope, arg));
// Check for -MIN on signed integers
if this.hir.check_overflow() && op == UnOp::Neg && expr.ty.is_signed() {
let bool_ty = this.hir.bool_ty();
let minval = this.minval_literal(expr_span, expr.ty);
let is_min = this.temp(bool_ty, expr_span);
this.cfg.push_assign(
block,
source_info,
is_min,
Rvalue::BinaryOp(BinOp::Eq, arg.to_copy(), minval),
);
block = this.assert(
block,
Operand::Move(is_min),
false,
AssertKind::OverflowNeg(arg.to_copy()),
expr_span,
);
}
block.and(Rvalue::UnaryOp(op, arg))
}
ExprKind::Box { value } => {
let value = this.hir.mirror(value);
// The `Box<T>` temporary created here is not a part of the HIR,
// and therefore is not considered during generator OIBIT
// determination. See the comment about `box` at `yield_in_scope`.
let result = this.local_decls.push(LocalDecl::new(expr.ty, expr_span).internal());
this.cfg.push(
block,
Statement { source_info, kind: StatementKind::StorageLive(result) },
);
if let Some(scope) = scope {
// schedule a shallow free of that memory, lest we unwind:
this.schedule_drop_storage_and_value(expr_span, scope, result);
}
// malloc some memory of suitable type (thus far, uninitialized):
let box_ = Rvalue::NullaryOp(NullOp::Box, value.ty);
this.cfg.push_assign(block, source_info, Place::from(result), box_);
// initialize the box contents:
unpack!(
block =
this.into(this.hir.tcx().mk_place_deref(Place::from(result)), block, value)
);
block.and(Rvalue::Use(Operand::Move(Place::from(result))))
}
ExprKind::Cast { source } => {
let source = unpack!(block = this.as_operand(block, scope, source));
block.and(Rvalue::Cast(CastKind::Misc, source, expr.ty))
}
ExprKind::Pointer { cast, source } => {
let source = unpack!(block = this.as_operand(block, scope, source));
block.and(Rvalue::Cast(CastKind::Pointer(cast), source, expr.ty))
}
ExprKind::Array { fields } => {
// (*) We would (maybe) be closer to codegen if we
// handled this and other aggregate cases via
// `into()`, not `as_rvalue` -- in that case, instead
// of generating
//
// let tmp1 = ...1;
// let tmp2 = ...2;
// dest = Rvalue::Aggregate(Foo, [tmp1, tmp2])
//
// we could just generate
//
// dest.f = ...1;
// dest.g = ...2;
//
// The problem is that then we would need to:
//
// (a) have a more complex mechanism for handling
// partial cleanup;
// (b) distinguish the case where the type `Foo` has a
// destructor, in which case creating an instance
// as a whole "arms" the destructor, and you can't
// write individual fields; and,
// (c) handle the case where the type Foo has no
// fields. We don't want `let x: ();` to compile
// to the same MIR as `let x = ();`.
// first process the set of fields
let el_ty = expr.ty.sequence_element_type(this.hir.tcx());
let fields: Vec<_> = fields
.into_iter()
.map(|f| unpack!(block = this.as_operand(block, scope, f)))
.collect();
block.and(Rvalue::Aggregate(box AggregateKind::Array(el_ty), fields))
}
ExprKind::Tuple { fields } => {
// see (*) above
// first process the set of fields
let fields: Vec<_> = fields
.into_iter()
.map(|f| unpack!(block = this.as_operand(block, scope, f)))
.collect();
block.and(Rvalue::Aggregate(box AggregateKind::Tuple, fields))
}
ExprKind::Closure { closure_id, substs, upvars, movability } => {
// see (*) above
let operands: Vec<_> = upvars
.into_iter()
.map(|upvar| {
let upvar = this.hir.mirror(upvar);
match Category::of(&upvar.kind) {
// Use as_place to avoid creating a temporary when
// moving a variable into a closure, so that
// borrowck knows which variables to mark as being
// used as mut. This is OK here because the upvar
// expressions have no side effects and act on
// disjoint places.
// This occurs when capturing by copy/move, while
// by reference captures use as_operand
Some(Category::Place) => {
let place = unpack!(block = this.as_place(block, upvar));
this.consume_by_copy_or_move(place)
}
_ => {
// Turn mutable borrow captures into unique
// borrow captures when capturing an immutable
// variable. This is sound because the mutation
// that caused the capture will cause an error.
match upvar.kind {
ExprKind::Borrow {
borrow_kind:
BorrowKind::Mut { allow_two_phase_borrow: false },
arg,
} => unpack!(
block = this.limit_capture_mutability(
upvar.span, upvar.ty, scope, block, arg,
)
),
_ => unpack!(block = this.as_operand(block, scope, upvar)),
}
}
}
})
.collect();
let result = match substs {
UpvarSubsts::Generator(substs) => {
// We implicitly set the discriminant to 0. See
// librustc_mir/transform/deaggregator.rs for details.
let movability = movability.unwrap();
box AggregateKind::Generator(closure_id, substs, movability)
}
UpvarSubsts::Closure(substs) => box AggregateKind::Closure(closure_id, substs),
};
block.and(Rvalue::Aggregate(result, operands))
}
ExprKind::Assign { .. } | ExprKind::AssignOp { .. } => {
block = unpack!(this.stmt_expr(block, expr, None));
block.and(Rvalue::Use(Operand::Constant(box Constant {
span: expr_span,
user_ty: None,
literal: ty::Const::zero_sized(this.hir.tcx(), this.hir.tcx().types.unit),
})))
}
ExprKind::Yield { .. }
| ExprKind::Literal { .. }
| ExprKind::StaticRef { .. }
| ExprKind::Block { .. }
| ExprKind::Match { .. }
| ExprKind::NeverToAny { .. }
| ExprKind::Use { .. }
| ExprKind::Borrow { .. }
| ExprKind::AddressOf { .. }
| ExprKind::Adt { .. }
| ExprKind::Loop { .. }
| ExprKind::LogicalOp { .. }
| ExprKind::Call { .. }
| ExprKind::Field { .. }
| ExprKind::Deref { .. }
| ExprKind::Index { .. }
| ExprKind::VarRef { .. }
| ExprKind::SelfRef
| ExprKind::Break { .. }
| ExprKind::Continue { .. }
| ExprKind::Return { .. }
| ExprKind::InlineAsm { .. }
| ExprKind::LlvmInlineAsm { .. }
| ExprKind::PlaceTypeAscription { .. }
| ExprKind::ValueTypeAscription { .. } => {
// these do not have corresponding `Rvalue` variants,
// so make an operand and then return that
debug_assert!(match Category::of(&expr.kind) {
Some(Category::Rvalue(RvalueFunc::AsRvalue)) => false,
_ => true,
});
let operand = unpack!(block = this.as_operand(block, scope, expr));
block.and(Rvalue::Use(operand))
}
}
}
crate fn build_binary_op(
&mut self,
mut block: BasicBlock,
op: BinOp,
span: Span,
ty: Ty<'tcx>,
lhs: Operand<'tcx>,
rhs: Operand<'tcx>,
) -> BlockAnd<Rvalue<'tcx>> {
let source_info = self.source_info(span);
let bool_ty = self.hir.bool_ty();
if self.hir.check_overflow() && op.is_checkable() && ty.is_integral() {
let result_tup = self.hir.tcx().intern_tup(&[ty, bool_ty]);
let result_value = self.temp(result_tup, span);
self.cfg.push_assign(
block,
source_info,
result_value,
Rvalue::CheckedBinaryOp(op, lhs.to_copy(), rhs.to_copy()),
);
let val_fld = Field::new(0);
let of_fld = Field::new(1);
let tcx = self.hir.tcx();
let val = tcx.mk_place_field(result_value, val_fld, ty);
let of = tcx.mk_place_field(result_value, of_fld, bool_ty);
let err = AssertKind::Overflow(op, lhs, rhs);
block = self.assert(block, Operand::Move(of), false, err, span);
block.and(Rvalue::Use(Operand::Move(val)))
} else {
if ty.is_integral() && (op == BinOp::Div || op == BinOp::Rem) {
// Checking division and remainder is more complex, since we 1. always check
// and 2. there are two possible failure cases, divide-by-zero and overflow.
let zero_err = if op == BinOp::Div {
AssertKind::DivisionByZero(lhs.to_copy())
} else {
AssertKind::RemainderByZero(lhs.to_copy())
};
let overflow_err = AssertKind::Overflow(op, lhs.to_copy(), rhs.to_copy());
// Check for / 0
let is_zero = self.temp(bool_ty, span);
let zero = self.zero_literal(span, ty);
self.cfg.push_assign(
block,
source_info,
is_zero,
Rvalue::BinaryOp(BinOp::Eq, rhs.to_copy(), zero),
);
block = self.assert(block, Operand::Move(is_zero), false, zero_err, span);
// We only need to check for the overflow in one case:
// MIN / -1, and only for signed values.
if ty.is_signed() {
let neg_1 = self.neg_1_literal(span, ty);
let min = self.minval_literal(span, ty);
let is_neg_1 = self.temp(bool_ty, span);
let is_min = self.temp(bool_ty, span);
let of = self.temp(bool_ty, span);
// this does (rhs == -1) & (lhs == MIN). It could short-circuit instead
self.cfg.push_assign(
block,
source_info,
is_neg_1,
Rvalue::BinaryOp(BinOp::Eq, rhs.to_copy(), neg_1),
);
self.cfg.push_assign(
block,
source_info,
is_min,
Rvalue::BinaryOp(BinOp::Eq, lhs.to_copy(), min),
);
let is_neg_1 = Operand::Move(is_neg_1);
let is_min = Operand::Move(is_min);
self.cfg.push_assign(
block,
source_info,
of,
Rvalue::BinaryOp(BinOp::BitAnd, is_neg_1, is_min),
);
block = self.assert(block, Operand::Move(of), false, overflow_err, span);
}
}
block.and(Rvalue::BinaryOp(op, lhs, rhs))
}
}
fn limit_capture_mutability(
&mut self,
upvar_span: Span,
upvar_ty: Ty<'tcx>,
temp_lifetime: Option<region::Scope>,
mut block: BasicBlock,
arg: ExprRef<'tcx>,
) -> BlockAnd<Operand<'tcx>> {
let this = self;
let source_info = this.source_info(upvar_span);
let temp = this.local_decls.push(LocalDecl::new(upvar_ty, upvar_span));
this.cfg.push(block, Statement { source_info, kind: StatementKind::StorageLive(temp) });
let arg_place = unpack!(block = this.as_place(block, arg));
let mutability = match arg_place.as_ref() {
PlaceRef { local, projection: &[] } => this.local_decls[local].mutability,
PlaceRef { local, projection: &[ProjectionElem::Deref] } => {
debug_assert!(
this.local_decls[local].is_ref_for_guard(),
"Unexpected capture place",
);
this.local_decls[local].mutability
}
PlaceRef {
local,
projection: &[ref proj_base @ .., ProjectionElem::Field(upvar_index, _)],
}
| PlaceRef {
local,
projection:
&[ref proj_base @ .., ProjectionElem::Field(upvar_index, _), ProjectionElem::Deref],
} => {
let place = PlaceRef { local, projection: proj_base };
// Not projected from the implicit `self` in a closure.
debug_assert!(
match place.local_or_deref_local() {
Some(local) => local == Local::new(1),
None => false,
},
"Unexpected capture place"
);
// Not in a closure
debug_assert!(
this.upvar_mutbls.len() > upvar_index.index(),
"Unexpected capture place"
);
this.upvar_mutbls[upvar_index.index()]
}
_ => bug!("Unexpected capture place"),
};
let borrow_kind = match mutability {
Mutability::Not => BorrowKind::Unique,
Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false },
};
this.cfg.push_assign(
block,
source_info,
Place::from(temp),
Rvalue::Ref(this.hir.tcx().lifetimes.re_erased, borrow_kind, arg_place),
);
// In constants, temp_lifetime is None. We should not need to drop
// anything because no values with a destructor can be created in
// a constant at this time, even if the type may need dropping.
if let Some(temp_lifetime) = temp_lifetime {
this.schedule_drop_storage_and_value(upvar_span, temp_lifetime, temp);
}
block.and(Operand::Move(Place::from(temp)))
}
// Helper to get a `-1` value of the appropriate type
fn neg_1_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> {
let param_ty = ty::ParamEnv::empty().and(ty);
let bits = self.hir.tcx().layout_of(param_ty).unwrap().size.bits();
let n = (!0u128) >> (128 - bits);
let literal = ty::Const::from_bits(self.hir.tcx(), n, param_ty);
self.literal_operand(span, literal)
}
// Helper to get the minimum value of the appropriate type
fn minval_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> {
assert!(ty.is_signed());
let param_ty = ty::ParamEnv::empty().and(ty);
let bits = self.hir.tcx().layout_of(param_ty).unwrap().size.bits();
let n = 1 << (bits - 1);
let literal = ty::Const::from_bits(self.hir.tcx(), n, param_ty);
self.literal_operand(span, literal)
}
}
| 42.731343 | 104 | 0.478419 |
fc0d20b325ddf686fbb7ff0a7b94ba6717eb94e4 | 915 | use GodotType;
use Variant;
use Vector3;
impl GodotType for Vector3 {
fn to_variant(&self) -> Variant {
Variant::from_vector3(self)
}
fn from_variant(variant: &Variant) -> Option<Self> {
variant.try_to_vector3()
}
}
godot_test!(
test_vector3_variants {
let vector = Vector3::new(1.0, 2.0, 3.0);
let variant = vector.to_variant();
let vector_from_variant = Vector3::from_variant(&variant).unwrap();
assert_eq!(vector, vector_from_variant);
}
);
#[cfg(test)]
mod tests {
use super::Vector3;
#[test]
fn it_supports_equality() {
assert_eq!(
Vector3::new(1.0, 2.0, 3.0),
Vector3::new(1.0, 2.0, 3.0)
);
}
#[test]
fn it_supports_inequality() {
assert_ne!(
Vector3::new(1.0, 10.0, 100.0),
Vector3::new(1.0, 2.0, 3.0)
);
}
}
| 20.333333 | 75 | 0.545355 |
691a64dc65ca358663776f95af1bde45743d0a4b | 1,365 | use jni_sys::{JNIEnv, jobject, jint};
use std::sync::atomic::{AtomicI32, Ordering};
static VALUE : AtomicI32 = AtomicI32::new(0);
#[no_mangle] pub extern "stdcall" fn Java_com_maulingmonkey_jerk_example_1hello_1world_1jar_Global_assert_1native_1value__I(_env: *mut JNIEnv, _this: jobject, expected_value: jint) {
let value = VALUE.load(Ordering::SeqCst);
if value != expected_value {
// Don't unwind across FFI boundaries when we fail this test
eprintln!("assert_native_value: {} != {}", value, expected_value);
std::process::exit(1);
}
}
#[no_mangle] pub extern "stdcall" fn Java_com_maulingmonkey_jerk_example_1hello_1world_1jar_Global_test() {
// https://github.com/MaulingMonkey/jerk/issues/12
//
// Loading a separately build cdylib actually caused a whole second copy of
// the entire crate to be loaded with it's own separate static vars and
// everything. This test would catch that.
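    // Each Java-side assert_value_eq_N test presumably reads VALUE back through the
    // exported assert_native_value entry point above; if a second copy of the crate
    // (and its statics) were loaded, that copy's VALUE would still be 0 and the
    // assertion would fail.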
VALUE.store(1, Ordering::SeqCst); jerk::run_test!("com.maulingmonkey.jerk.example_hello_world_jar", "Global", "assert_value_eq_1");
VALUE.store(3, Ordering::SeqCst); jerk::run_test!("com.maulingmonkey.jerk.example_hello_world_jar", "Global", "assert_value_eq_3");
VALUE.store(5, Ordering::SeqCst); jerk::run_test!("com.maulingmonkey.jerk.example_hello_world_jar", "Global", "assert_value_eq_5");
}
| 52.5 | 182 | 0.731136 |
acf42f8798120acbcf21d84aa48ebba57d933d60 | 11,908 | //! MIDI Toolbox: A little helper for terminal MIDI handling
//!
//! Features:
//! * Forward MIDI data between different ports
//! * Transform MIDI data (e.g. change the channel)
//! * Monitor the received data
//! * Log the received data to a file
//!
//! TODO:
//! * Replay a previously captured file
//! * Send a MIDI file to a device
mod avg;
use avg::Avg;
mod display;
use display::{Display, Colors, COLORS_BW, COLORS_TC};
mod midi;
use midi::MidiMessage;
extern crate clap;
use clap::{Arg, App};
extern crate midir;
use midir::{MidiInput, MidiInputPort, MidiOutput, MidiOutputConnection, MidiIO, Ignore};
extern crate regex;
use regex::Regex;
use std::error::Error;
use std::fs::File;
use std::io::stdin;
use std::io::prelude::*;
use std::io::BufReader;
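/// One forwarding route: the input port/channel to listen on and the output
/// port/channel to send to (channel 0 means "all channels"; an out_port of
/// `std::usize::MAX` means the route only monitors/records without forwarding).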
struct Config {
in_port: usize,
in_channel: u8,
out_port: usize,
out_channel: u8,
}
fn main() {
let mut config = Config{
in_port: std::usize::MAX,
in_channel: 0,
out_port: std::usize::MAX,
out_channel: 0,
};
let matches = App::new("MIDIToolbox")
.version("0.2.0")
.about("Some MIDI utilities for the terminal")
.arg(Arg::with_name("version")
.short("v")
.long("version")
.help("Shows the version of the app"))
.arg(Arg::with_name("inport")
.short("i")
.long("inport")
.help("Selects the MIDI port to receive MIDI events on (0 - n, default 0)")
.takes_value(true))
.arg(Arg::with_name("outport")
.short("o")
.long("outport")
.help("Selects the MIDI port to send MIDI events to (0 - n, default OFF)")
.takes_value(true))
.arg(Arg::with_name("inchannel")
.short("c")
.long("inchannel")
.help("Selects the MIDI channel to receive MIDI events on (1 - 16, 0 = omni (default))")
.takes_value(true))
.arg(Arg::with_name("outchannel")
.short("n")
.long("outchannel")
.help("Selects the MIDI channel to send MIDI events on (1 - 16, 0 = omni (default))")
.takes_value(true))
.arg(Arg::with_name("monitor")
.short("m")
.long("monitor")
.help("Print received MIDI events to stdout"))
.arg(Arg::with_name("write")
.short("w")
.long("write")
.help("Record the received MIDI events to a file")
.takes_value(true))
.arg(Arg::with_name("list")
.short("l")
.long("list")
.help("List available MIDI ports and exit"))
.arg(Arg::with_name("configfile")
.short("r")
.long("read")
.help("Read a CSV file containing a multiplex/ demultiplex setup. Each line consists of a single entry of the form \"inport, inchannel, outport, outchannel\"")
.takes_value(true))
.arg(Arg::with_name("blackwhite")
.short("b")
.long("no-color")
.help("Don't use color when printing events."))
.arg(Arg::with_name("timing")
.short("t")
.long("show-timing")
.help("Show system real-time messages."))
.get_matches();
let in_port = matches.value_of("inport").unwrap_or("");
config.in_port = in_port.parse().unwrap_or(std::usize::MAX);
let in_channel = matches.value_of("inchannel").unwrap_or("0");
config.in_channel = in_channel.parse().unwrap_or(0);
let out_port = matches.value_of("outport").unwrap_or("");
config.out_port = out_port.parse().unwrap_or(std::usize::MAX);
let out_channel = matches.value_of("outchannel").unwrap_or("0");
config.out_channel = out_channel.parse().unwrap_or(0);
let monitor = matches.is_present("monitor");
let list = matches.is_present("list");
let record = matches.is_present("write");
let outfile = matches.value_of("write").unwrap_or("");
let show_time = matches.is_present("timing");
if list {
match list_all_ports() {
Ok(_) => (),
Err(err) => println!("Error: {}", err)
}
return;
}
// Set colors to use for output
let colors = if matches.is_present("blackwhite") {
&COLORS_BW
} else {
&COLORS_TC
};
let mut configs: Vec<Config> = vec!();
if matches.is_present("configfile") {
        let re = Regex::new(r"(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)").unwrap();
let configfile = matches.value_of("configfile").unwrap_or("");
let file = File::open(configfile).unwrap(); // TODO: Show error
let buf_reader = BufReader::new(file);
let lines = buf_reader.lines();
for line in lines {
let line = if let Ok(l) = line { l } else { continue; };
            // Skip lines that don't match the expected format instead of panicking
            let cap = if let Some(cap) = re.captures(&line) { cap } else { continue; };
if cap.len() == 5 {
let c = Config{
in_port: cap[1].parse().unwrap(),
in_channel: cap[2].parse().unwrap(),
out_port: cap[3].parse().unwrap(),
out_channel: cap[4].parse().unwrap(),
};
configs.push(c);
}
}
} else {
configs.push(config);
}
match receive_data(&configs, monitor, record, outfile, colors, show_time) {
Ok(_) => (),
Err(err) => println!("Error: {}", err)
}
}
/// Receive data from a MIDI in port and optionally forward it.
///
/// If no output port has been defined, the data is only read, written to file
/// if configured, and written to stdout if configured.
fn receive_data(configs: &[Config],
do_monitor: bool,
do_record: bool,
outfile: &str,
colors: &'static Colors,
show_time: bool)
-> Result<(), Box<dyn Error>> {
let mut conn_list = vec!();
for config in configs {
let mut display = Display::new(colors, show_time);
let mut midi_in = MidiInput::new("MIDI input")?;
midi_in.ignore(Ignore::None);
let conf_in_port = config.in_port;
let in_port = get_in_port(config, &midi_in)?;
let in_channel = config.in_channel;
let do_forward = config.out_port < std::usize::MAX;
let mut conn_out = get_out_connection(config)?;
let mut message_out: [u8; 3] = [0x00, 0x00, 0x00];
let out_channel = config.out_channel;
let mut file = if do_record {
let mut filename = outfile.to_string();
filename += "_p";
filename += &config.in_port.to_string();
Some(File::create(filename)?)
} else {
None
};
let conn_in = midi_in.connect(&in_port, "MIDI forward", move |timestamp, message, _| {
if in_channel > 0 && (message[0] & 0x0F) != in_channel - 1 {
return; // Not listening on this channel
}
if do_forward {
// Filter some messages (for Push2)
let m = MidiMessage::parse(message);
match m {
MidiMessage::NoteOn{channel: _, key, velocity: _} => {
if key <= 10 {
return;
}
}
_ => (),
}
// Forward data to configured output port
                if out_channel > 0 && out_channel <= 16 && out_channel != in_channel {
                    // Rewrite the channel nibble (low 4 bits of the status byte);
                    // channels are 1-based on the CLI but 0-based on the wire, and
                    // out_channel == 0 (keep incoming channel) must not reach the
                    // subtraction below.
                    message_out[0] = message[0] & 0xF0 | (out_channel - 1);
} else {
message_out[0] = message[0];
}
if message.len() > 1 {
message_out[1] = message[1];
if message.len() == 3 {
message_out[2] = message[2];
}
}
if let Some(c) = conn_out.as_mut() {
c.send(&message_out).unwrap_or_else(|_| println!("Error when forwarding message ..."));
}
}
if do_monitor {
// Print received data to screen
display.show_message(timestamp, conf_in_port, message);
}
if do_record {
// Write received data to file
if let Some(f) = file.as_mut() {
let line = if message.len() == 3 {
format!("{:02x} {:02x} {:02x}\n", message[0], message[1], message[2])
} else if message.len() == 2 {
format!("{:02x} {:02x}\n", message[0], message[1])
} else {
"\n".to_string()
};
f.write_all(line.as_bytes()).unwrap();
}
}
}, ())?;
conn_list.push(conn_in);
}
println!("Press return to exit.");
let mut input = String::new();
stdin().read_line(&mut input)?;
Ok(())
}
fn get_in_port(config: &Config, midi_in: &MidiInput) -> Result<MidiInputPort, Box<dyn Error>> {
let conf_in_port = config.in_port;
let in_port = get_port(midi_in, conf_in_port)?;
let in_port_name = midi_in.port_name(&in_port)?;
print!("Reading from '{}'", in_port_name);
if config.in_channel > 0 {
print!(", channel {}", config.in_channel);
} else {
print!(", all channels");
}
Ok(in_port)
}
fn get_out_connection(config: &Config) -> Result<Option<MidiOutputConnection>, Box<dyn Error>> {
let do_forward = config.out_port < std::usize::MAX;
let conn_out: Option<MidiOutputConnection> = if do_forward {
let midi_out = MidiOutput::new("MIDI output")?;
let out_port = get_port(&midi_out, config.out_port)?;
let out_port_name = midi_out.port_name(&out_port)?;
print!(", forwarding to '{}'", out_port_name);
if config.out_channel > 0 {
println!(", channel {}", config.out_channel);
} else {
println!(", all channels");
}
Some(midi_out.connect(&out_port, "MIDI forward")?)
} else {
println!("");
None
};
Ok(conn_out)
}
fn get_port<T: MidiIO>(midi_io: &T, port: usize) -> Result<T::Port, Box<dyn Error>> {
let midi_ports = midi_io.ports();
let port = midi_ports.get(port)
.ok_or("Invalid port number")?;
Ok(port.clone())
}
fn list_all_ports()
-> Result<(), Box<dyn Error>> {
let mut midi_in = MidiInput::new("MIDI input")?;
midi_in.ignore(Ignore::None);
list_ports(&midi_in, "input")?;
let midi_out = MidiOutput::new("MIDI output")?;
list_ports(&midi_out, "output")
}
fn list_ports<T: MidiIO>(midi_io: &T, descr: &str)
-> Result<(), Box<dyn Error>> {
println!("\nAvailable {} ports:", descr);
let midi_ports = midi_io.ports();
for (i, p) in midi_ports.iter().enumerate() {
println!("{}: {}", i, midi_io.port_name(p)?);
}
Ok(())
}
| 36.527607 | 187 | 0.493366 |
91833e3dcadce5ba68fc98a6594d93a263ad8b30 | 15,409 | pub mod operation;
pub mod test_event;
pub mod test_file;
use std::{ops::Deref, sync::Arc, time::Duration};
use semver::VersionReq;
use crate::{
bson::{doc, from_bson},
coll::options::{DistinctOptions, DropCollectionOptions},
concern::{Acknowledgment, WriteConcern},
options::{ClientOptions, CreateCollectionOptions, InsertManyOptions},
sdam::ServerInfo,
selection_criteria::SelectionCriteria,
test::{
assert_matches,
util::{get_default_name, FailPointGuard},
EventClient,
TestClient,
SERVERLESS,
},
RUNTIME,
};
use operation::{OperationObject, OperationResult};
use test_event::CommandStartedEvent;
use test_file::{TestData, TestFile};
const SKIPPED_OPERATIONS: &[&str] = &[
"bulkWrite",
"count",
"download",
"download_by_name",
"listCollectionObjects",
"listDatabaseObjects",
"mapReduce",
"watch",
];
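/// Run every test case in a v2-format spec test file: check the `runOn`
/// requirements, reset and seed the target collection, execute each operation
/// against the object it names, and verify expected results, command-started
/// events, and the final collection outcome.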
pub async fn run_v2_test(test_file: TestFile) {
let internal_client = TestClient::new().await;
if let Some(requirements) = test_file.run_on {
let can_run_on = requirements
.iter()
.any(|run_on| run_on.can_run_on(&internal_client));
if !can_run_on {
println!("Client topology not compatible with test");
return;
}
}
for test in test_file.tests {
println!("Running {}", &test.description);
if test
.operations
.iter()
.any(|operation| SKIPPED_OPERATIONS.contains(&operation.name.as_str()))
{
continue;
}
if let Some(skip_reason) = test.skip_reason {
println!("skipping {}: {}", test.description, skip_reason);
continue;
}
// `killAllSessions` isn't supported on serverless.
// TODO CLOUDP-84298 remove this conditional.
if !*SERVERLESS {
match internal_client
.database("admin")
.run_command(doc! { "killAllSessions": [] }, None)
.await
{
Ok(_) => {}
Err(err) => match err.code() {
Some(11601) => {}
_ => panic!("{}: killAllSessions failed", test.description),
},
}
}
let db_name = test_file
.database_name
.clone()
.unwrap_or_else(|| get_default_name(&test.description));
let coll_name = test_file
.collection_name
.clone()
.unwrap_or_else(|| get_default_name(&test.description));
let coll = internal_client.database(&db_name).collection(&coll_name);
let options = DropCollectionOptions::builder()
.write_concern(majority_write_concern())
.build();
let req = VersionReq::parse(">=4.7").unwrap();
if !(db_name.as_str() == "admin"
&& internal_client.is_sharded()
&& req.matches(&internal_client.server_version))
{
coll.drop(options).await.unwrap();
}
let options = CreateCollectionOptions::builder()
.write_concern(majority_write_concern())
.build();
internal_client
.database(&db_name)
.create_collection(&coll_name, options)
.await
.unwrap();
if let Some(data) = &test_file.data {
match data {
TestData::Single(data) => {
if !data.is_empty() {
let options = InsertManyOptions::builder()
.write_concern(majority_write_concern())
.build();
coll.insert_many(data.clone(), options).await.unwrap();
}
}
TestData::Many(_) => panic!("{}: invalid data format", &test.description),
}
}
let options = match test.client_uri {
Some(ref uri) => Some(ClientOptions::parse_uri(uri, None).await.unwrap()),
None => None,
};
let client =
EventClient::with_additional_options(options, None, test.use_multiple_mongoses, None)
.await;
// TODO RUST-900: Remove this extraneous call.
if internal_client.is_sharded()
&& internal_client.server_version_lte(4, 2)
&& test.operations.iter().any(|op| op.name == "distinct")
{
for server_address in internal_client.options.hosts.clone() {
let options = DistinctOptions::builder()
.selection_criteria(Some(SelectionCriteria::Predicate(Arc::new(
move |server_info: &ServerInfo| *server_info.address() == server_address,
))))
.build();
coll.distinct("_id", None, options).await.unwrap();
}
}
let mut fail_point_guards: Vec<FailPointGuard> = Vec::new();
if let Some(fail_point) = test.fail_point {
fail_point_guards.push(fail_point.enable(client.deref(), None).await.unwrap());
}
let options = match test.session_options {
Some(ref options) => options.get("session0").cloned(),
None => None,
};
let mut session0 = Some(client.start_session(options).await.unwrap());
let session0_lsid = session0.as_ref().unwrap().id().clone();
let options = match test.session_options {
Some(ref options) => options.get("session1").cloned(),
None => None,
};
let mut session1 = Some(client.start_session(options).await.unwrap());
let session1_lsid = session1.as_ref().unwrap().id().clone();
for operation in test.operations {
let db = match &operation.database_options {
Some(options) => client.database_with_options(&db_name, options.clone()),
None => client.database(&db_name),
};
let coll = match &operation.collection_options {
Some(options) => db.collection_with_options(&coll_name, options.clone()),
None => db.collection(&coll_name),
};
let session = match operation.session.as_deref() {
Some("session0") => session0.as_mut(),
Some("session1") => session1.as_mut(),
Some(other) => panic!("unknown session name: {}", other),
None => None,
};
let result = match operation.object {
Some(OperationObject::Collection) | None => {
let result = operation.execute_on_collection(&coll, session).await;
// This test (in src/test/spec/json/sessions/server-support.json) runs two
// operations with implicit sessions in sequence and then checks to see if they
// used the same lsid. We delay for one second to ensure that the
// implicit session used in the first operation is returned to the pool before
// the second operation is executed.
if test.description == "Server supports implicit sessions" {
RUNTIME.delay_for(Duration::from_secs(1)).await;
}
result
}
Some(OperationObject::Database) => {
operation.execute_on_database(&db, session).await
}
Some(OperationObject::Client) => operation.execute_on_client(&client).await,
Some(OperationObject::Session0) => {
if operation.name == "endSession" {
let session = session0.take();
drop(session);
RUNTIME.delay_for(Duration::from_secs(1)).await;
continue;
} else {
operation
.execute_on_session(session0.as_mut().unwrap())
.await
}
}
Some(OperationObject::Session1) => {
if operation.name == "endSession" {
let session = session1.take();
drop(session);
RUNTIME.delay_for(Duration::from_secs(1)).await;
continue;
} else {
operation
.execute_on_session(session1.as_mut().unwrap())
.await
}
}
Some(OperationObject::TestRunner) => {
match operation.name.as_str() {
"assertDifferentLsidOnLastTwoCommands" => {
assert_different_lsid_on_last_two_commands(&client)
}
"assertSameLsidOnLastTwoCommands" => {
assert_same_lsid_on_last_two_commands(&client)
}
"assertSessionDirty" => {
assert!(session.unwrap().is_dirty())
}
"assertSessionNotDirty" => {
assert!(!session.unwrap().is_dirty())
}
"assertSessionTransactionState"
| "assertSessionPinned"
| "assertSessionUnpinned" => {
operation
.execute_on_session(session.unwrap())
.await
.unwrap();
}
"assertCollectionExists"
| "assertCollectionNotExists"
| "assertIndexExists"
| "assertIndexNotExists" => {
operation.execute_on_client(&internal_client).await.unwrap();
}
"targetedFailPoint" => {
let fail_point = from_bson(
operation
.execute_on_client(&internal_client)
.await
.unwrap()
.unwrap(),
)
.unwrap();
let selection_criteria = session
.unwrap()
.transaction
.pinned_mongos()
.cloned()
.unwrap_or_else(|| panic!("ClientSession is not pinned"));
fail_point_guards.push(
client
.deref()
.enable_failpoint(fail_point, Some(selection_criteria))
.await
.unwrap(),
);
}
other => panic!("unknown operation: {}", other),
}
continue;
}
Some(OperationObject::GridfsBucket) => {
panic!("unsupported operation: {}", operation.name)
}
};
if let Some(error) = operation.error {
assert_eq!(error, result.is_err(), "{}", &test.description);
}
if let Some(expected_result) = operation.result {
match expected_result {
OperationResult::Success(expected) => {
let result = result.unwrap().unwrap();
assert_matches(&result, &expected, Some(&test.description));
}
OperationResult::Error(operation_error) => {
let error = result.unwrap_err();
if let Some(error_contains) = operation_error.error_contains {
let message = error.message().unwrap();
assert!(message.contains(&error_contains));
}
if let Some(error_code_name) = operation_error.error_code_name {
let code_name = error.code_name().unwrap();
assert_eq!(error_code_name, code_name);
}
if let Some(error_code) = operation_error.error_code {
let code = error.code().unwrap();
assert_eq!(error_code, code);
}
if let Some(error_labels_contain) = operation_error.error_labels_contain {
let labels = error.labels();
error_labels_contain
.iter()
.for_each(|label| assert!(labels.contains(label)));
}
if let Some(error_labels_omit) = operation_error.error_labels_omit {
let labels = error.labels();
error_labels_omit
.iter()
.for_each(|label| assert!(!labels.contains(label)));
}
}
}
}
}
drop(session0);
drop(session1);
// wait for the transaction in progress to be aborted implicitly when the session is dropped
if test.description.as_str() == "implicit abort" {
RUNTIME.delay_for(Duration::from_secs(1)).await;
}
if let Some(expectations) = test.expectations {
let events: Vec<CommandStartedEvent> = client
.get_all_command_started_events()
.into_iter()
.map(Into::into)
.collect();
assert!(events.len() >= expectations.len(), "{}", test.description);
for (actual_event, expected_event) in events.iter().zip(expectations.iter()) {
assert!(actual_event.matches_expected(
expected_event,
&session0_lsid,
&session1_lsid
));
}
}
if let Some(outcome) = test.outcome {
assert!(outcome.matches_actual(db_name, coll_name, &client).await);
}
}
}
fn majority_write_concern() -> WriteConcern {
WriteConcern::builder().w(Acknowledgment::Majority).build()
}
fn assert_different_lsid_on_last_two_commands(client: &EventClient) {
let events = client.get_all_command_started_events();
let lsid1 = events[events.len() - 1].command.get("lsid").unwrap();
let lsid2 = events[events.len() - 2].command.get("lsid").unwrap();
assert_ne!(lsid1, lsid2);
}
fn assert_same_lsid_on_last_two_commands(client: &EventClient) {
let events = client.get_all_command_started_events();
let lsid1 = events[events.len() - 1].command.get("lsid").unwrap();
let lsid2 = events[events.len() - 2].command.get("lsid").unwrap();
assert_eq!(lsid1, lsid2);
}
| 40.44357 | 100 | 0.486793 |
6166a1ec651db8d5760fac2ac12dc64381c90ec9 | 60,064 | extern crate cargo;
extern crate cargotest;
extern crate hamcrest;
use std::fs::File;
use std::io::prelude::*;
use std::str;
use cargotest::{sleep_ms, is_nightly};
use cargotest::support::{project, execs, basic_bin_manifest, basic_lib_manifest};
use cargotest::support::paths::CargoPathExt;
use hamcrest::{assert_that, existing_file, is_not};
use cargo::util::process;
#[test]
fn cargo_test_simple() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
fn hello() -> &'static str {
"hello"
}
pub fn main() {
println!("{}", hello())
}
#[test]
fn test_hello() {
assert_eq!(hello(), "hello")
}"#);
assert_that(p.cargo_process("build"), execs());
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_stdout("hello\n"));
assert_that(p.cargo("test"),
execs().with_stderr(format!("\
[COMPILING] foo v0.5.0 ({})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", p.url()))
.with_stdout("
running 1 test
test test_hello ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn cargo_test_release() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.1.0"
[dependencies]
bar = { path = "bar" }
"#)
.file("src/lib.rs", r#"
extern crate bar;
pub fn foo() { bar::bar(); }
#[test]
fn test() { foo(); }
"#)
.file("tests/test.rs", r#"
extern crate foo;
#[test]
fn test() { foo::foo(); }
"#)
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
"#)
.file("bar/src/lib.rs", "pub fn bar() {}");
assert_that(p.cargo_process("test").arg("-v").arg("--release"),
execs().with_stderr(format!("\
[COMPILING] bar v0.0.1 ({dir}/bar)
[RUNNING] [..] -C opt-level=3 [..]
[COMPILING] foo v0.1.0 ({dir})
[RUNNING] [..] -C opt-level=3 [..]
[RUNNING] [..] -C opt-level=3 [..]
[RUNNING] [..] -C opt-level=3 [..]
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE]`
[RUNNING] `[..]target[/]release[/]deps[/]test-[..][EXE]`
[DOCTEST] foo
[RUNNING] `rustdoc --test [..]lib.rs[..]`", dir = p.url()))
.with_stdout("
running 1 test
test test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn cargo_test_verbose() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
fn main() {}
#[test] fn test_hello() {}
"#);
assert_that(p.cargo_process("test").arg("-v").arg("hello"),
execs().with_stderr(format!("\
[COMPILING] foo v0.5.0 ({url})
[RUNNING] `rustc [..] src[/]foo.rs [..]`
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `[..]target[/]debug[/]deps[/]foo-[..][EXE] hello`", url = p.url()))
.with_stdout("
running 1 test
test test_hello ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn many_similar_names() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
pub fn foo() {}
#[test] fn lib_test() {}
")
.file("src/main.rs", "
extern crate foo;
fn main() {}
#[test] fn bin_test() { foo::foo() }
")
.file("tests/foo.rs", r#"
extern crate foo;
#[test] fn test_test() { foo::foo() }
"#);
let output = p.cargo_process("test").arg("-v").exec_with_output().unwrap();
let output = str::from_utf8(&output.stdout).unwrap();
assert!(output.contains("test bin_test"), "bin_test missing\n{}", output);
assert!(output.contains("test lib_test"), "lib_test missing\n{}", output);
assert!(output.contains("test test_test"), "test_test missing\n{}", output);
}
#[test]
fn cargo_test_failing_test() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
fn hello() -> &'static str {
"hello"
}
pub fn main() {
println!("{}", hello())
}
#[test]
fn test_hello() {
assert_eq!(hello(), "nope")
}"#);
assert_that(p.cargo_process("build"), execs());
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_stdout("hello\n"));
assert_that(p.cargo("test"),
execs().with_stderr(format!("\
[COMPILING] foo v0.5.0 ({url})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[ERROR] test failed", url = p.url()))
.with_stdout_contains("
running 1 test
test test_hello ... FAILED
failures:
---- test_hello stdout ----
<tab>thread 'test_hello' panicked at 'assertion failed: \
`(left == right)` (left: \
`\"hello\"`, right: `\"nope\"`)', src[/]foo.rs:12
")
.with_stdout_contains("\
failures:
test_hello
test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured
")
.with_status(101));
}
#[test]
fn test_with_lib_dep() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[[bin]]
name = "baz"
path = "src/main.rs"
"#)
.file("src/lib.rs", r#"
///
/// ```rust
/// extern crate foo;
/// fn main() {
/// println!("{:?}", foo::foo());
/// }
/// ```
///
pub fn foo(){}
#[test] fn lib_test() {}
"#)
.file("src/main.rs", "
extern crate foo;
fn main() {}
#[test]
fn bin_test() {}
");
assert_that(p.cargo_process("test"),
execs().with_stderr(format!("\
[COMPILING] foo v0.0.1 ({})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]baz-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[DOCTEST] foo", p.url()))
.with_stdout("
running 1 test
test bin_test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test lib_test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test foo_0 ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"))
}
#[test]
fn test_with_deep_lib_dep() {
let p = project("bar")
.file("Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[dependencies.foo]
path = "../foo"
"#)
.file("src/lib.rs", "
extern crate foo;
/// ```
/// bar::bar();
/// ```
pub fn bar() {}
#[test]
fn bar_test() {
foo::foo();
}
");
let p2 = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
pub fn foo() {}
#[test]
fn foo_test() {}
");
p2.build();
assert_that(p.cargo_process("test"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ([..])
[COMPILING] bar v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[..]
[DOCTEST] bar", dir = p.url()))
.with_stdout("
running 1 test
test bar_test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test bar_0 ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn external_test_explicit() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[[test]]
name = "test"
path = "src/test.rs"
"#)
.file("src/lib.rs", r#"
pub fn get_hello() -> &'static str { "Hello" }
#[test]
fn internal_test() {}
"#)
.file("src/test.rs", r#"
extern crate foo;
#[test]
fn external_test() { assert_eq!(foo::get_hello(), "Hello") }
"#);
assert_that(p.cargo_process("test"),
execs().with_stderr(format!("\
[COMPILING] foo v0.0.1 ({})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]
[DOCTEST] foo", p.url()))
.with_stdout("
running 1 test
test internal_test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test external_test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"))
}
#[test]
fn external_test_implicit() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", r#"
pub fn get_hello() -> &'static str { "Hello" }
#[test]
fn internal_test() {}
"#)
.file("tests/external.rs", r#"
extern crate foo;
#[test]
fn external_test() { assert_eq!(foo::get_hello(), "Hello") }
"#);
assert_that(p.cargo_process("test"),
execs().with_stderr(format!("\
[COMPILING] foo v0.0.1 ({})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]external-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[DOCTEST] foo", p.url()))
.with_stdout("
running 1 test
test external_test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test internal_test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"))
}
#[test]
fn dont_run_examples() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", r#"
"#)
.file("examples/dont-run-me-i-will-fail.rs", r#"
fn main() { panic!("Examples should not be run by 'cargo test'"); }
"#);
assert_that(p.cargo_process("test"),
execs().with_status(0));
}
#[test]
fn pass_through_command_line() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
#[test] fn foo() {}
#[test] fn bar() {}
");
assert_that(p.cargo_process("test").arg("bar"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[DOCTEST] foo", dir = p.url()))
.with_stdout("
running 1 test
test bar ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"));
assert_that(p.cargo("test").arg("foo"),
execs().with_status(0)
.with_stderr("\
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[DOCTEST] foo")
.with_stdout("
running 1 test
test foo ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"));
}
// Regression test for running cargo-test twice with
// tests in an rlib
#[test]
fn cargo_test_twice() {
let p = project("test_twice")
.file("Cargo.toml", &basic_lib_manifest("test_twice"))
.file("src/test_twice.rs", r#"
#![crate_type = "rlib"]
#[test]
fn dummy_test() { }
"#);
p.cargo_process("build");
for _ in 0..2 {
assert_that(p.cargo("test"),
execs().with_status(0));
}
}
#[test]
fn lib_bin_same_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
[[bin]]
name = "foo"
"#)
.file("src/lib.rs", "
#[test] fn lib_test() {}
")
.file("src/main.rs", "
extern crate foo;
#[test]
fn bin_test() {}
");
assert_that(p.cargo_process("test"),
execs().with_stderr(format!("\
[COMPILING] foo v0.0.1 ({})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[DOCTEST] foo", p.url()))
.with_stdout("
running 1 test
test [..] ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test [..] ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"))
}
#[test]
fn lib_with_standard_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
/// ```
/// syntax::foo();
/// ```
pub fn foo() {}
#[test]
fn foo_test() {}
")
.file("tests/test.rs", "
extern crate syntax;
#[test]
fn test() { syntax::foo() }
");
assert_that(p.cargo_process("test"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]
[DOCTEST] syntax", dir = p.url()))
.with_stdout("
running 1 test
test foo_test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test foo_0 ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn lib_with_standard_name2() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
[lib]
name = "syntax"
test = false
doctest = false
"#)
.file("src/lib.rs", "
pub fn foo() {}
")
.file("src/main.rs", "
extern crate syntax;
fn main() {}
#[test]
fn test() { syntax::foo() }
");
assert_that(p.cargo_process("test"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE]", dir = p.url()))
.with_stdout("
running 1 test
test test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn lib_without_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
[lib]
test = false
doctest = false
"#)
.file("src/lib.rs", "
pub fn foo() {}
")
.file("src/main.rs", "
extern crate syntax;
fn main() {}
#[test]
fn test() { syntax::foo() }
");
assert_that(p.cargo_process("test"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]syntax-[..][EXE]", dir = p.url()))
.with_stdout("
running 1 test
test test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn bin_without_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
[lib]
test = false
doctest = false
[[bin]]
path = "src/main.rs"
"#)
.file("src/lib.rs", "
pub fn foo() {}
")
.file("src/main.rs", "
extern crate syntax;
fn main() {}
#[test]
fn test() { syntax::foo() }
");
assert_that(p.cargo_process("test"),
execs().with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
binary target bin.name is required"));
}
#[test]
fn bench_without_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
[lib]
test = false
doctest = false
[[bench]]
path = "src/bench.rs"
"#)
.file("src/lib.rs", "
pub fn foo() {}
")
.file("src/main.rs", "
extern crate syntax;
fn main() {}
#[test]
fn test() { syntax::foo() }
")
.file("src/bench.rs", "
#![feature(test)]
extern crate syntax;
extern crate test;
#[bench]
fn external_bench(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("test"),
execs().with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
bench target bench.name is required"));
}
#[test]
fn test_without_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
[lib]
test = false
doctest = false
[[test]]
path = "src/test.rs"
"#)
.file("src/lib.rs", r#"
pub fn foo() {}
pub fn get_hello() -> &'static str { "Hello" }
"#)
.file("src/main.rs", "
extern crate syntax;
fn main() {}
#[test]
fn test() { syntax::foo() }
")
.file("src/test.rs", r#"
extern crate syntax;
#[test]
fn external_test() { assert_eq!(syntax::get_hello(), "Hello") }
"#);
assert_that(p.cargo_process("test"),
execs().with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
test target test.name is required"));
}
#[test]
fn example_without_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
[lib]
test = false
doctest = false
[[example]]
path = "examples/example.rs"
"#)
.file("src/lib.rs", "
pub fn foo() {}
")
.file("src/main.rs", "
extern crate syntax;
fn main() {}
#[test]
fn test() { syntax::foo() }
")
.file("examples/example.rs", r#"
extern crate syntax;
fn main() {
println!("example1");
}
"#);
assert_that(p.cargo_process("test"),
execs().with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
example target example.name is required"));
}
#[test]
fn bin_there_for_integration() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", "
fn main() { std::process::exit(101); }
#[test] fn main_test() {}
")
.file("tests/foo.rs", r#"
use std::process::Command;
#[test]
fn test_test() {
let status = Command::new("target/debug/foo").status().unwrap();
assert_eq!(status.code(), Some(101));
}
"#);
let output = p.cargo_process("test").arg("-v").exec_with_output().unwrap();
let output = str::from_utf8(&output.stdout).unwrap();
assert!(output.contains("main_test ... ok"), "no main_test\n{}", output);
assert!(output.contains("test_test ... ok"), "no test_test\n{}", output);
}
#[test]
fn test_dylib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
crate_type = ["dylib"]
[dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", r#"
extern crate bar as the_bar;
pub fn bar() { the_bar::baz(); }
#[test]
fn foo() { bar(); }
"#)
.file("tests/test.rs", r#"
extern crate foo as the_foo;
#[test]
fn foo() { the_foo::bar(); }
"#)
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[lib]
name = "bar"
crate_type = ["dylib"]
"#)
.file("bar/src/lib.rs", "
pub fn baz() {}
");
assert_that(p.cargo_process("test"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] bar v0.0.1 ({dir}/bar)
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]", dir = p.url()))
.with_stdout("
running 1 test
test foo ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test foo ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"));
p.root().move_into_the_past();
assert_that(p.cargo("test"),
execs().with_status(0)
.with_stderr("\
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]test-[..][EXE]")
.with_stdout("
running 1 test
test foo ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test foo ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn test_twice_with_build_cmd() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
"#)
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "
#[test]
fn foo() {}
");
assert_that(p.cargo_process("test"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[DOCTEST] foo", dir = p.url()))
.with_stdout("
running 1 test
test foo ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"));
assert_that(p.cargo("test"),
execs().with_status(0)
.with_stderr("\
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[DOCTEST] foo")
.with_stdout("
running 1 test
test foo ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn test_then_build() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
#[test]
fn foo() {}
");
assert_that(p.cargo_process("test"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[DOCTEST] foo", dir = p.url()))
.with_stdout("
running 1 test
test foo ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"));
assert_that(p.cargo("build"),
execs().with_status(0)
.with_stdout(""));
}
#[test]
fn test_no_run() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
#[test]
fn foo() { panic!() }
");
assert_that(p.cargo_process("test").arg("--no-run"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
",
dir = p.url())));
}
#[test]
fn test_run_specific_bin_target() {
let prj = project("foo")
.file("Cargo.toml" , r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[bin]]
name="bin1"
path="src/bin1.rs"
[[bin]]
name="bin2"
path="src/bin2.rs"
"#)
.file("src/bin1.rs", "#[test] fn test1() { }")
.file("src/bin2.rs", "#[test] fn test2() { }");
assert_that(prj.cargo_process("test").arg("--bin").arg("bin2"),
execs().with_status(0)
.with_stderr(format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]bin2-[..][EXE]", dir = prj.url()))
.with_stdout("
running 1 test
test test2 ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn test_run_specific_test_target() {
let prj = project("foo")
.file("Cargo.toml" , r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/bin/a.rs", "fn main() { }")
.file("src/bin/b.rs", "#[test] fn test_b() { } fn main() { }")
.file("tests/a.rs", "#[test] fn test_a() { }")
.file("tests/b.rs", "#[test] fn test_b() { }");
assert_that(prj.cargo_process("test").arg("--test").arg("b"),
execs().with_status(0)
.with_stderr(format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]b-[..][EXE]", dir = prj.url()))
.with_stdout("
running 1 test
test test_b ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn test_no_harness() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[bin]]
name = "foo"
test = false
[[test]]
name = "bar"
path = "foo.rs"
harness = false
"#)
.file("src/main.rs", "fn main() {}")
.file("foo.rs", "fn main() {}");
assert_that(p.cargo_process("test").arg("--").arg("--nocapture"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]bar-[..][EXE]
",
dir = p.url())));
}
#[test]
fn selective_testing() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.d1]
path = "d1"
[dependencies.d2]
path = "d2"
[lib]
name = "foo"
doctest = false
"#)
.file("src/lib.rs", "")
.file("d1/Cargo.toml", r#"
[package]
name = "d1"
version = "0.0.1"
authors = []
[lib]
name = "d1"
doctest = false
"#)
.file("d1/src/lib.rs", "")
.file("d1/src/main.rs", "extern crate d1; fn main() {}")
.file("d2/Cargo.toml", r#"
[package]
name = "d2"
version = "0.0.1"
authors = []
[lib]
name = "d2"
doctest = false
"#)
.file("d2/src/lib.rs", "")
.file("d2/src/main.rs", "extern crate d2; fn main() {}");
p.build();
println!("d1");
assert_that(p.cargo("test").arg("-p").arg("d1"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] d1 v0.0.1 ({dir}/d1)
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE]", dir = p.url()))
.with_stdout("
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"));
println!("d2");
assert_that(p.cargo("test").arg("-p").arg("d2"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] d2 v0.0.1 ({dir}/d2)
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE]", dir = p.url()))
.with_stdout("
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"));
println!("whole");
assert_that(p.cargo("test"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]", dir = p.url()))
.with_stdout("
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn almost_cyclic_but_not_quite() {
let p = project("a")
.file("Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
[dev-dependencies.b]
path = "b"
[dev-dependencies.c]
path = "c"
"#)
.file("src/lib.rs", r#"
#[cfg(test)] extern crate b;
#[cfg(test)] extern crate c;
"#)
.file("b/Cargo.toml", r#"
[package]
name = "b"
version = "0.0.1"
authors = []
[dependencies.a]
path = ".."
"#)
.file("b/src/lib.rs", r#"
extern crate a;
"#)
.file("c/Cargo.toml", r#"
[package]
name = "c"
version = "0.0.1"
authors = []
"#)
.file("c/src/lib.rs", "");
assert_that(p.cargo_process("build"), execs().with_status(0));
assert_that(p.cargo("test"),
execs().with_status(0));
}
#[test]
fn build_then_selective_test() {
let p = project("a")
.file("Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
[dependencies.b]
path = "b"
"#)
.file("src/lib.rs", "extern crate b;")
.file("src/main.rs", "extern crate b; extern crate a; fn main() {}")
.file("b/Cargo.toml", r#"
[package]
name = "b"
version = "0.0.1"
authors = []
"#)
.file("b/src/lib.rs", "");
assert_that(p.cargo_process("build"), execs().with_status(0));
p.root().move_into_the_past();
assert_that(p.cargo("test").arg("-p").arg("b"),
execs().with_status(0));
}
#[test]
fn example_dev_dep() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[dev-dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", r#"
"#)
.file("examples/e1.rs", r#"
extern crate bar;
fn main() { }
"#)
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
"#)
.file("bar/src/lib.rs", r#"
            // make sure this file takes a while to compile: each macro level
            // doubles the expansion, so f8!() expands to the sum of 2^8 = 256 ones
macro_rules! f0( () => (1) );
macro_rules! f1( () => ({(f0!()) + (f0!())}) );
macro_rules! f2( () => ({(f1!()) + (f1!())}) );
macro_rules! f3( () => ({(f2!()) + (f2!())}) );
macro_rules! f4( () => ({(f3!()) + (f3!())}) );
macro_rules! f5( () => ({(f4!()) + (f4!())}) );
macro_rules! f6( () => ({(f5!()) + (f5!())}) );
macro_rules! f7( () => ({(f6!()) + (f6!())}) );
macro_rules! f8( () => ({(f7!()) + (f7!())}) );
pub fn bar() {
f8!();
}
"#);
assert_that(p.cargo_process("test"),
execs().with_status(0));
assert_that(p.cargo("run")
.arg("--example").arg("e1").arg("--release").arg("-v"),
execs().with_status(0));
}
#[test]
fn selective_testing_with_docs() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.d1]
path = "d1"
"#)
.file("src/lib.rs", r#"
/// ```
/// not valid rust
/// ```
pub fn foo() {}
"#)
.file("d1/Cargo.toml", r#"
[package]
name = "d1"
version = "0.0.1"
authors = []
[lib]
name = "d1"
path = "d1.rs"
"#)
.file("d1/d1.rs", "");
p.build();
assert_that(p.cargo("test").arg("-p").arg("d1"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] d1 v0.0.1 ({dir}/d1)
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]d1[..][EXE]
[DOCTEST] d1", dir = p.url()))
.with_stdout("
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn example_bin_same_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#)
.file("examples/foo.rs", r#"fn main() { println!("example"); }"#);
assert_that(p.cargo_process("test").arg("--no-run").arg("-v"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({dir})
[RUNNING] `rustc [..]`
[RUNNING] `rustc [..]`
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
", dir = p.url())));
assert_that(&p.bin("foo"), is_not(existing_file()));
assert_that(&p.bin("examples/foo"), existing_file());
assert_that(p.process(&p.bin("examples/foo")),
execs().with_status(0).with_stdout("example\n"));
assert_that(p.cargo("run"),
execs().with_status(0)
.with_stderr("\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] [..]")
.with_stdout("\
bin
"));
assert_that(&p.bin("foo"), existing_file());
}
#[test]
fn test_with_example_twice() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#)
.file("examples/foo.rs", r#"fn main() { println!("example"); }"#);
println!("first");
assert_that(p.cargo_process("test").arg("-v"),
execs().with_status(0));
assert_that(&p.bin("examples/foo"), existing_file());
println!("second");
assert_that(p.cargo("test").arg("-v"),
execs().with_status(0));
assert_that(&p.bin("examples/foo"), existing_file());
}
#[test]
fn example_with_dev_dep() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
test = false
doctest = false
[dev-dependencies.a]
path = "a"
"#)
.file("src/lib.rs", "")
.file("examples/ex.rs", "extern crate a; fn main() {}")
.file("a/Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
"#)
.file("a/src/lib.rs", "");
assert_that(p.cargo_process("test").arg("-v"),
execs().with_status(0)
.with_stderr("\
[..]
[..]
[..]
[..]
[RUNNING] `rustc --crate-name ex [..] --extern a=[..]`
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
"));
}
#[test]
fn bin_is_preserved() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}");
assert_that(p.cargo_process("build").arg("-v"),
execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
println!("testing");
assert_that(p.cargo("test").arg("-v"),
execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
}
#[test]
fn bad_example() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "");
assert_that(p.cargo_process("run").arg("--example").arg("foo"),
execs().with_status(101).with_stderr("\
[ERROR] no example target named `foo`
"));
assert_that(p.cargo_process("run").arg("--bin").arg("foo"),
execs().with_status(101).with_stderr("\
[ERROR] no bin target named `foo`
"));
}
#[test]
fn doctest_feature() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[features]
bar = []
"#)
.file("src/lib.rs", r#"
/// ```rust
/// assert_eq!(foo::foo(), 1);
/// ```
#[cfg(feature = "bar")]
pub fn foo() -> i32 { 1 }
"#);
assert_that(p.cargo_process("test").arg("--features").arg("bar"),
execs().with_status(0)
.with_stderr("\
[COMPILING] foo [..]
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo[..][EXE]
[DOCTEST] foo")
.with_stdout("
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test foo_0 ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"))
}
#[test]
fn dashes_to_underscores() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo-bar"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", r#"
/// ```
/// assert_eq!(foo_bar::foo(), 1);
/// ```
pub fn foo() -> i32 { 1 }
"#);
assert_that(p.cargo_process("test").arg("-v"),
execs().with_status(0));
}
#[test]
fn doctest_dev_dep() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dev-dependencies]
b = { path = "b" }
"#)
.file("src/lib.rs", r#"
/// ```
/// extern crate b;
/// ```
pub fn foo() {}
"#)
.file("b/Cargo.toml", r#"
[package]
name = "b"
version = "0.0.1"
authors = []
"#)
.file("b/src/lib.rs", "");
assert_that(p.cargo_process("test").arg("-v"),
execs().with_status(0));
}
#[test]
fn filter_no_doc_tests() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", r#"
/// ```
/// extern crate b;
/// ```
pub fn foo() {}
"#)
.file("tests/foo.rs", "");
assert_that(p.cargo_process("test").arg("--test=foo"),
execs().with_stderr("\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo[..][EXE]")
.with_stdout("
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn dylib_doctest() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
crate-type = ["rlib", "dylib"]
test = false
"#)
.file("src/lib.rs", r#"
/// ```
/// foo::foo();
/// ```
pub fn foo() {}
"#);
assert_that(p.cargo_process("test"),
execs().with_stderr("\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[DOCTEST] foo")
.with_stdout("
running 1 test
test foo_0 ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn dylib_doctest2() {
// can't doctest dylibs as they're statically linked together
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
crate-type = ["dylib"]
test = false
"#)
.file("src/lib.rs", r#"
/// ```
/// foo::foo();
/// ```
pub fn foo() {}
"#);
assert_that(p.cargo_process("test"),
execs().with_stdout(""));
}
#[test]
fn cyclic_dev_dep_doc_test() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dev-dependencies]
bar = { path = "bar" }
"#)
.file("src/lib.rs", r#"
//! ```
//! extern crate bar;
//! ```
"#)
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[dependencies]
foo = { path = ".." }
"#)
.file("bar/src/lib.rs", r#"
extern crate foo;
"#);
assert_that(p.cargo_process("test"),
execs().with_stderr("\
[COMPILING] foo v0.0.1 ([..])
[COMPILING] bar v0.0.1 ([..])
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo[..][EXE]
[DOCTEST] foo")
.with_stdout("
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test _0 ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"))
}
#[test]
fn dev_dep_with_build_script() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dev-dependencies]
bar = { path = "bar" }
"#)
.file("src/lib.rs", "")
.file("examples/foo.rs", "fn main() {}")
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
build = "build.rs"
"#)
.file("bar/src/lib.rs", "")
.file("bar/build.rs", "fn main() {}");
assert_that(p.cargo_process("test"),
execs().with_status(0));
}
#[test]
fn no_fail_fast() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", r#"
pub fn add_one(x: i32) -> i32{
x + 1
}
/// ```rust
/// use foo::sub_one;
/// assert_eq!(sub_one(101), 100);
/// ```
pub fn sub_one(x: i32) -> i32{
x - 1
}
"#)
.file("tests/test_add_one.rs", r#"
extern crate foo;
use foo::*;
#[test]
fn add_one_test() {
assert_eq!(add_one(1), 2);
}
#[test]
fn fail_add_one_test() {
assert_eq!(add_one(1), 1);
}
"#)
.file("tests/test_sub_one.rs", r#"
extern crate foo;
use foo::*;
#[test]
fn sub_one_test() {
assert_eq!(sub_one(1), 0);
}
"#);
assert_that(p.cargo_process("test").arg("--no-fail-fast"),
execs().with_status(101)
.with_stderr_contains("\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]debug[/]deps[/]test_add_one-[..][EXE]")
.with_stdout_contains("
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
")
.with_stderr_contains("\
[RUNNING] target[/]debug[/]deps[/]test_sub_one-[..][EXE]
[DOCTEST] foo")
.with_stdout_contains("\
test result: FAILED. 1 passed; 1 failed; 0 ignored; 0 measured
running 1 test
test sub_one_test ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
running 1 test
test sub_one_0 ... ok
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
"))
}
#[test]
fn test_multiple_packages() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.d1]
path = "d1"
[dependencies.d2]
path = "d2"
[lib]
name = "foo"
doctest = false
"#)
.file("src/lib.rs", "")
.file("d1/Cargo.toml", r#"
[package]
name = "d1"
version = "0.0.1"
authors = []
[lib]
name = "d1"
doctest = false
"#)
.file("d1/src/lib.rs", "")
.file("d2/Cargo.toml", r#"
[package]
name = "d2"
version = "0.0.1"
authors = []
[lib]
name = "d2"
doctest = false
"#)
.file("d2/src/lib.rs", "");
p.build();
assert_that(p.cargo("test").arg("-p").arg("d1").arg("-p").arg("d2"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] target[/]debug[/]deps[/]d1-[..][EXE]")
.with_stdout_contains("
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
")
.with_stderr_contains("\
[RUNNING] target[/]debug[/]deps[/]d2-[..][EXE]")
.with_stdout_contains("
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
"));
}
#[test]
fn bin_does_not_rebuild_tests() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("tests/foo.rs", "");
p.build();
assert_that(p.cargo("test").arg("-v"),
execs().with_status(0));
sleep_ms(1000);
File::create(&p.root().join("src/main.rs")).unwrap()
.write_all(b"fn main() { 3; }").unwrap();
assert_that(p.cargo("test").arg("-v").arg("--no-run"),
execs().with_status(0)
.with_stderr("\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] src[/]main.rs [..]`
[RUNNING] `rustc [..] src[/]main.rs [..]`
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
"));
}
#[test]
fn selective_test_wonky_profile() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[profile.release]
opt-level = 2
[dependencies]
a = { path = "a" }
"#)
.file("src/lib.rs", "")
.file("a/Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
"#)
.file("a/src/lib.rs", "");
p.build();
assert_that(p.cargo("test").arg("-v").arg("--no-run").arg("--release")
.arg("-p").arg("foo").arg("-p").arg("a"),
execs().with_status(0));
}
#[test]
fn selective_test_optional_dep() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies]
a = { path = "a", optional = true }
"#)
.file("src/lib.rs", "")
.file("a/Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
"#)
.file("a/src/lib.rs", "");
p.build();
assert_that(p.cargo("test").arg("-v").arg("--no-run")
.arg("--features").arg("a").arg("-p").arg("a"),
execs().with_status(0).with_stderr("\
[COMPILING] a v0.0.1 ([..])
[RUNNING] `rustc [..] a[/]src[/]lib.rs [..]`
[RUNNING] `rustc [..] a[/]src[/]lib.rs [..]`
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
"));
}
#[test]
fn only_test_docs() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", r#"
#[test]
fn foo() {
let a: u32 = "hello";
}
/// ```
/// println!("ok");
/// ```
pub fn bar() {
}
"#)
.file("tests/foo.rs", "this is not rust");
p.build();
assert_that(p.cargo("test").arg("--doc"),
execs().with_status(0)
.with_stderr("\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[DOCTEST] foo")
.with_stdout("
running 1 test
test bar_0 ... ok
test result: ok.[..]
"));
}
#[test]
fn test_panic_abort_with_dep() {
if !is_nightly() {
return
}
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies]
bar = { path = "bar" }
[profile.dev]
panic = 'abort'
"#)
.file("src/lib.rs", r#"
extern crate bar;
#[test]
fn foo() {}
"#)
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
"#)
.file("bar/src/lib.rs", "");
assert_that(p.cargo_process("test").arg("-v"),
execs().with_status(0));
}
#[test]
fn cfg_test_even_with_no_harness() {
if !is_nightly() {
return
}
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
harness = false
doctest = false
"#)
.file("src/lib.rs", r#"
#[cfg(test)]
fn main() {
println!("hello!");
}
"#);
assert_that(p.cargo_process("test").arg("-v"),
execs().with_status(0)
.with_stdout("hello!\n")
.with_stderr("\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..]`
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `[..]`
"));
}
#[test]
fn panic_abort_multiple() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies]
a = { path = "a" }
[profile.release]
panic = 'abort'
"#)
.file("src/lib.rs", "extern crate a;")
.file("a/Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
"#)
.file("a/src/lib.rs", "");
assert_that(p.cargo_process("test")
.arg("--release").arg("-v")
.arg("-p").arg("foo")
.arg("-p").arg("a"),
execs().with_status(0));
}
#[test]
fn pass_correct_cfgs_flags_to_rustdoc() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[features]
default = ["feature_a/default"]
nightly = ["feature_a/nightly"]
[dependencies.feature_a]
path = "libs/feature_a"
default-features = false
"#)
.file("src/lib.rs", r#"
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
assert!(true);
}
}
"#)
.file("libs/feature_a/Cargo.toml", r#"
[package]
name = "feature_a"
version = "0.1.0"
authors = []
[features]
default = ["serde_codegen"]
nightly = ["serde_derive"]
[dependencies]
serde_derive = { version = "0.8", optional = true }
[build-dependencies]
serde_codegen = { version = "0.8", optional = true }
"#)
.file("libs/feature_a/src/lib.rs", r#"
#[cfg(feature = "serde_derive")]
const MSG: &'static str = "This is safe";
#[cfg(feature = "serde_codegen")]
const MSG: &'static str = "This is risky";
pub fn get() -> &'static str {
MSG
}
"#);
assert_that(p.cargo_process("test")
.arg("--package").arg("feature_a")
.arg("--verbose"),
execs().with_status(0)
.with_stderr_contains("\
[DOCTEST] feature_a
[RUNNING] `rustdoc --test [..]serde_codegen[..]`"));
assert_that(p.cargo_process("test")
.arg("--verbose"),
execs().with_status(0)
.with_stderr_contains("\
[DOCTEST] foo
[RUNNING] `rustdoc --test [..]feature_a[..]`"));
}
#[test]
fn test_release_ignore_panic() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies]
a = { path = "a" }
[profile.test]
panic = 'abort'
[profile.release]
panic = 'abort'
"#)
.file("src/lib.rs", "extern crate a;")
.file("a/Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
"#)
.file("a/src/lib.rs", "");
p.build();
println!("test");
assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
println!("bench");
assert_that(p.cargo("bench").arg("-v"), execs().with_status(0));
}
#[test]
fn test_many_with_features() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies]
a = { path = "a" }
[features]
foo = []
[workspace]
"#)
.file("src/lib.rs", "")
.file("a/Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
"#)
.file("a/src/lib.rs", "");
p.build();
assert_that(p.cargo("test").arg("-v")
.arg("-p").arg("a")
.arg("-p").arg("foo")
.arg("--features").arg("foo"),
execs().with_status(0));
}
| 25.016243 | 81 | 0.455181 |
266c0e0d8bce6c976cd421b1298392338a0a0e4e | 2,984 | #![crate_type = "lib"]
#![deny(missing_docs)]
//! # win-opacity
//!
//! win-opacity is a library for changing the opacity level of windows on the Windows operating system.
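//!
//! A minimal end-to-end sketch combining the helpers below (it assumes at
//! least one visible top-level window exists; the alpha value `230` is an
//! arbitrary choice, not a library default):
//!
//! ```no_run
//! extern crate win_opacity;
//!
//! // Slightly dim every visible top-level window.
//! for window in win_opacity::get_visible_windows() {
//!     win_opacity::set_opacity(window, 230);
//! }
//! ```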
// Make sure that this is compiled on Windows
#[cfg(windows)]
extern crate winapi;
use winapi::shared::minwindef::{BOOL, DWORD, LPARAM, TRUE};
use winapi::shared::windef::HWND as Handle;
use winapi::um::winnt::{LONG, LPSTR};
use winapi::um::winuser;
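// Callback handed to `EnumWindows`; `lparam` carries a pointer to the
// `Vec<Handle>` that collects every enumerated window handle.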
extern "system" fn enum_windows_callback(handle: Handle, lparam: LPARAM) -> BOOL {
let windows_vec = lparam as *mut Vec<Handle>;
if let Some(ref mut windows_collector) = unsafe { windows_vec.as_mut() } {
windows_collector.push(handle);
}
TRUE
}
/// Returns all of the available windows.
///
/// ```rust
/// extern crate win_opacity;
///
/// win_opacity::get_all_windows();
/// ```
pub fn get_all_windows() -> Vec<Handle> {
let mut windows: Vec<Handle> = Vec::new();
unsafe {
winuser::EnumWindows(
Some(enum_windows_callback),
&mut windows as *mut _ as LPARAM,
)
};
windows
}
/// Returns the title of a window.
///
/// ```rust
/// extern crate win_opacity;
///
/// let window = win_opacity::get_all_windows()[0];
/// win_opacity::get_window_title(&window);
/// ```
pub fn get_window_title(handle: &Handle) -> String {
const MAX_COUNT: usize = 256;
let mut buffer = [0u8; MAX_COUNT];
let mut result = String::new();
unsafe {
let length = winuser::GetWindowTextA(*handle, &mut buffer as *mut _ as LPSTR, MAX_COUNT as i32);
if length > 0 {
let exact_text = std::slice::from_raw_parts(buffer.as_ptr(), length as usize);
result = String::from_utf8_lossy(exact_text).trim().to_string();
}
}
result
}
/// Indicates if a window is visible.
///
/// ```rust
/// extern crate win_opacity;
///
/// let window = win_opacity::get_all_windows()[0];
/// win_opacity::is_window_visible(&window);
/// ```
pub fn is_window_visible(handle: &Handle) -> bool {
unsafe {
winuser::IsWindowVisible(*handle) == TRUE
}
}
/// Returns all visible windows.
///
/// ```rust
/// extern crate win_opacity;
///
/// let windows = win_opacity::get_visible_windows();
/// if let Some(window) = windows.get(0) {
/// assert!(win_opacity::is_window_visible(window));
/// }
/// ```
pub fn get_visible_windows() -> Vec<Handle> {
get_all_windows()
.into_iter()
.filter(|&win| is_window_visible(&win) && get_window_title(&win).len() > 0)
.collect::<Vec<_>>()
}
/// Sets the opacity level of a window.
///
/// ```rust
/// extern crate win_opacity;
///
/// let window = win_opacity::get_visible_windows()[0];
/// win_opacity::set_opacity(window, 230);
/// ```
pub fn set_opacity(handle: Handle, opacity: u8) {
const GWL_EXSTYLE: i32 = -20;
const WS_EX_LAYERED: LONG = 0x80000;
const LWA_ALPHA: DWORD = 0x2;
unsafe {
let window_long = winuser::GetWindowLongA(handle, GWL_EXSTYLE);
winuser::SetWindowLongA(handle, GWL_EXSTYLE, window_long | WS_EX_LAYERED);
winuser::SetLayeredWindowAttributes(handle, 0, opacity, LWA_ALPHA);
}
} | 26.175439 | 103 | 0.676273 |
90eb6a293b8aab81900435a652d5c7396ec9e89d | 297 | #![deny(warnings)]
#![deny(rust_2018_idioms)]
#[cfg(all(target_env = "musl", target_pointer_width = "64"))]
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
use static_web_server::{Result, Server};
fn main() -> Result {
Server::new().run()?;
Ok(())
}
| 19.8 | 62 | 0.670034 |
8f095537c4a4487c7e68aa70ca96c57969aceea5 | 3,673 | use std::iter::Enumerate;
use std::iter::Map;
use std::iter::Skip;
use std::iter::Take;
use std::iter::Zip;
use std::ops::Range;
use std::slice;
use anyhow::Result;
use itertools::Itertools;
use itertools::{izip, repeat_n, RepeatN};
use crate::fst_impls::const_fst::data_structure::ConstState;
use crate::fst_impls::ConstFst;
use crate::fst_traits::FstIterData;
use crate::fst_traits::{ArcIterator, FstIntoIterator, FstIterator, StateIterator};
use crate::semirings::Semiring;
use crate::Arc;
use crate::StateId;
impl<W> ConstFst<W> {
fn state_range(&self) -> Range<usize> {
0..self.states.len()
}
fn arc_range(&self, state: &ConstState<W>) -> Range<usize> {
state.pos..state.pos + state.narcs
}
}
impl<'a, W: 'static + Semiring> ArcIterator<'a> for ConstFst<W> {
type Iter = slice::Iter<'a, Arc<W>>;
fn arcs_iter(&'a self, state_id: StateId) -> Result<Self::Iter> {
let state = self
.states
.get(state_id)
.ok_or_else(|| format_err!("State {:?} doesn't exist", state_id))?;
Ok(self.arcs[self.arc_range(state)].iter())
}
unsafe fn arcs_iter_unchecked(&'a self, state_id: usize) -> Self::Iter {
let state = self.states.get_unchecked(state_id);
self.arcs[self.arc_range(state)].iter()
}
}
impl<W: Semiring> FstIntoIterator for ConstFst<W>
where
W: 'static,
{
type ArcsIter = std::vec::IntoIter<Arc<W>>;
// TODO: Change this to impl once the feature has been stabilized
// #![feature(type_alias_impl_trait)]
// https://github.com/rust-lang/rust/issues/63063)
type FstIter = Box<dyn Iterator<Item = FstIterData<W, Self::ArcsIter>>>;
fn fst_into_iter(mut self) -> Self::FstIter {
        // Here the contiguous arcs are moved into multiple vectors so that an
        // iterator can be created for each state.
// TODO: Find a way to avoid this allocation.
let mut arcs = Vec::with_capacity(self.states.len());
for const_state in &self.states {
arcs.push(self.arcs.drain(0..const_state.narcs).collect_vec())
}
Box::new(
izip!(self.states.into_iter(), arcs.into_iter())
.enumerate()
.map(|(state_id, (const_state, arcs_from_state))| FstIterData {
state_id,
arcs: arcs_from_state.into_iter(),
final_weight: const_state.final_weight,
num_arcs: const_state.narcs,
}),
)
}
}
impl<'a, W> StateIterator<'a> for ConstFst<W> {
type Iter = Range<StateId>;
fn states_iter(&'a self) -> Self::Iter {
self.state_range()
}
}
impl<'a, W: Semiring + 'static> FstIterator<'a> for ConstFst<W> {
type ArcsIter = Take<Skip<std::slice::Iter<'a, Arc<W>>>>;
type FstIter = Map<
Enumerate<Zip<std::slice::Iter<'a, ConstState<W>>, RepeatN<&'a Vec<Arc<W>>>>>,
Box<
dyn FnMut(
(StateId, (&'a ConstState<W>, &'a Vec<Arc<W>>)),
) -> FstIterData<&'a W, Self::ArcsIter>,
>,
>;
fn fst_iter(&'a self) -> Self::FstIter {
let it = repeat_n(&self.arcs, self.states.len());
izip!(self.states.iter(), it).enumerate().map(Box::new(
|(state_id, (fst_state, arcs)): (StateId, (&'a ConstState<W>, &'a Vec<Arc<W>>))| {
FstIterData {
state_id,
arcs: arcs.iter().skip(fst_state.pos).take(fst_state.narcs),
final_weight: fst_state.final_weight.as_ref(),
num_arcs: fst_state.narcs,
}
},
))
}
}
| 33.09009 | 97 | 0.582358 |
d6eb6420df7b7634dc9d528a06f840fa5a52bd8c | 1,209 | extern crate qmlrs;
extern crate libc;
use qmlrs::*;
#[test]
pub fn qobject_test() {
let mut engine = qmlrs::Engine::new("test");
extern "C" fn test_slot(_: *mut ffi::QObject, id: libc::c_int, _: *const ffi::QVariantList, ret: *mut ffi::QVariant) {
println!("slot: {} called",id);
if 1 == id {
println!("test_slot called");
} else if 2 == id {
println!("func(int) called");
unsafe {
ffi::qmlrs_variant_set_int64(ret,42);
}
} else {
panic!("unknown id {}",id);
}
}
let mut metaobj = MetaObject::new("Person",test_slot);
assert_eq!(metaobj.add_signal("nameChanged()"),0);
metaobj.add_property("name","QString",Some("nameChanged()"));
assert_eq!(metaobj.add_slot("test_slot()"),1);
assert_eq!(metaobj.add_method("func(QVariant)","int"),2);
let mut obj = metaobj.instantiate();
obj.set_property("name",Variant::String("Kai".to_string()));
obj.emit(0,&[]);
//obj.call(1,&[]);
//obj.call(2,&[Variant::I64(42)]);
engine.set_property("person_one", &obj);
engine.load_local_file("tests/qobject_test.qml");
engine.exec();
}
| 26.866667 | 122 | 0.572374 |
644a742446bf5599b8822fe94afece57b3abe60e | 47 | mod command;
pub use command::CommandBuilder;
| 11.75 | 32 | 0.787234 |
8735176bf6514eeef44e911e36824fe90aba7a41 | 3,713 | //! In-tree testing for deno integration.
//!
//! This module exists because this is way easier than copying the required
//! files.
use anyhow::{Context, Error};
use std::collections::HashMap;
use swc_bundler::{Bundler, Load, Resolve};
use swc_common::{sync::Lrc, FileName, SourceFile, SourceMap, Span, GLOBALS};
use swc_ecma_ast::{Expr, Lit, Module, Str};
use swc_ecma_parser::{lexer::Lexer, JscTarget, Parser, StringInput, Syntax, TsConfig};
use swc_ecma_transforms::typescript::strip;
use swc_ecma_visit::FoldWith;
use url::Url;
#[test]
#[ignore = "Too slow"]
fn oak_6_2_0_application() {
bundle("https://deno.land/x/[email protected]/mod.ts");
}
fn bundle(url: &str) -> Module {
let result = testing::run_test2(false, |cm, _handler| {
GLOBALS.with(|globals| {
let bundler = Bundler::new(
globals,
cm.clone(),
Loader { cm: cm.clone() },
Resolver,
swc_bundler::Config {
require: false,
disable_inliner: true,
..Default::default()
},
Box::new(Hook),
);
let mut entries = HashMap::new();
entries.insert("main".to_string(), FileName::Custom(url.to_string()));
let output = bundler.bundle(entries).unwrap();
Ok(output.into_iter().next().unwrap().module)
})
})
.unwrap();
result
}
#[derive(Clone)]
struct Loader {
cm: Lrc<SourceMap>,
}
impl Load for Loader {
fn load(&self, file: &FileName) -> Result<(Lrc<SourceFile>, Module), Error> {
let url = match file {
FileName::Custom(v) => v,
_ => unreachable!("this test only uses url"),
};
let url = Url::parse(&url).context("failed to parse url")?;
let resp = reqwest::blocking::get(url.clone())
.with_context(|| format!("failed to fetch `{}`", url))?;
let bytes = resp
.bytes()
.with_context(|| format!("failed to read data from `{}`", url))?;
let src = String::from_utf8_lossy(&bytes);
let fm = self
.cm
.new_source_file(FileName::Custom(url.to_string()), src.to_string());
let lexer = Lexer::new(
Syntax::Typescript(TsConfig {
decorators: true,
..Default::default()
}),
JscTarget::Es2020,
StringInput::from(&*fm),
None,
);
let mut parser = Parser::new_from(lexer);
let module = parser.parse_typescript_module().unwrap();
let module = module.fold_with(&mut strip());
Ok((fm, module))
}
}
#[derive(Debug, Clone, Copy)]
struct Resolver;
impl Resolve for Resolver {
fn resolve(&self, base: &FileName, module_specifier: &str) -> Result<FileName, Error> {
let base_url = match base {
FileName::Custom(v) => v,
_ => unreachable!("this test only uses url"),
};
let base_url = Url::parse(&base_url).context("failed to parse url")?;
let options = Url::options();
let base_url = options.base_url(Some(&base_url));
let url = base_url
.parse(module_specifier)
.with_context(|| format!("failed to resolve `{}`", module_specifier))?;
return Ok(FileName::Custom(url.to_string()));
}
}
struct Hook;
impl swc_bundler::Hook for Hook {
fn get_import_meta_url(&self, span: Span, file: &FileName) -> Result<Option<Expr>, Error> {
Ok(Some(Expr::Lit(Lit::Str(Str {
span,
value: file.to_string().into(),
has_escape: false,
}))))
}
}
| 29.943548 | 95 | 0.555885 |
e264615578e42a70ac6502078a2817a71ff9a385 | 46,024 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! An owned, growable vector.
use cast::{forget, transmute};
use clone::Clone;
use cmp::{Ord, Eq, Ordering, TotalEq, TotalOrd};
use container::{Container, Mutable};
use default::Default;
use fmt;
use iter::{DoubleEndedIterator, FromIterator, Extendable, Iterator, range};
use libc::{free, c_void};
use mem::{size_of, move_val_init};
use mem;
use num;
use num::{CheckedMul, CheckedAdd};
use ops::Drop;
use option::{None, Option, Some};
use ptr::RawPtr;
use ptr;
use rt::global_heap::{malloc_raw, realloc_raw};
use raw::Slice;
use slice::{ImmutableEqVector, ImmutableVector, Items, MutItems, MutableVector};
use slice::{MutableTotalOrdVector, OwnedVector, Vector};
/// An owned, growable vector.
///
/// # Examples
///
/// ```rust
/// # use std::vec::Vec;
/// let mut vec = Vec::new();
/// vec.push(1);
/// vec.push(2);
///
/// assert_eq!(vec.len(), 2);
/// assert_eq!(vec.get(0), &1);
///
/// assert_eq!(vec.pop(), Some(2));
/// assert_eq!(vec.len(), 1);
/// ```
///
/// The `vec!` macro is provided to make initialization more convenient:
///
/// ```rust
/// let mut vec = vec!(1, 2, 3);
/// vec.push(4);
/// assert_eq!(vec, vec!(1, 2, 3, 4));
/// ```
#[unsafe_no_drop_flag]
pub struct Vec<T> {
len: uint,
cap: uint,
ptr: *mut T
}
impl<T> Vec<T> {
/// Constructs a new, empty `Vec`.
///
/// The vector will not allocate until elements are pushed onto it.
///
/// # Example
///
/// ```rust
/// # use std::vec::Vec;
/// let mut vec: Vec<int> = Vec::new();
/// ```
#[inline]
pub fn new() -> Vec<T> {
Vec { len: 0, cap: 0, ptr: 0 as *mut T }
}
/// Constructs a new, empty `Vec` with the specified capacity.
///
/// The vector will be able to hold exactly `capacity` elements without
/// reallocating. If `capacity` is 0, the vector will not allocate.
///
/// # Example
///
/// ```rust
/// # use std::vec::Vec;
/// let vec: Vec<int> = Vec::with_capacity(10);
/// ```
pub fn with_capacity(capacity: uint) -> Vec<T> {
if capacity == 0 {
Vec::new()
} else {
let size = capacity.checked_mul(&size_of::<T>()).expect("capacity overflow");
let ptr = unsafe { malloc_raw(size) };
Vec { len: 0, cap: capacity, ptr: ptr as *mut T }
}
}
/// Creates and initializes a `Vec`.
///
/// Creates a `Vec` of size `length` and initializes the elements to the
/// value returned by the closure `op`.
///
/// # Example
///
/// ```rust
/// # use std::vec::Vec;
/// let vec = Vec::from_fn(3, |idx| idx * 2);
/// assert_eq!(vec, vec!(0, 2, 4));
/// ```
pub fn from_fn(length: uint, op: |uint| -> T) -> Vec<T> {
unsafe {
let mut xs = Vec::with_capacity(length);
while xs.len < length {
move_val_init(xs.as_mut_slice().unsafe_mut_ref(xs.len), op(xs.len));
xs.len += 1;
}
xs
}
}
/// Create a `Vec<T>` directly from the raw constituents.
///
/// This is highly unsafe:
///
/// - if `ptr` is null, then `length` and `capacity` should be 0
/// - `ptr` must point to an allocation of size `capacity`
/// - there must be `length` valid instances of type `T` at the
/// beginning of that allocation
/// - `ptr` must be allocated by the default `Vec` allocator
pub unsafe fn from_raw_parts(length: uint, capacity: uint, ptr: *mut T) -> Vec<T> {
Vec { len: length, cap: capacity, ptr: ptr }
}
/// Consumes the `Vec`, partitioning it based on a predicate.
///
/// Partitions the `Vec` into two `Vec`s `(A,B)`, where all elements of `A`
/// satisfy `f` and all elements of `B` do not. The order of elements is
/// preserved.
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2, 3, 4);
/// let (even, odd) = vec.partition(|&n| n % 2 == 0);
/// assert_eq!(even, vec!(2, 4));
/// assert_eq!(odd, vec!(1, 3));
/// ```
#[inline]
pub fn partition(self, f: |&T| -> bool) -> (Vec<T>, Vec<T>) {
let mut lefts = Vec::new();
let mut rights = Vec::new();
for elt in self.move_iter() {
if f(&elt) {
lefts.push(elt);
} else {
rights.push(elt);
}
}
(lefts, rights)
}
}
impl<T: Clone> Vec<T> {
/// Iterates over the `second` vector, copying each element and appending it to
    /// the `first`. Afterwards, the `first` is returned for use again.
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2);
/// let vec = vec.append([3, 4]);
/// assert_eq!(vec, vec!(1, 2, 3, 4));
/// ```
#[inline]
pub fn append(mut self, second: &[T]) -> Vec<T> {
self.push_all(second);
self
}
/// Constructs a `Vec` by cloning elements of a slice.
///
/// # Example
///
/// ```rust
/// # use std::vec::Vec;
/// let slice = [1, 2, 3];
/// let vec = Vec::from_slice(slice);
/// ```
pub fn from_slice(values: &[T]) -> Vec<T> {
values.iter().map(|x| x.clone()).collect()
}
/// Constructs a `Vec` with copies of a value.
///
/// Creates a `Vec` with `length` copies of `value`.
///
/// # Example
/// ```rust
/// # use std::vec::Vec;
/// let vec = Vec::from_elem(3, "hi");
/// println!("{}", vec); // prints [hi, hi, hi]
/// ```
pub fn from_elem(length: uint, value: T) -> Vec<T> {
unsafe {
let mut xs = Vec::with_capacity(length);
while xs.len < length {
move_val_init(xs.as_mut_slice().unsafe_mut_ref(xs.len), value.clone());
xs.len += 1;
}
xs
}
}
/// Appends all elements in a slice to the `Vec`.
///
/// Iterates over the slice `other`, clones each element, and then appends
/// it to this `Vec`. The `other` vector is traversed in-order.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1);
/// vec.push_all([2, 3, 4]);
/// assert_eq!(vec, vec!(1, 2, 3, 4));
/// ```
#[inline]
pub fn push_all(&mut self, other: &[T]) {
self.extend(other.iter().map(|e| e.clone()));
}
/// Grows the `Vec` in-place.
///
/// Adds `n` copies of `value` to the `Vec`.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!("hello");
/// vec.grow(2, &("world"));
/// assert_eq!(vec, vec!("hello", "world", "world"));
/// ```
pub fn grow(&mut self, n: uint, value: &T) {
let new_len = self.len() + n;
self.reserve(new_len);
let mut i: uint = 0u;
while i < n {
self.push((*value).clone());
i += 1u;
}
}
/// Sets the value of a vector element at a given index, growing the vector
/// as needed.
///
/// Sets the element at position `index` to `value`. If `index` is past the
/// end of the vector, expands the vector by replicating `initval` to fill
/// the intervening space.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!("a", "b", "c");
/// vec.grow_set(1, &("fill"), "d");
/// vec.grow_set(4, &("fill"), "e");
/// assert_eq!(vec, vec!("a", "d", "c", "fill", "e"));
/// ```
pub fn grow_set(&mut self, index: uint, initval: &T, value: T) {
let l = self.len();
if index >= l {
self.grow(index - l + 1u, initval);
}
*self.get_mut(index) = value;
}
/// Partitions a vector based on a predicate.
///
/// Clones the elements of the vector, partitioning them into two `Vec`s
/// `(A,B)`, where all elements of `A` satisfy `f` and all elements of `B`
/// do not. The order of elements is preserved.
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2, 3, 4);
/// let (even, odd) = vec.partitioned(|&n| n % 2 == 0);
/// assert_eq!(even, vec!(2, 4));
/// assert_eq!(odd, vec!(1, 3));
/// ```
pub fn partitioned(&self, f: |&T| -> bool) -> (Vec<T>, Vec<T>) {
let mut lefts = Vec::new();
let mut rights = Vec::new();
for elt in self.iter() {
if f(elt) {
lefts.push(elt.clone());
} else {
rights.push(elt.clone());
}
}
(lefts, rights)
}
}
impl<T:Clone> Clone for Vec<T> {
fn clone(&self) -> Vec<T> {
let len = self.len;
let mut vector = Vec::with_capacity(len);
// Unsafe code so this can be optimised to a memcpy (or something
// similarly fast) when T is Copy. LLVM is easily confused, so any
// extra operations during the loop can prevent this optimisation
{
let this_slice = self.as_slice();
while vector.len < len {
unsafe {
mem::move_val_init(
vector.as_mut_slice().unsafe_mut_ref(vector.len),
this_slice.unsafe_ref(vector.len).clone());
}
vector.len += 1;
}
}
vector
}
fn clone_from(&mut self, other: &Vec<T>) {
// drop anything in self that will not be overwritten
if self.len() > other.len() {
self.truncate(other.len())
}
// reuse the contained values' allocations/resources.
for (place, thing) in self.mut_iter().zip(other.iter()) {
place.clone_from(thing)
}
// self.len <= other.len due to the truncate above, so the
// slice here is always in-bounds.
let len = self.len();
self.extend(other.slice_from(len).iter().map(|x| x.clone()));
}
}
impl<T> FromIterator<T> for Vec<T> {
fn from_iter<I:Iterator<T>>(mut iterator: I) -> Vec<T> {
let (lower, _) = iterator.size_hint();
let mut vector = Vec::with_capacity(lower);
for element in iterator {
vector.push(element)
}
vector
}
}
impl<T> Extendable<T> for Vec<T> {
fn extend<I: Iterator<T>>(&mut self, mut iterator: I) {
let (lower, _) = iterator.size_hint();
self.reserve_additional(lower);
for element in iterator {
self.push(element)
}
}
}
impl<T: Eq> Eq for Vec<T> {
#[inline]
fn eq(&self, other: &Vec<T>) -> bool {
self.as_slice() == other.as_slice()
}
}
impl<T: Ord> Ord for Vec<T> {
#[inline]
fn lt(&self, other: &Vec<T>) -> bool {
self.as_slice() < other.as_slice()
}
}
impl<T: TotalEq> TotalEq for Vec<T> {}
impl<T: TotalOrd> TotalOrd for Vec<T> {
#[inline]
fn cmp(&self, other: &Vec<T>) -> Ordering {
self.as_slice().cmp(&other.as_slice())
}
}
impl<T> Container for Vec<T> {
#[inline]
fn len(&self) -> uint {
self.len
}
}
impl<T> Vec<T> {
/// Returns the number of elements the vector can hold without
/// reallocating.
///
/// # Example
///
/// ```rust
/// # use std::vec::Vec;
/// let vec: Vec<int> = Vec::with_capacity(10);
/// assert_eq!(vec.capacity(), 10);
/// ```
#[inline]
pub fn capacity(&self) -> uint {
self.cap
}
    /// Reserves capacity for at least `extra` additional elements in the given
/// vector.
///
/// # Failure
///
/// Fails if the new capacity overflows `uint`.
///
/// # Example
///
/// ```rust
/// # use std::vec::Vec;
/// let mut vec: Vec<int> = vec!(1);
/// vec.reserve_additional(10);
/// assert!(vec.capacity() >= 11);
/// ```
pub fn reserve_additional(&mut self, extra: uint) {
if self.cap - self.len < extra {
match self.len.checked_add(&extra) {
None => fail!("Vec::reserve_additional: `uint` overflow"),
Some(new_cap) => self.reserve(new_cap)
}
}
}
    /// Reserves capacity for at least `capacity` elements in the given vector.
///
/// This function will over-allocate in order to amortize the allocation
/// costs in scenarios where the caller may need to repeatedly reserve
/// additional space.
///
/// If the capacity for `self` is already equal to or greater than the
/// requested capacity, then no action is taken.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3);
/// vec.reserve(10);
/// assert!(vec.capacity() >= 10);
/// ```
pub fn reserve(&mut self, capacity: uint) {
if capacity >= self.len {
self.reserve_exact(num::next_power_of_two(capacity))
}
}
/// Reserves capacity for exactly `capacity` elements in the given vector.
///
/// If the capacity for `self` is already equal to or greater than the
/// requested capacity, then no action is taken.
///
/// # Example
///
/// ```rust
/// # use std::vec::Vec;
/// let mut vec: Vec<int> = Vec::with_capacity(10);
/// vec.reserve_exact(11);
/// assert_eq!(vec.capacity(), 11);
/// ```
pub fn reserve_exact(&mut self, capacity: uint) {
if capacity > self.cap {
let size = capacity.checked_mul(&size_of::<T>()).expect("capacity overflow");
self.cap = capacity;
unsafe {
self.ptr = realloc_raw(self.ptr as *mut u8, size) as *mut T;
}
}
}
/// Shrink the capacity of the vector to match the length
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3);
/// vec.shrink_to_fit();
/// assert_eq!(vec.capacity(), vec.len());
/// ```
pub fn shrink_to_fit(&mut self) {
if self.len == 0 {
unsafe { free(self.ptr as *mut c_void) };
self.cap = 0;
self.ptr = 0 as *mut T;
} else {
unsafe {
// Overflow check is unnecessary as the vector is already at least this large.
self.ptr = realloc_raw(self.ptr as *mut u8, self.len * size_of::<T>()) as *mut T;
}
self.cap = self.len;
}
}
/// Remove the last element from a vector and return it, or `None` if it is
/// empty.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3);
/// assert_eq!(vec.pop(), Some(3));
/// assert_eq!(vec, vec!(1, 2));
/// ```
#[inline]
pub fn pop(&mut self) -> Option<T> {
if self.len == 0 {
None
} else {
unsafe {
self.len -= 1;
Some(ptr::read(self.as_slice().unsafe_ref(self.len())))
}
}
}
/// Append an element to a vector.
///
/// # Failure
///
/// Fails if the number of elements in the vector overflows a `uint`.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2);
/// vec.push(3);
/// assert_eq!(vec, vec!(1, 2, 3));
/// ```
#[inline]
pub fn push(&mut self, value: T) {
if self.len == self.cap {
if self.cap == 0 { self.cap += 2 }
let old_size = self.cap * size_of::<T>();
self.cap = self.cap * 2;
let size = old_size * 2;
if old_size > size { fail!("capacity overflow") }
unsafe {
self.ptr = realloc_raw(self.ptr as *mut u8, size) as *mut T;
}
}
unsafe {
let end = (self.ptr as *T).offset(self.len as int) as *mut T;
move_val_init(&mut *end, value);
self.len += 1;
}
}
/// Appends one element to the vector provided. The vector itself is then
/// returned for use again.
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2);
/// let vec = vec.append_one(3);
/// assert_eq!(vec, vec!(1, 2, 3));
/// ```
#[inline]
pub fn append_one(mut self, x: T) -> Vec<T> {
self.push(x);
self
}
/// Shorten a vector, dropping excess elements.
///
/// If `len` is greater than the vector's current length, this has no
/// effect.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3, 4);
/// vec.truncate(2);
/// assert_eq!(vec, vec!(1, 2));
/// ```
pub fn truncate(&mut self, len: uint) {
unsafe {
let mut i = len;
// drop any extra elements
while i < self.len {
ptr::read(self.as_slice().unsafe_ref(i));
i += 1;
}
}
self.len = len;
}
/// Work with `self` as a mutable slice.
///
/// # Example
///
/// ```rust
/// fn foo(slice: &mut [int]) {}
///
/// let mut vec = vec!(1, 2);
/// foo(vec.as_mut_slice());
/// ```
#[inline]
pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
unsafe {
transmute(Slice { data: self.as_mut_ptr() as *T, len: self.len })
}
}
/// Creates a consuming iterator, that is, one that moves each
/// value out of the vector (from start to end). The vector cannot
/// be used after calling this.
///
/// # Example
///
/// ```rust
/// let v = vec!("a".to_owned(), "b".to_owned());
/// for s in v.move_iter() {
/// // s has type ~str, not &~str
/// println!("{}", s);
/// }
/// ```
#[inline]
pub fn move_iter(self) -> MoveItems<T> {
unsafe {
let iter = transmute(self.as_slice().iter());
let ptr = self.ptr as *mut c_void;
forget(self);
MoveItems { allocation: ptr, iter: iter }
}
}
/// Sets the length of a vector.
///
/// This will explicitly set the size of the vector, without actually
/// modifying its buffers, so it is up to the caller to ensure that the
/// vector is actually the specified size.
#[inline]
pub unsafe fn set_len(&mut self, len: uint) {
self.len = len;
}
/// Returns a reference to the value at index `index`.
///
/// # Failure
///
/// Fails if `index` is out of bounds
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2, 3);
/// assert!(vec.get(1) == &2);
/// ```
#[inline]
pub fn get<'a>(&'a self, index: uint) -> &'a T {
&self.as_slice()[index]
}
/// Returns a mutable reference to the value at index `index`.
///
/// # Failure
///
/// Fails if `index` is out of bounds
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3);
/// *vec.get_mut(1) = 4;
/// assert_eq!(vec, vec!(1, 4, 3));
/// ```
#[inline]
pub fn get_mut<'a>(&'a mut self, index: uint) -> &'a mut T {
&mut self.as_mut_slice()[index]
}
/// Returns an iterator over references to the elements of the vector in
/// order.
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2, 3);
/// for num in vec.iter() {
/// println!("{}", *num);
/// }
/// ```
#[inline]
pub fn iter<'a>(&'a self) -> Items<'a,T> {
self.as_slice().iter()
}
/// Returns an iterator over mutable references to the elements of the
/// vector in order.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3);
/// for num in vec.mut_iter() {
/// *num = 0;
/// }
/// ```
#[inline]
pub fn mut_iter<'a>(&'a mut self) -> MutItems<'a,T> {
self.as_mut_slice().mut_iter()
}
/// Sort the vector, in place, using `compare` to compare elements.
///
/// This sort is `O(n log n)` worst-case and stable, but allocates
/// approximately `2 * n`, where `n` is the length of `self`.
///
/// # Example
///
/// ```rust
/// let mut v = vec!(5i, 4, 1, 3, 2);
/// v.sort_by(|a, b| a.cmp(b));
/// assert_eq!(v, vec!(1, 2, 3, 4, 5));
///
/// // reverse sorting
/// v.sort_by(|a, b| b.cmp(a));
/// assert_eq!(v, vec!(5, 4, 3, 2, 1));
/// ```
#[inline]
pub fn sort_by(&mut self, compare: |&T, &T| -> Ordering) {
self.as_mut_slice().sort_by(compare)
}
/// Returns a slice of `self` between `start` and `end`.
///
/// # Failure
///
/// Fails when `start` or `end` point outside the bounds of `self`, or when
/// `start` > `end`.
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2, 3, 4);
/// assert!(vec.slice(0, 2) == [1, 2]);
/// ```
#[inline]
pub fn slice<'a>(&'a self, start: uint, end: uint) -> &'a [T] {
self.as_slice().slice(start, end)
}
/// Returns a slice containing all but the first element of the vector.
///
/// # Failure
///
/// Fails when the vector is empty.
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2, 3);
/// assert!(vec.tail() == [2, 3]);
/// ```
#[inline]
pub fn tail<'a>(&'a self) -> &'a [T] {
self.as_slice().tail()
}
    /// Returns all but the first `n` elements of a vector.
///
/// # Failure
///
/// Fails when there are fewer than `n` elements in the vector.
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2, 3, 4);
/// assert!(vec.tailn(2) == [3, 4]);
/// ```
#[inline]
pub fn tailn<'a>(&'a self, n: uint) -> &'a [T] {
self.as_slice().tailn(n)
}
/// Returns a reference to the last element of a vector, or `None` if it is
/// empty.
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2, 3);
/// assert!(vec.last() == Some(&3));
/// ```
#[inline]
pub fn last<'a>(&'a self) -> Option<&'a T> {
self.as_slice().last()
}
/// Returns a mutable reference to the last element of a vector, or `None`
/// if it is empty.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3);
/// *vec.mut_last().unwrap() = 4;
/// assert_eq!(vec, vec!(1, 2, 4));
/// ```
#[inline]
pub fn mut_last<'a>(&'a mut self) -> Option<&'a mut T> {
self.as_mut_slice().mut_last()
}
/// Remove an element from anywhere in the vector and return it, replacing
/// it with the last element. This does not preserve ordering, but is O(1).
///
/// Returns `None` if `index` is out of bounds.
///
/// # Example
/// ```rust
/// let mut v = vec!("foo".to_owned(), "bar".to_owned(), "baz".to_owned(), "qux".to_owned());
///
/// assert_eq!(v.swap_remove(1), Some("bar".to_owned()));
/// assert_eq!(v, vec!("foo".to_owned(), "qux".to_owned(), "baz".to_owned()));
///
/// assert_eq!(v.swap_remove(0), Some("foo".to_owned()));
/// assert_eq!(v, vec!("baz".to_owned(), "qux".to_owned()));
///
/// assert_eq!(v.swap_remove(2), None);
/// ```
#[inline]
    pub fn swap_remove(&mut self, index: uint) -> Option<T> {
        let length = self.len();
        // Check the bounds first so that `length - 1` cannot underflow when the
        // vector is empty; out-of-bounds indices return `None` as documented.
        if index >= length {
            return None
        }
        if index < length - 1 {
            self.as_mut_slice().swap(index, length - 1);
        }
        self.pop()
    }
/// Prepend an element to the vector.
///
/// # Warning
///
/// This is an O(n) operation as it requires copying every element in the
/// vector.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3);
/// vec.unshift(4);
/// assert_eq!(vec, vec!(4, 1, 2, 3));
/// ```
#[inline]
pub fn unshift(&mut self, element: T) {
self.insert(0, element)
}
/// Removes the first element from a vector and returns it, or `None` if
/// the vector is empty.
///
/// # Warning
///
/// This is an O(n) operation as it requires copying every element in the
/// vector.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3);
/// assert!(vec.shift() == Some(1));
/// assert_eq!(vec, vec!(2, 3));
/// ```
#[inline]
pub fn shift(&mut self) -> Option<T> {
self.remove(0)
}
/// Insert an element at position `index` within the vector, shifting all
    /// elements after position `index` one position to the right.
///
/// # Failure
///
/// Fails if `index` is out of bounds of the vector.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3);
/// vec.insert(1, 4);
/// assert_eq!(vec, vec!(1, 4, 2, 3));
/// ```
pub fn insert(&mut self, index: uint, element: T) {
let len = self.len();
assert!(index <= len);
// space for the new element
self.reserve(len + 1);
unsafe { // infallible
// The spot to put the new value
{
let p = self.as_mut_ptr().offset(index as int);
// Shift everything over to make space. (Duplicating the
// `index`th element into two consecutive places.)
ptr::copy_memory(p.offset(1), &*p, len - index);
// Write it in, overwriting the first copy of the `index`th
// element.
move_val_init(&mut *p, element);
}
self.set_len(len + 1);
}
}
/// Remove and return the element at position `index` within the vector,
/// shifting all elements after position `index` one position to the left.
    /// Returns `None` if `index` is out of bounds.
///
/// # Example
///
/// ```rust
/// let mut v = vec!(1, 2, 3);
/// assert_eq!(v.remove(1), Some(2));
/// assert_eq!(v, vec!(1, 3));
///
/// assert_eq!(v.remove(4), None);
/// // v is unchanged:
/// assert_eq!(v, vec!(1, 3));
/// ```
pub fn remove(&mut self, index: uint) -> Option<T> {
let len = self.len();
if index < len {
unsafe { // infallible
let ret;
{
// the place we are taking from.
let ptr = self.as_mut_ptr().offset(index as int);
// copy it out, unsafely having a copy of the value on
// the stack and in the vector at the same time.
ret = Some(ptr::read(ptr as *T));
// Shift everything down to fill in that spot.
ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1);
}
self.set_len(len - 1);
ret
}
} else {
None
}
}
/// Takes ownership of the vector `other`, moving all elements into
/// the current vector. This does not copy any elements, and it is
/// illegal to use the `other` vector after calling this method
/// (because it is moved here).
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(~1);
/// vec.push_all_move(vec!(~2, ~3, ~4));
/// assert_eq!(vec, vec!(~1, ~2, ~3, ~4));
/// ```
pub fn push_all_move(&mut self, other: Vec<T>) {
self.extend(other.move_iter());
}
/// Returns a mutable slice of `self` between `start` and `end`.
///
/// # Failure
///
/// Fails when `start` or `end` point outside the bounds of `self`, or when
/// `start` > `end`.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3, 4);
/// assert!(vec.mut_slice(0, 2) == [1, 2]);
/// ```
#[inline]
pub fn mut_slice<'a>(&'a mut self, start: uint, end: uint)
-> &'a mut [T] {
self.as_mut_slice().mut_slice(start, end)
}
/// Returns a mutable slice of self from `start` to the end of the vec.
///
/// # Failure
///
/// Fails when `start` points outside the bounds of self.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3, 4);
/// assert!(vec.mut_slice_from(2) == [3, 4]);
/// ```
#[inline]
pub fn mut_slice_from<'a>(&'a mut self, start: uint) -> &'a mut [T] {
self.as_mut_slice().mut_slice_from(start)
}
/// Returns a mutable slice of self from the start of the vec to `end`.
///
/// # Failure
///
/// Fails when `end` points outside the bounds of self.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3, 4);
/// assert!(vec.mut_slice_to(2) == [1, 2]);
/// ```
#[inline]
pub fn mut_slice_to<'a>(&'a mut self, end: uint) -> &'a mut [T] {
self.as_mut_slice().mut_slice_to(end)
}
/// Returns a pair of mutable slices that divides the vec at an index.
///
/// The first will contain all indices from `[0, mid)` (excluding
/// the index `mid` itself) and the second will contain all
/// indices from `[mid, len)` (excluding the index `len` itself).
///
/// # Failure
///
/// Fails if `mid > len`.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 3, 4, 5, 6);
///
/// // scoped to restrict the lifetime of the borrows
/// {
/// let (left, right) = vec.mut_split_at(0);
/// assert!(left == &mut []);
/// assert!(right == &mut [1, 2, 3, 4, 5, 6]);
/// }
///
/// {
/// let (left, right) = vec.mut_split_at(2);
/// assert!(left == &mut [1, 2]);
/// assert!(right == &mut [3, 4, 5, 6]);
/// }
///
/// {
/// let (left, right) = vec.mut_split_at(6);
/// assert!(left == &mut [1, 2, 3, 4, 5, 6]);
/// assert!(right == &mut []);
/// }
/// ```
#[inline]
pub fn mut_split_at<'a>(&'a mut self, mid: uint) -> (&'a mut [T], &'a mut [T]) {
self.as_mut_slice().mut_split_at(mid)
}
/// Reverse the order of elements in a vector, in place.
///
/// # Example
///
/// ```rust
/// let mut v = vec!(1, 2, 3);
/// v.reverse();
/// assert_eq!(v, vec!(3, 2, 1));
/// ```
#[inline]
pub fn reverse(&mut self) {
self.as_mut_slice().reverse()
}
/// Returns a slice of `self` from `start` to the end of the vec.
///
/// # Failure
///
/// Fails when `start` points outside the bounds of self.
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2, 3);
/// assert!(vec.slice_from(1) == [2, 3]);
/// ```
#[inline]
pub fn slice_from<'a>(&'a self, start: uint) -> &'a [T] {
self.as_slice().slice_from(start)
}
/// Returns a slice of self from the start of the vec to `end`.
///
/// # Failure
///
/// Fails when `end` points outside the bounds of self.
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2, 3);
/// assert!(vec.slice_to(2) == [1, 2]);
/// ```
#[inline]
pub fn slice_to<'a>(&'a self, end: uint) -> &'a [T] {
self.as_slice().slice_to(end)
}
/// Returns a slice containing all but the last element of the vector.
///
/// # Failure
///
/// Fails if the vector is empty
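    ///
    /// # Example
    ///
    /// ```rust
    /// // `init` keeps everything except the last element
    /// let vec = vec!(1, 2, 3);
    /// assert!(vec.init() == [1, 2]);
    /// ```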
#[inline]
pub fn init<'a>(&'a self) -> &'a [T] {
self.slice(0, self.len() - 1)
}
/// Returns an unsafe pointer to the vector's buffer.
///
/// The caller must ensure that the vector outlives the pointer this
/// function returns, or else it will end up pointing to garbage.
///
/// Modifying the vector may cause its buffer to be reallocated, which
/// would also make any pointers to it invalid.
#[inline]
pub fn as_ptr(&self) -> *T {
// If we have a 0-sized vector, then the base pointer should not be NULL
// because an iterator over the slice will attempt to yield the base
// pointer as the first element in the vector, but this will end up
// being Some(NULL) which is optimized to None.
if mem::size_of::<T>() == 0 {
1 as *T
} else {
self.ptr as *T
}
}
/// Returns a mutable unsafe pointer to the vector's buffer.
///
/// The caller must ensure that the vector outlives the pointer this
/// function returns, or else it will end up pointing to garbage.
///
/// Modifying the vector may cause its buffer to be reallocated, which
/// would also make any pointers to it invalid.
#[inline]
pub fn as_mut_ptr(&mut self) -> *mut T {
// see above for the 0-size check
if mem::size_of::<T>() == 0 {
1 as *mut T
} else {
self.ptr
}
}
/// Retains only the elements specified by the predicate.
///
/// In other words, remove all elements `e` such that `f(&e)` returns false.
    /// This method operates in place and preserves the order of the retained elements.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1i, 2, 3, 4);
/// vec.retain(|x| x%2 == 0);
/// assert_eq!(vec, vec!(2, 4));
/// ```
pub fn retain(&mut self, f: |&T| -> bool) {
let len = self.len();
let mut del = 0u;
{
let v = self.as_mut_slice();
for i in range(0u, len) {
if !f(&v[i]) {
del += 1;
} else if del > 0 {
v.swap(i-del, i);
}
}
}
if del > 0 {
self.truncate(len - del);
}
}
/// Expands a vector in place, initializing the new elements to the result of a function.
///
    /// The vector is grown by `n` elements. The i-th new element is initialized to the value
/// returned by `f(i)` where `i` is in the range [0, n).
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(0u, 1);
/// vec.grow_fn(3, |i| i);
/// assert_eq!(vec, vec!(0, 1, 0, 1, 2));
/// ```
pub fn grow_fn(&mut self, n: uint, f: |uint| -> T) {
self.reserve_additional(n);
for i in range(0u, n) {
self.push(f(i));
}
}
}
impl<T:TotalOrd> Vec<T> {
/// Sorts the vector in place.
///
/// This sort is `O(n log n)` worst-case and stable, but allocates
/// approximately `2 * n`, where `n` is the length of `self`.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(3i, 1, 2);
/// vec.sort();
/// assert_eq!(vec, vec!(1, 2, 3));
/// ```
pub fn sort(&mut self) {
self.as_mut_slice().sort()
}
}
impl<T> Mutable for Vec<T> {
#[inline]
fn clear(&mut self) {
self.truncate(0)
}
}
impl<T:Eq> Vec<T> {
/// Return true if a vector contains an element with the given value
///
/// # Example
///
/// ```rust
/// let vec = vec!(1, 2, 3);
/// assert!(vec.contains(&1));
/// ```
pub fn contains(&self, x: &T) -> bool {
self.as_slice().contains(x)
}
/// Remove consecutive repeated elements in the vector.
///
/// If the vector is sorted, this removes all duplicates.
///
/// # Example
///
/// ```rust
/// let mut vec = vec!(1, 2, 2, 3, 2);
/// vec.dedup();
/// assert_eq!(vec, vec!(1, 2, 3, 2));
/// ```
pub fn dedup(&mut self) {
unsafe {
// Although we have a mutable reference to `self`, we cannot make
// *arbitrary* changes. The `Eq` comparisons could fail, so we
// must ensure that the vector is in a valid state at all time.
//
// The way that we handle this is by using swaps; we iterate
// over all the elements, swapping as we go so that at the end
// the elements we wish to keep are in the front, and those we
// wish to reject are at the back. We can then truncate the
// vector. This operation is still O(n).
//
// Example: We start in this state, where `r` represents "next
            // read" and `w` represents "next_write".
//
// r
// +---+---+---+---+---+---+
// | 0 | 1 | 1 | 2 | 3 | 3 |
// +---+---+---+---+---+---+
// w
//
// Comparing self[r] against self[w-1], this is not a duplicate, so
// we swap self[r] and self[w] (no effect as r==w) and then increment both
// r and w, leaving us with:
//
// r
// +---+---+---+---+---+---+
// | 0 | 1 | 1 | 2 | 3 | 3 |
// +---+---+---+---+---+---+
// w
//
// Comparing self[r] against self[w-1], this value is a duplicate,
// so we increment `r` but leave everything else unchanged:
//
// r
// +---+---+---+---+---+---+
// | 0 | 1 | 1 | 2 | 3 | 3 |
// +---+---+---+---+---+---+
// w
//
// Comparing self[r] against self[w-1], this is not a duplicate,
// so swap self[r] and self[w] and advance r and w:
//
// r
// +---+---+---+---+---+---+
// | 0 | 1 | 2 | 1 | 3 | 3 |
// +---+---+---+---+---+---+
// w
//
// Not a duplicate, repeat:
//
// r
// +---+---+---+---+---+---+
// | 0 | 1 | 2 | 3 | 1 | 3 |
// +---+---+---+---+---+---+
// w
//
// Duplicate, advance r. End of vec. Truncate to w.
let ln = self.len();
if ln < 1 { return; }
// Avoid bounds checks by using unsafe pointers.
let p = self.as_mut_slice().as_mut_ptr();
let mut r = 1;
let mut w = 1;
while r < ln {
let p_r = p.offset(r as int);
let p_wm1 = p.offset((w - 1) as int);
if *p_r != *p_wm1 {
if r != w {
let p_w = p_wm1.offset(1);
mem::swap(&mut *p_r, &mut *p_w);
}
w += 1;
}
r += 1;
}
self.truncate(w);
}
}
}
impl<T> Vector<T> for Vec<T> {
/// Work with `self` as a slice.
///
/// # Example
///
/// ```rust
/// fn foo(slice: &[int]) {}
///
/// let vec = vec!(1, 2);
/// foo(vec.as_slice());
/// ```
#[inline]
fn as_slice<'a>(&'a self) -> &'a [T] {
unsafe { transmute(Slice { data: self.as_ptr(), len: self.len }) }
}
}
#[unsafe_destructor]
impl<T> Drop for Vec<T> {
fn drop(&mut self) {
// This is (and should always remain) a no-op if the fields are
// zeroed (when moving out, because of #[unsafe_no_drop_flag]).
unsafe {
for x in self.as_mut_slice().iter() {
ptr::read(x);
}
free(self.ptr as *mut c_void)
}
}
}
impl<T> Default for Vec<T> {
fn default() -> Vec<T> {
Vec::new()
}
}
impl<T:fmt::Show> fmt::Show for Vec<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.as_slice().fmt(f)
}
}
/// An iterator that moves out of a vector.
pub struct MoveItems<T> {
allocation: *mut c_void, // the block of memory allocated for the vector
iter: Items<'static, T>
}
impl<T> Iterator<T> for MoveItems<T> {
#[inline]
fn next(&mut self) -> Option<T> {
unsafe {
self.iter.next().map(|x| ptr::read(x))
}
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
self.iter.size_hint()
}
}
impl<T> DoubleEndedIterator<T> for MoveItems<T> {
#[inline]
fn next_back(&mut self) -> Option<T> {
unsafe {
self.iter.next_back().map(|x| ptr::read(x))
}
}
}
#[unsafe_destructor]
impl<T> Drop for MoveItems<T> {
fn drop(&mut self) {
// destroy the remaining elements
for _x in *self {}
unsafe {
free(self.allocation)
}
}
}
#[cfg(test)]
mod tests {
use prelude::*;
use mem::size_of;
#[test]
fn test_small_vec_struct() {
assert!(size_of::<Vec<u8>>() == size_of::<uint>() * 3);
}
#[test]
fn test_double_drop() {
struct TwoVec<T> {
x: Vec<T>,
y: Vec<T>
}
struct DropCounter<'a> {
count: &'a mut int
}
#[unsafe_destructor]
impl<'a> Drop for DropCounter<'a> {
fn drop(&mut self) {
*self.count += 1;
}
}
let mut count_x @ mut count_y = 0;
{
let mut tv = TwoVec {
x: Vec::new(),
y: Vec::new()
};
tv.x.push(DropCounter {count: &mut count_x});
tv.y.push(DropCounter {count: &mut count_y});
// If Vec had a drop flag, here is where it would be zeroed.
// Instead, it should rely on its internal state to prevent
// doing anything significant when dropped multiple times.
drop(tv.x);
// Here tv goes out of scope, tv.y should be dropped, but not tv.x.
}
assert_eq!(count_x, 1);
assert_eq!(count_y, 1);
}
#[test]
fn test_reserve_additional() {
let mut v = Vec::new();
assert_eq!(v.capacity(), 0);
v.reserve_additional(2);
assert!(v.capacity() >= 2);
for i in range(0, 16) {
v.push(i);
}
assert!(v.capacity() >= 16);
v.reserve_additional(16);
assert!(v.capacity() >= 32);
v.push(16);
v.reserve_additional(16);
assert!(v.capacity() >= 33)
}
#[test]
fn test_extend() {
let mut v = Vec::new();
let mut w = Vec::new();
v.extend(range(0, 3));
for i in range(0, 3) { w.push(i) }
assert_eq!(v, w);
v.extend(range(3, 10));
for i in range(3, 10) { w.push(i) }
assert_eq!(v, w);
}
#[test]
fn test_mut_slice_from() {
let mut values = Vec::from_slice([1u8,2,3,4,5]);
{
let slice = values.mut_slice_from(2);
assert!(slice == [3, 4, 5]);
for p in slice.mut_iter() {
*p += 2;
}
}
assert!(values.as_slice() == [1, 2, 5, 6, 7]);
}
#[test]
fn test_mut_slice_to() {
let mut values = Vec::from_slice([1u8,2,3,4,5]);
{
let slice = values.mut_slice_to(2);
assert!(slice == [1, 2]);
for p in slice.mut_iter() {
*p += 1;
}
}
assert!(values.as_slice() == [2, 3, 3, 4, 5]);
}
#[test]
fn test_mut_split_at() {
let mut values = Vec::from_slice([1u8,2,3,4,5]);
{
let (left, right) = values.mut_split_at(2);
assert!(left.slice(0, left.len()) == [1, 2]);
for p in left.mut_iter() {
*p += 1;
}
assert!(right.slice(0, right.len()) == [3, 4, 5]);
for p in right.mut_iter() {
*p += 2;
}
}
assert!(values == Vec::from_slice([2u8, 3, 5, 6, 7]));
}
#[test]
fn test_clone() {
let v: Vec<int> = vec!();
let w = vec!(1, 2, 3);
assert_eq!(v, v.clone());
let z = w.clone();
assert_eq!(w, z);
// they should be disjoint in memory.
assert!(w.as_ptr() != z.as_ptr())
}
#[test]
fn test_clone_from() {
let mut v = vec!();
let three = vec!(box 1, box 2, box 3);
let two = vec!(box 4, box 5);
// zero, long
v.clone_from(&three);
assert_eq!(v, three);
// equal
v.clone_from(&three);
assert_eq!(v, three);
// long, short
v.clone_from(&two);
assert_eq!(v, two);
// short, long
v.clone_from(&three);
assert_eq!(v, three)
}
#[test]
fn test_grow_fn() {
let mut v = Vec::from_slice([0u, 1]);
v.grow_fn(3, |i| i);
assert!(v == Vec::from_slice([0u, 1, 0, 1, 2]));
}
#[test]
fn test_retain() {
let mut vec = Vec::from_slice([1u, 2, 3, 4]);
vec.retain(|x| x%2 == 0);
assert!(vec == Vec::from_slice([2u, 4]));
}
#[test]
fn zero_sized_values() {
let mut v = Vec::new();
assert_eq!(v.len(), 0);
v.push(());
assert_eq!(v.len(), 1);
v.push(());
assert_eq!(v.len(), 2);
assert_eq!(v.pop(), Some(()));
assert_eq!(v.pop(), Some(()));
assert_eq!(v.pop(), None);
assert_eq!(v.iter().len(), 0);
v.push(());
assert_eq!(v.iter().len(), 1);
v.push(());
assert_eq!(v.iter().len(), 2);
for &() in v.iter() {}
assert_eq!(v.mut_iter().len(), 2);
v.push(());
assert_eq!(v.mut_iter().len(), 3);
v.push(());
assert_eq!(v.mut_iter().len(), 4);
for &() in v.mut_iter() {}
unsafe { v.set_len(0); }
assert_eq!(v.mut_iter().len(), 0);
}
}
| 27.859564 | 97 | 0.476708 |
6a6550009ecebb251231cb56f312cc6b5873df9e | 356 | // test4.rs
// This test covers the sections:
// - Modules
// - Macros
// Write a macro that passes the test! No hints this time, you can do it!
macro_rules! my_macro {
    ($val:expr) => {
        "Hello ".to_owned() + $val
    }
}
// DONE! 🥳
fn main() {
if my_macro!("world!") != "Hello world!" {
panic!("Oh no! Wrong output!");
}
}
| 17.8 | 73 | 0.547753 |