hexsha
stringlengths
40
40
size
int64
4
1.05M
content
stringlengths
4
1.05M
avg_line_length
float64
1.33
100
max_line_length
int64
1
1k
alphanum_fraction
float64
0.25
1
bfada9329402e05874ee8de5e216f2b1d5bf70eb
14,659
//! Deserialize postgres rows into a Rust data structure. use serde::de::{ self, Deserialize, Visitor, IntoDeserializer, value::SeqDeserializer }; use tokio_postgres::row::Row; use error::{Error, Result}; /// A structure that deserialize Postgres rows into Rust values. pub struct Deserializer { input: Row, index: usize, } impl Deserializer { /// Create a `Row` deserializer from a `Row`. pub fn from_row(input: Row) -> Self { Self { index: 0, input } } } /// Attempt to deserialize from a single `Row`. pub fn from_row<'a, T: Deserialize<'a>>(input: Row) -> Result<T> { let mut deserializer = Deserializer::from_row(input); Ok(T::deserialize(&mut deserializer)?) } /// Attempt to deserialize from `Rows`. //pub fn from_rows<'a, T: Deserialize<'a>>(input: &'a Rows) -> Result<Vec<T>> { // input.into_iter().map(|row| { // let mut deserializer = Deserializer::from_row(row); // T::deserialize(&mut deserializer) // }).collect() //} macro_rules! unsupported_type { ($($fn_name:ident),*,) => { $( fn $fn_name<V: Visitor<'de>>(self, _: V) -> Result<V::Value> { Err(Error::UnsupportedType) } )* } } macro_rules! get_value { ($this:ident, $v:ident, $fn_call:ident, $ty:ty) => {{ $v.$fn_call($this.input.try_get::<_, $ty>($this.index) .map_err(|e| Error::InvalidType(format!("{:?}", e)))?) }} } impl<'de, 'b> de::Deserializer<'de> for &'b mut Deserializer { type Error = Error; unsupported_type! 
{ deserialize_any, deserialize_u8, deserialize_u16, deserialize_u64, deserialize_char, deserialize_str, deserialize_bytes, deserialize_unit, deserialize_identifier, deserialize_option, } fn deserialize_ignored_any<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { visitor.visit_unit() } fn deserialize_bool<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { get_value!(self, visitor, visit_bool, bool) } fn deserialize_i8<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { get_value!(self, visitor, visit_i8, i8) } fn deserialize_i16<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { get_value!(self, visitor, visit_i16, i16) } fn deserialize_i32<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { get_value!(self, visitor, visit_i32, i32) } fn deserialize_i64<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { get_value!(self, visitor, visit_i64, i64) } fn deserialize_u32<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { get_value!(self, visitor, visit_u32, u32) } fn deserialize_f32<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { get_value!(self, visitor, visit_f32, f32) } fn deserialize_f64<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { get_value!(self, visitor, visit_f64, f64) } fn deserialize_string<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { get_value!(self, visitor, visit_string, String) } fn deserialize_byte_buf<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { get_value!(self, visitor, visit_byte_buf, Vec<u8>) } fn deserialize_seq<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { let raw = self.input.try_get::<_, Vec<u8>>(self.index) .map_err(|e| Error::InvalidType(format!("{:?}", e)))?; visitor.visit_seq(SeqDeserializer::new(raw.into_iter())) } fn deserialize_enum<V: Visitor<'de>>(self, _: &str, _: &[&str], _visitor: V) -> Result<V::Value> { //visitor.visit_enum(self) Err(Error::UnsupportedType) } fn deserialize_unit_struct<V: Visitor<'de>>(self, _: &str, _: V) -> 
Result<V::Value> { Err(Error::UnsupportedType) } fn deserialize_newtype_struct<V: Visitor<'de>>(self, _: &str, _: V) -> Result<V::Value> { Err(Error::UnsupportedType) } fn deserialize_tuple<V: Visitor<'de>>(self, _: usize, _: V) -> Result<V::Value> { Err(Error::UnsupportedType) } fn deserialize_tuple_struct<V: Visitor<'de>>(self, _: &str, _: usize, _: V) -> Result<V::Value> { Err(Error::UnsupportedType) } fn deserialize_map<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> { visitor.visit_map(self) } fn deserialize_struct<V: Visitor<'de>>(self, _: &'static str, _: &'static [&'static str], v: V) -> Result<V::Value> { self.deserialize_map(v) } } impl<'de> de::MapAccess<'de> for Deserializer { type Error = Error; fn next_key_seed<T: de::DeserializeSeed<'de>>(&mut self, seed: T) -> Result<Option<T::Value>> { if self.index >= self.input.columns().len() { return Ok(None) } self.input.columns() .get(self.index) .ok_or(Error::UnknownField) .map(|c| c.name().to_owned().into_deserializer()) .and_then(|n| seed.deserialize(n).map(Some)) } fn next_value_seed<T: de::DeserializeSeed<'de>>(&mut self, seed: T) -> Result<T::Value> { let result = seed.deserialize(&mut *self); self.index += 1; if let Err(Error::InvalidType(err)) = result { let name = self.input.columns().get(self.index - 1).unwrap().name(); Err(Error::InvalidType(format!("{} {}", name, err))) } else { result } } } /* impl<'de, 'a, 'b> de::EnumAccess<'de> for &'b mut Deserializer<'a> { type Error = Error; type Variant = Self; fn variant_seed<V: de::DeserializeSeed<'de>>(self, seed: V) -> Result<(V::Value, Self::Variant)> { let value = seed.deserialize(self); } } impl<'de, 'a, 'b> de::VariantAccess<'de> for &'b mut Deserializer<'a> { type Error = Error; fn unit_variant(self) -> Result<()> { Ok(()) } fn newtype_variant_seed<T: de::DeserializeSeed<'de>>(self, seed: T) -> Result<T::Value> { self.input.get_opt::<_, T::Value>(self.index) .unwrap() .map_err(|_| Error::InvalidType) } fn tuple_variant<V: 
Visitor<'de>>(self, _: usize, _: V) -> Result<V::Value> { unimplemented!("tuple_variant") } fn struct_variant<V: Visitor<'de>>(self, _: &[&str], _: V) -> Result<V::Value> { unimplemented!("struct_variant") } } */ #[cfg(test)] mod tests { use std::env; use serde_derive::Deserialize; use postgres::Connection; fn setup_and_connect_to_db() -> Connection { let user = env::var("PGUSER").unwrap_or("postgres".into()); let pass = env::var("PGPASSWORD").map(|p| format!("{}", p)).unwrap_or("postgres".into()); let addr = env::var("PGADDR").unwrap_or("localhost".into()); let port = env::var("PGPORT").unwrap_or("5432".into()); let url = format!("postgres://{user}:{pass}@{addr}:{port}", user = user, pass = pass, addr = addr, port = port); Connection::connect(url, postgres::TlsMode::None).unwrap() } #[test] fn non_null() { #[derive(Debug, Deserialize, PartialEq)] struct Buu { wants_candy: bool, width: i16, amount_eaten: i32, amount_want_to_eat: i64, speed: f32, weight: f64, catchphrase: String, stomach_contents: Vec<u8>, } let connection = setup_and_connect_to_db(); connection.execute("CREATE TABLE IF NOT EXISTS Buu ( wants_candy BOOL NOT NULL, width SMALLINT NOT NULL, amount_eaten INT NOT NULL, amount_want_to_eat BIGINT NOT NULL, speed REAL NOT NULL, weight DOUBLE PRECISION NOT NULL, catchphrase VARCHAR NOT NULL, stomach_contents BYTEA NOT NULL )", &[]).unwrap(); connection.execute("INSERT INTO Buu ( wants_candy, width, amount_eaten, amount_want_to_eat, speed, weight, catchphrase, stomach_contents ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)", &[&true, &20i16, &1000i32, &1000_000i64, &99.99f32, &9999.9999f64, &String::from("Woo Woo"), &vec![1u8, 2, 3, 4, 5, 6]]).unwrap(); let results = connection.query("SELECT wants_candy, width, amount_eaten, amount_want_to_eat, speed, weight, catchphrase, stomach_contents FROM Buu", &[]).unwrap(); let row = results.get(0); let buu: Buu = super::from_row(row).unwrap(); assert_eq!(true, buu.wants_candy); assert_eq!(20, buu.width); assert_eq!(1000, 
buu.amount_eaten); assert_eq!(1000_000, buu.amount_want_to_eat); assert_eq!(99.99, buu.speed); assert_eq!(9999.9999, buu.weight); assert_eq!("Woo Woo", buu.catchphrase); assert_eq!(vec![1,2,3,4,5,6], buu.stomach_contents); connection.execute("DROP TABLE Buu", &[]).unwrap(); } #[test] fn nullable() { #[derive(Debug, Deserialize, PartialEq)] struct Buu { wants_candy: Option<bool>, width: Option<i16>, amount_eaten: Option<i32>, amount_want_to_eat: Option<i64>, speed: Option<f32>, weight: Option<f64>, catchphrase: Option<String>, stomach_contents: Option<Vec<u8>>, } let connection = setup_and_connect_to_db(); connection.execute("CREATE TABLE IF NOT EXISTS NullBuu ( wants_candy BOOL, width SMALLINT, amount_eaten INT, amount_want_to_eat BIGINT, speed REAL, weight DOUBLE PRECISION, catchphrase VARCHAR, stomach_contents BYTEA )", &[]).unwrap(); connection.execute("INSERT INTO NullBuu ( wants_candy, width, amount_eaten, amount_want_to_eat, speed, weight, catchphrase, stomach_contents ) VALUES ( NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)", &[]).unwrap(); let results = connection.query("SELECT wants_candy, width, amount_eaten, amount_want_to_eat, speed, weight, catchphrase, stomach_contents FROM NullBuu", &[]).unwrap(); let row = results.get(0); let buu: Buu = super::from_row(row).unwrap(); assert_eq!(None, buu.wants_candy); assert_eq!(None, buu.width); assert_eq!(None, buu.amount_eaten); assert_eq!(None, buu.amount_want_to_eat); assert_eq!(None, buu.speed); assert_eq!(None, buu.weight); assert_eq!(None, buu.catchphrase); assert_eq!(None, buu.stomach_contents); connection.execute("DROP TABLE NullBuu", &[]).unwrap(); } #[test] fn mispelled_field_name() { #[derive(Debug, Deserialize, PartialEq)] struct Buu { wants_candie: bool, } let connection = setup_and_connect_to_db(); connection.execute("CREATE TABLE IF NOT EXISTS SpellBuu ( wants_candy BOOL NOT NULL )", &[]).unwrap(); connection.execute("INSERT INTO SpellBuu ( wants_candy ) VALUES ($1)", &[&true]).unwrap(); let 
results = connection.query("SELECT wants_candy FROM SpellBuu", &[]).unwrap(); let row = results.get(0); assert_eq!( super::from_row::<Buu>(row), Err(super::Error::Message(String::from("missing field `wants_candie`")))); connection.execute("DROP TABLE SpellBuu", &[]).unwrap(); } #[test] fn missing_optional() { #[derive(Debug, Deserialize, PartialEq)] struct Buu { wants_candy: bool, } let connection = setup_and_connect_to_db(); connection.execute("CREATE TABLE IF NOT EXISTS MiBuu ( wants_candy BOOL )", &[]).unwrap(); connection.execute("INSERT INTO MiBuu ( wants_candy ) VALUES ($1)", &[&None::<bool>]).unwrap(); let results = connection.query("SELECT wants_candy FROM MiBuu", &[]).unwrap(); let row = results.get(0); assert_eq!( super::from_row::<Buu>(row), Err(super::Error::InvalidType(String::from("wants_candy Error(Conversion(WasNull))")))); connection.execute("DROP TABLE MiBuu", &[]).unwrap(); } /* use postgres_derive::FromSql; #[test] fn enums() { #[derive(Debug, Deserialize, PartialEq)] struct Goku { hair: HairColour, } #[derive(Debug, Deserialize, FromSql, PartialEq)] #[postgres(name = "hair_colour")] enum HairColour { #[postgres(name = "black")] Black, #[postgres(name = "yellow")] Yellow, #[postgres(name = "blue")] Blue, } let connection = setup_and_connect_to_db(); connection.execute("CREATE TYPE hair_colour as ENUM ( 'black', 'yellow', 'blue' )", &[]).unwrap(); connection.execute("CREATE TABLE Gokus (hair hair_colour)", &[]).unwrap(); connection.execute("INSERT INTO Gokus VALUES ('black')", &[]) .unwrap(); let results = connection.query("SELECT * FROM Gokus", &[]) .unwrap(); let row = results.get(0); let goku: Goku = super::from_row(row).unwrap(); assert_eq!(HairColour::Black, goku.hair); connection.execute("DROP TABLE Gokus", &[]).unwrap(); connection.execute("DROP TYPE hair_colour", &[]).unwrap(); } */ }
29.027723
138
0.536462
29afe756d1cb261647073671a4da1ef4f9fa1458
750
// SPDX-License-Identifier: Apache-2.0 OR MIT // // Modifications Copyright Kani Contributors // See GitHub history for details. use serde::Serialize; #[derive(Clone, Debug, Serialize)] crate struct ExternalHtml { /// Content that will be included inline in the `<head>` section of a /// rendered Markdown file or generated documentation crate in_header: String, /// Content that will be included inline between `<body>` and the content of /// a rendered Markdown file or generated documentation crate before_content: String, /// Content that will be included inline between the content and `</body>` of /// a rendered Markdown file or generated documentation crate after_content: String, } impl ExternalHtml {}
35.714286
81
0.728
9b26f796e8ea62dd4829a2a21b1058ec3f34d383
6,824
// Generated from definition io.k8s.api.autoscaling.v2beta2.MetricIdentifier /// MetricIdentifier defines the name and optionally selector for a metric #[derive(Clone, Debug, Default, PartialEq)] pub struct MetricIdentifier { /// name is the name of the given metric pub name: String, /// selector is the string-encoded form of a standard kubernetes label selector for the given metric When set, it is passed as an additional parameter to the metrics server for more specific metrics scoping. When unset, just the metricName will be used to gather metrics. pub selector: Option<crate::apimachinery::pkg::apis::meta::v1::LabelSelector>, } impl<'de> crate::serde::Deserialize<'de> for MetricIdentifier { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> { #[allow(non_camel_case_types)] enum Field { Key_name, Key_selector, Other, } impl<'de> crate::serde::Deserialize<'de> for Field { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> { struct Visitor; impl<'de> crate::serde::de::Visitor<'de> for Visitor { type Value = Field; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str("field identifier") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: crate::serde::de::Error { Ok(match v { "name" => Field::Key_name, "selector" => Field::Key_selector, _ => Field::Other, }) } } deserializer.deserialize_identifier(Visitor) } } struct Visitor; impl<'de> crate::serde::de::Visitor<'de> for Visitor { type Value = MetricIdentifier; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str("MetricIdentifier") } fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: crate::serde::de::MapAccess<'de> { let mut value_name: Option<String> = None; let mut value_selector: Option<crate::apimachinery::pkg::apis::meta::v1::LabelSelector> = None; while let Some(key) = 
crate::serde::de::MapAccess::next_key::<Field>(&mut map)? { match key { Field::Key_name => value_name = crate::serde::de::MapAccess::next_value(&mut map)?, Field::Key_selector => value_selector = crate::serde::de::MapAccess::next_value(&mut map)?, Field::Other => { let _: crate::serde::de::IgnoredAny = crate::serde::de::MapAccess::next_value(&mut map)?; }, } } Ok(MetricIdentifier { name: value_name.unwrap_or_default(), selector: value_selector, }) } } deserializer.deserialize_struct( "MetricIdentifier", &[ "name", "selector", ], Visitor, ) } } impl crate::serde::Serialize for MetricIdentifier { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: crate::serde::Serializer { let mut state = serializer.serialize_struct( "MetricIdentifier", 1 + self.selector.as_ref().map_or(0, |_| 1), )?; crate::serde::ser::SerializeStruct::serialize_field(&mut state, "name", &self.name)?; if let Some(value) = &self.selector { crate::serde::ser::SerializeStruct::serialize_field(&mut state, "selector", value)?; } crate::serde::ser::SerializeStruct::end(state) } } #[cfg(feature = "schemars")] impl crate::schemars::JsonSchema for MetricIdentifier { fn schema_name() -> String { "io.k8s.api.autoscaling.v2beta2.MetricIdentifier".to_owned() } fn json_schema(__gen: &mut crate::schemars::gen::SchemaGenerator) -> crate::schemars::schema::Schema { crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject { metadata: Some(Box::new(crate::schemars::schema::Metadata { description: Some("MetricIdentifier defines the name and optionally selector for a metric".to_owned()), ..Default::default() })), instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Object))), object: Some(Box::new(crate::schemars::schema::ObjectValidation { properties: IntoIterator::into_iter([ ( "name".to_owned(), crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject { metadata: 
Some(Box::new(crate::schemars::schema::Metadata { description: Some("name is the name of the given metric".to_owned()), ..Default::default() })), instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))), ..Default::default() }), ), ( "selector".to_owned(), { let mut schema_obj = __gen.subschema_for::<crate::apimachinery::pkg::apis::meta::v1::LabelSelector>().into_object(); schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata { description: Some("selector is the string-encoded form of a standard kubernetes label selector for the given metric When set, it is passed as an additional parameter to the metrics server for more specific metrics scoping. When unset, just the metricName will be used to gather metrics.".to_owned()), ..Default::default() })); crate::schemars::schema::Schema::Object(schema_obj) }, ), ]).collect(), required: IntoIterator::into_iter([ "name", ]).map(std::borrow::ToOwned::to_owned).collect(), ..Default::default() })), ..Default::default() }) } }
46.421769
332
0.533851
f8ae057eacd029ee8afa9fb71a83f9238f067f5f
532
use crate::{guild::Emoji, id::GuildId}; #[cfg_attr( feature = "serde-support", derive(serde::Deserialize, serde::Serialize) )] #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub struct GuildPreview { pub id: GuildId, pub name: String, pub icon: Option<String>, pub splash: Option<String>, pub discovery_splash: Option<String>, pub emojis: Vec<Emoji>, pub features: Vec<String>, pub approximate_member_count: u64, pub approximate_presence_count: u64, pub description: Option<String>, }
26.6
48
0.682331
90e3a613b1446770e4cecedc1f2ddbee802c4cd3
2,252
use std::path::PathBuf; use swc_common::{comments::SingleThreadedComments, Mark}; use swc_ecma_ast::Id; use swc_ecma_parser::{parse_file_as_module, EsConfig, Syntax}; use swc_ecma_transforms_base::resolver; use swc_ecma_visit::FoldWith; use testing::NormalizedOutput; use super::VarUsageInfo; use crate::marks::Marks; #[testing::fixture("tests/fixture/**/input.js")] #[testing::fixture("tests/single-pass/**/input.js")] #[testing::fixture("tests/terser/compress/**/input.js")] fn snapshot(input: PathBuf) { let dir = input.parent().unwrap(); testing::run_test2(false, |cm, handler| { let fm = cm.load_file(&input).expect("failed to load input.js"); let comments = SingleThreadedComments::default(); let unresolved_mark = Mark::new(); let top_level_mark = Mark::new(); let marks = Marks::new(); let program = parse_file_as_module( &fm, Syntax::Es(EsConfig { jsx: true, ..Default::default() }), Default::default(), Some(&comments), &mut vec![], ) .map_err(|err| { err.into_diagnostic(&handler).emit(); }) .map(|module| module.fold_with(&mut resolver(unresolved_mark, top_level_mark, false))); let program = match program { Ok(program) => program, Err(..) => { return Ok(()); } }; let data = super::analyze(&program, Some(marks)); // Iteration order of hashmap is not deterministic let mut snapshot = TestSnapshot { vars: data .vars .into_iter() .map(|(id, mut v)| { v.infects = Default::default(); v.accessed_props = Default::default(); (id, v) }) .collect(), }; snapshot.vars.sort_by(|a, b| a.0.cmp(&b.0)); NormalizedOutput::from(format!("{:#?}", snapshot)) .compare_to_file(dir.join("analysis-snapshot.rust-debug")) .unwrap(); Ok(()) }) .unwrap() } #[derive(Debug)] struct TestSnapshot { vars: Vec<(Id, VarUsageInfo)>, }
27.802469
95
0.539964
de0e875c683313c58a08f111f1f7d447abfff9b2
1,506
// Copyright 2021 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use bstr::ByteSlice; use bytes::BufMut; use common_exception::Result; use super::string2string::String2StringFunction; use super::string2string::StringOperator; #[derive(Clone, Default)] pub struct Lower; impl StringOperator for Lower { #[inline] fn try_apply<'a>(&'a mut self, s: &'a [u8], mut buffer: &mut [u8]) -> Result<usize> { for (start, end, ch) in s.char_indices() { if ch == '\u{FFFD}' { // If char is not valid, just copy it. buffer.put_slice(&s.as_bytes()[start..end]); } else if ch.is_ascii() { buffer.put_u8(ch.to_ascii_lowercase() as u8); } else { for x in ch.to_lowercase() { buffer.put_slice(x.encode_utf8(&mut [0; 4]).as_bytes()); } } } Ok(s.len()) } } pub type LowerFunction = String2StringFunction<Lower>;
33.466667
89
0.630146
01717345b2069117b2f81928298573ca613d543d
12,692
use glib::{ object as gobject, object::{Cast, IsA}, signal::{connect_raw, SignalHandlerId}, translate::*, StaticType, Value, }; use std::boxed::Box as Box_; use std::{fmt, mem::transmute}; // use Scriptable; // TODO: , @implements Scriptable glib_wrapper! { pub struct Interval(Object<ffi::ClutterInterval, ffi::ClutterIntervalClass, IntervalClass>) @extends gobject::InitiallyUnowned; match fn { get_type => || ffi::clutter_interval_get_type(), } } impl Interval { //pub fn new(gtype: glib::types::Type, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Interval { // unsafe { TODO: call clutter_sys:clutter_interval_new() } //} pub fn with_values( gtype: glib::types::Type, initial: Option<&glib::Value>, final_: Option<&glib::Value>, ) -> Interval { unsafe { from_glib_none(ffi::clutter_interval_new_with_values( gtype.to_glib(), initial.to_glib_none().0, final_.to_glib_none().0, )) } } //pub fn register_progress_func<P: Fn(&glib::Value, &glib::Value, f64, &glib::Value) -> bool + 'static>(value_type: glib::types::Type, func: P) { // unsafe { TODO: call clutter_sys:clutter_interval_register_progress_func() } //} } pub const NONE_INTERVAL: Option<&Interval> = None; /// Trait containing all `Interval` methods. /// /// # Implementors /// /// [`Interval`](struct.Interval.html) pub trait IntervalExt: 'static { /// Creates a copy of `self`. 
/// /// # Returns /// /// the newly created `Interval` fn clone(&self) -> Option<Interval>; /// Computes the value between the `self` boundaries given the /// progress `factor` /// /// Unlike `IntervalExt::compute_value`, this function will /// return a const pointer to the computed value /// /// You should use this function if you immediately pass the computed /// value to another function that makes a copy of it, like /// `gobject::ObjectExt::set_property` /// ## `factor` /// the progress factor, between 0 and 1 /// /// # Returns /// /// a pointer to the computed value, /// or `None` if the computation was not successfull fn compute(&self, factor: f64) -> Option<glib::Value>; /// Computes the value between the `self` boundaries given the /// progress `factor` and copies it into `value`. /// ## `factor` /// the progress factor, between 0 and 1 /// ## `value` /// return location for an initialized `gobject::Value` /// /// # Returns /// /// `true` if the operation was successful fn compute_value(&self, factor: f64) -> Option<glib::Value>; /// Retrieves the final value of `self` and copies /// it into `value`. /// /// The passed `gobject::Value` must be initialized to the value held by /// the `Interval`. /// ## `value` /// a `gobject::Value` fn get_final_value(&self) -> glib::Value; /// Retrieves the initial value of `self` and copies /// it into `value`. /// /// The passed `gobject::Value` must be initialized to the value held by /// the `Interval`. /// ## `value` /// a `gobject::Value` fn get_initial_value(&self) -> glib::Value; //fn get_interval(&self, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs); /// Retrieves the `glib::Type` of the values inside `self`. /// /// # Returns /// /// the type of the value, or G_TYPE_INVALID fn get_value_type(&self) -> glib::types::Type; /// Checks if the `self` has a valid initial and final values. 
/// /// # Returns /// /// `true` if the `Interval` has an initial and /// final values, and `false` otherwise fn is_valid(&self) -> bool; /// Gets the pointer to the final value of `self` /// /// # Returns /// /// the final value of the interval. /// The value is owned by the `Interval` and it should not be /// modified or freed fn peek_final_value(&self) -> Option<glib::Value>; /// Gets the pointer to the initial value of `self` /// /// # Returns /// /// the initial value of the interval. /// The value is owned by the `Interval` and it should not be /// modified or freed fn peek_initial_value(&self) -> Option<glib::Value>; //fn set_final(&self, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs); /// Sets the final value of `self` to `value`. The value is /// copied inside the `Interval`. /// ## `value` /// a `gobject::Value` fn set_final_value(&self, value: &glib::Value); //fn set_initial(&self, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs); /// Sets the initial value of `self` to `value`. The value is copied /// inside the `Interval`. /// ## `value` /// a `gobject::Value` fn set_initial_value(&self, value: &glib::Value); //fn set_interval(&self, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs); // /// Validates the initial and final values of `self` against // /// a `gobject::ParamSpec`. // /// ## `pspec` // /// a `gobject::ParamSpec` // /// // /// # Returns // /// // /// `true` if the `Interval` is valid, `false` otherwise // fn validate<P: IsA<glib::ParamSpec>>(&self, pspec: &P) -> bool; // /// The final value of the interval. // fn get_property_final(&self) -> Option<glib::Value>; // /// The initial value of the interval. 
// fn get_property_initial(&self) -> Option<glib::Value>; fn connect_property_final_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_initial_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<Interval>> IntervalExt for O { fn clone(&self) -> Option<Interval> { unsafe { from_glib_full(ffi::clutter_interval_clone(self.as_ref().to_glib_none().0)) } } fn compute(&self, factor: f64) -> Option<glib::Value> { unsafe { from_glib_none(ffi::clutter_interval_compute( self.as_ref().to_glib_none().0, factor, )) } } fn compute_value(&self, factor: f64) -> Option<glib::Value> { unsafe { let mut value = glib::Value::uninitialized(); let ret = from_glib(ffi::clutter_interval_compute_value( self.as_ref().to_glib_none().0, factor, value.to_glib_none_mut().0, )); if ret { Some(value) } else { None } } } fn get_final_value(&self) -> glib::Value { unsafe { let mut value = glib::Value::uninitialized(); ffi::clutter_interval_get_final_value( self.as_ref().to_glib_none().0, value.to_glib_none_mut().0, ); value } } fn get_initial_value(&self) -> glib::Value { unsafe { let mut value = glib::Value::uninitialized(); ffi::clutter_interval_get_initial_value( self.as_ref().to_glib_none().0, value.to_glib_none_mut().0, ); value } } //fn get_interval(&self, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) { // unsafe { TODO: call clutter_sys:clutter_interval_get_interval() } //} fn get_value_type(&self) -> glib::types::Type { unsafe { from_glib(ffi::clutter_interval_get_value_type( self.as_ref().to_glib_none().0, )) } } fn is_valid(&self) -> bool { unsafe { from_glib(ffi::clutter_interval_is_valid( self.as_ref().to_glib_none().0, )) } } fn peek_final_value(&self) -> Option<glib::Value> { unsafe { from_glib_none(ffi::clutter_interval_peek_final_value( self.as_ref().to_glib_none().0, )) } } fn peek_initial_value(&self) -> Option<glib::Value> { unsafe { from_glib_none(ffi::clutter_interval_peek_initial_value( 
self.as_ref().to_glib_none().0, )) } } //fn set_final(&self, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) { // unsafe { TODO: call clutter_sys:clutter_interval_set_final() } //} fn set_final_value(&self, value: &glib::Value) { unsafe { ffi::clutter_interval_set_final_value( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } //fn set_initial(&self, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) { // unsafe { TODO: call clutter_sys:clutter_interval_set_initial() } //} fn set_initial_value(&self, value: &glib::Value) { unsafe { ffi::clutter_interval_set_initial_value( self.as_ref().to_glib_none().0, value.to_glib_none().0, ); } } //fn set_interval(&self, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) { // unsafe { TODO: call clutter_sys:clutter_interval_set_interval() } //} // fn validate<P: IsA<glib::ParamSpec>>(&self, pspec: &P) -> bool { // unsafe { // from_glib(ffi::clutter_interval_validate( // self.as_ref().to_glib_none().0, // pspec.as_ref().to_glib_none().0, // )) // } // } // fn get_property_final(&self) -> Option<glib::Value> { // unsafe { // let mut value = Value::from_type(<glib::Value as StaticType>::static_type()); // gobject_sys::g_object_get_property( // self.to_glib_none().0 as *mut gobject_sys::GObject, // b"final\0".as_ptr() as *const _, // value.to_glib_none_mut().0, // ); // value // .get() // .expect("Return Value for property `final` getter") // } // } // fn get_property_initial(&self) -> Option<glib::Value> { // unsafe { // let mut value = Value::from_type(<glib::Value as StaticType>::static_type()); // gobject_sys::g_object_get_property( // self.to_glib_none().0 as *mut gobject_sys::GObject, // b"initial\0".as_ptr() as *const _, // value.to_glib_none_mut().0, // ); // value // .get() // .expect("Return Value for property `initial` getter") // } // } fn connect_property_final_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn 
notify_final_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::ClutterInterval, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<Interval>, { let f: &F = &*(f as *const F); f(&Interval::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::final\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_final_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_property_initial_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_initial_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::ClutterInterval, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer, ) where P: IsA<Interval>, { let f: &F = &*(f as *const F); f(&Interval::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::initial\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_initial_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } } impl fmt::Display for Interval { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Interval") } }
32.460358
149
0.550425
8739339249bbe78c870f3c7d136cb773ad3e2500
11,532
#![cfg(feature = "test-bpf")] mod program_test; use fixed::types::I80F48; use mango::matching::{OrderType, Side}; use mango::state::{TriggerCondition, ADVANCED_ORDER_FEE}; use program_test::assertions::*; use program_test::cookies::*; use program_test::scenarios::*; use program_test::*; use solana_program_test::*; #[tokio::test] async fn test_perp_trigger_orders_basic() { // === Arrange === let config = MangoProgramTestConfig::default_two_mints(); let mut test = MangoProgramTest::start_new(&config).await; // Disable all logs except error // solana_logger::setup_with("error"); let mut mango_group_cookie = MangoGroupCookie::default(&mut test).await; mango_group_cookie.full_setup(&mut test, config.num_users, config.num_mints - 1).await; // General parameters let user_index: usize = 0; let user2_index: usize = 1; let mint_index: usize = 0; let base_price: f64 = 10_000.0; let base_size: f64 = 1.0; // Set oracles mango_group_cookie.set_oracle(&mut test, mint_index, base_price).await; // Deposit let user_deposits = vec![ (user_index, test.quote_index, base_price * base_size), (user_index, mint_index, base_size), (user2_index, test.quote_index, base_price * base_size), (user2_index, mint_index, base_size), ]; deposit_scenario(&mut test, &mut mango_group_cookie, &user_deposits).await; // Make an advanced orders account let mango_account_cookie = &mango_group_cookie.mango_accounts[user_index]; let mut advanced_orders_cookie = AdvancedOrdersCookie::init(&mut test, mango_account_cookie).await; assert!(!advanced_orders_cookie.advanced_orders.orders[0].is_active); assert!(!advanced_orders_cookie.advanced_orders.orders[1].is_active); let advanced_orders_initial_lamports = test.get_account(advanced_orders_cookie.address).await.lamports; // Add two advanced orders let mut perp_market = mango_group_cookie.perp_markets[0]; perp_market .add_trigger_order( &mut test, &mut mango_group_cookie, &mut advanced_orders_cookie, user_index, OrderType::Limit, Side::Bid, TriggerCondition::Above, 
base_price, base_size, I80F48::from_num(base_price * 1.1), ) .await; assert!(advanced_orders_cookie.advanced_orders.orders[0].is_active); assert!(!advanced_orders_cookie.advanced_orders.orders[1].is_active); assert!( test.get_account(advanced_orders_cookie.address).await.lamports - advanced_orders_initial_lamports == ADVANCED_ORDER_FEE ); perp_market .add_trigger_order( &mut test, &mut mango_group_cookie, &mut advanced_orders_cookie, user_index, OrderType::Limit, Side::Bid, TriggerCondition::Below, base_price * 0.91, base_size, I80F48::from_num(base_price * 0.9), ) .await; assert!(advanced_orders_cookie.advanced_orders.orders[0].is_active); assert!(advanced_orders_cookie.advanced_orders.orders[1].is_active); assert!( test.get_account(advanced_orders_cookie.address).await.lamports - advanced_orders_initial_lamports == 2 * ADVANCED_ORDER_FEE ); // Remove the first advanced order advanced_orders_cookie .remove_advanced_order(&mut test, &mut mango_group_cookie, user_index, 0) .await .expect("deletion succeeds"); assert!(!advanced_orders_cookie.advanced_orders.orders[0].is_active); assert!(advanced_orders_cookie.advanced_orders.orders[1].is_active); assert!( test.get_account(advanced_orders_cookie.address).await.lamports - advanced_orders_initial_lamports == ADVANCED_ORDER_FEE ); // advance slots, since we want to send the same tx a second time test.advance_clock_by_slots(2).await; advanced_orders_cookie .remove_advanced_order(&mut test, &mut mango_group_cookie, user_index, 0) .await .expect("deletion of inactive is ok"); advanced_orders_cookie .remove_advanced_order(&mut test, &mut mango_group_cookie, user_index, 2) .await .expect("deletion of unused is ok"); // Trigger the second advanced order let agent_user_index = user2_index; perp_market .execute_trigger_order( &mut test, &mut mango_group_cookie, &mut advanced_orders_cookie, user_index, agent_user_index, 1, ) .await .expect_err("order trigger condition should not be met"); mango_group_cookie.set_oracle(&mut test, 
mint_index, base_price * 0.89).await; mango_group_cookie.run_keeper(&mut test).await; perp_market .execute_trigger_order( &mut test, &mut mango_group_cookie, &mut advanced_orders_cookie, user_index, agent_user_index, 1, ) .await .expect("order executed"); assert!(!advanced_orders_cookie.advanced_orders.orders[1].is_active); assert!( test.get_account(advanced_orders_cookie.address).await.lamports - advanced_orders_initial_lamports == 0 ); // Check that order is in book now mango_group_cookie.run_keeper(&mut test).await; let user_perp_orders = vec![(user_index, mint_index, Side::Bid, base_size, base_price)]; assert_open_perp_orders(&mango_group_cookie, &user_perp_orders, STARTING_ADVANCED_ORDER_ID + 1); } #[tokio::test] async fn test_perp_trigger_orders_health() { // === Arrange === let config = MangoProgramTestConfig::default_two_mints(); let mut test = MangoProgramTest::start_new(&config).await; // Disable all logs except error // solana_logger::setup_with("error"); let mut mango_group_cookie = MangoGroupCookie::default(&mut test).await; mango_group_cookie.full_setup(&mut test, config.num_users, config.num_mints - 1).await; // General parameters let user_index: usize = 0; let user2_index: usize = 1; let agent_user_index = user2_index; let mint_index: usize = 0; let base_price: f64 = 10_000.0; let base_size: f64 = 1.0; let mint = test.with_mint(mint_index); // Set oracles mango_group_cookie.set_oracle(&mut test, mint_index, base_price).await; // Deposit let user_deposits = vec![ (user_index, test.quote_index, base_price * base_size), //(user_index, mint_index, base_size), (user2_index, test.quote_index, base_price * base_size), (user2_index, mint_index, base_size), ]; deposit_scenario(&mut test, &mut mango_group_cookie, &user_deposits).await; // Make an advanced orders account let mango_account_cookie = &mango_group_cookie.mango_accounts[user_index]; let mut advanced_orders_cookie = AdvancedOrdersCookie::init(&mut test, mango_account_cookie).await; // Add trigger 
orders let mut perp_market = mango_group_cookie.perp_markets[0]; perp_market .add_trigger_order( &mut test, &mut mango_group_cookie, &mut advanced_orders_cookie, user_index, OrderType::Limit, Side::Ask, TriggerCondition::Above, base_price, 11.0 * base_size, I80F48::from_num(base_price * 0.01), ) .await; perp_market .add_trigger_order( &mut test, &mut mango_group_cookie, &mut advanced_orders_cookie, user_index, OrderType::Limit, Side::Ask, TriggerCondition::Above, base_price, 9.0 * base_size, I80F48::from_num(base_price * 0.01), ) .await; perp_market .add_trigger_order( &mut test, &mut mango_group_cookie, &mut advanced_orders_cookie, user_index, OrderType::Limit, Side::Ask, TriggerCondition::Above, base_price, 0.001 * base_size, I80F48::from_num(base_price * 0.01), ) .await; perp_market .add_trigger_order( &mut test, &mut mango_group_cookie, &mut advanced_orders_cookie, user_index, OrderType::Market, Side::Bid, TriggerCondition::Above, 0.99 * base_price, 0.001 * base_size, I80F48::from_num(base_price * 0.01), ) .await; // Triggering order 0 would drop health too much returns ok, but doesn't add // the order to the book due to health perp_market .execute_trigger_order( &mut test, &mut mango_group_cookie, &mut advanced_orders_cookie, user_index, agent_user_index, 0, ) .await .expect("order triggered, but not added to book"); assert!( mango_group_cookie.mango_accounts[user_index].mango_account.perp_accounts[0].asks_quantity == 0 ); // Triggering order 1 is acceptable but brings health to the brink perp_market .execute_trigger_order( &mut test, &mut mango_group_cookie, &mut advanced_orders_cookie, user_index, agent_user_index, 1, ) .await .expect("order triggered, added to book"); assert!( mango_group_cookie.mango_accounts[user_index].mango_account.perp_accounts[0].asks_quantity == 90_000 ); // Change the price oracle to make the account unhealthy mango_group_cookie.set_oracle(&mut test, mint_index, 2.0 * base_price).await; mango_group_cookie.run_keeper(&mut 
test).await; // Triggering order 2 would decrease health a tiny bit - not allowed perp_market .execute_trigger_order( &mut test, &mut mango_group_cookie, &mut advanced_orders_cookie, user_index, agent_user_index, 2, ) .await .expect("order triggered, but not added to book"); assert!( mango_group_cookie.mango_accounts[user_index].mango_account.perp_accounts[0].bids_quantity == 0 ); assert!( mango_group_cookie.mango_accounts[user_index].mango_account.perp_accounts[0].asks_quantity == 90_000 ); // Add an order for user1 to trade against perp_market .place_order( &mut test, &mut mango_group_cookie, user2_index, Side::Ask, base_size, 0.99 * base_price, PlacePerpOptions::default(), ) .await; // Triggering order 3 improves health and is allowed perp_market .execute_trigger_order( &mut test, &mut mango_group_cookie, &mut advanced_orders_cookie, user_index, agent_user_index, 3, ) .await .expect("order triggered"); assert!( mango_group_cookie.mango_accounts[user_index].mango_account.perp_accounts[0].taker_base == test.base_size_number_to_lots(&mint, 0.001 * base_size) as i64 ); }
33.620991
100
0.625304
62ca40e714892ec3023ff9e66e1608158712e618
1,211
#[derive(PartialEq)] pub enum State { Attack(f32), Decay(f32), Sustain, Release(f32), Dead, } pub use State::*; pub struct ADSR { attack: f32, decay: f32, sustain: f32, release: f32, pub state: State, } impl ADSR { pub fn new(attack: f32, decay: f32, sustain: f32, release: f32) -> ADSR { let mut adsr = ADSR { attack, decay, sustain, release, state: Attack(0.0), }; adsr.advance(0.0); adsr } pub fn sample(&self) -> f32 { match self.state { Attack(t) => t / self.attack, Decay(t) => 1.0 - (1.0 - self.sustain) * t / self.decay, Sustain => self.sustain, Release(t) => self.sustain * (1.0 - t / self.release), Dead => 0.0, } } pub fn release(&mut self) { self.sustain = self.sample(); self.state = Release(0.0); } pub fn advance(&mut self, x: f32) { self.state = match self.state { Attack(t) => { if t + x >= self.attack { Decay(0.0) } else { Attack(t + x) } } Decay(t) => { if t + x > self.decay { Sustain } else { Decay(t + x) } } Sustain => Sustain, Release(t) => { if t + x > self.release { Dead } else { Release(t + x) } } Dead => Dead, }; } }
16.146667
74
0.533443
21902a60be394bf8614e3a481b711c6984978020
2,226
use core::{ app::{App, AppTimer, BackendAppRunner}, Scalar, }; use std::{cell::RefCell, rc::Rc, time::Duration}; use wasm_bindgen::{prelude::*, JsCast}; fn window() -> web_sys::Window { web_sys::window().expect("no global `window` exists") } fn request_animation_frame(f: &Closure<dyn FnMut()>) { window() .request_animation_frame(f.as_ref().unchecked_ref()) .expect("Could not perform `requestAnimationFrame`"); } fn performance() -> web_sys::Performance { window() .performance() .expect("no `window.performance` exists") } pub struct WebAppTimer { timer: Scalar, delta_time: Duration, delta_time_seconds: Scalar, } impl Default for WebAppTimer { fn default() -> Self { Self { timer: performance().now() as Scalar * 0.001, delta_time: Duration::default(), delta_time_seconds: 0.0, } } } impl AppTimer for WebAppTimer { fn tick(&mut self) { let t = performance().now() as Scalar * 0.001; let d = t - self.timer; self.timer = t; self.delta_time = Duration::new(d as u64, (d.fract() * 1e9) as u32); self.delta_time_seconds = d; } fn now_since_start(&self) -> Duration { let t = performance().now() as Scalar * 0.001; let d = t - self.timer; Duration::new(d as u64, (d.fract() * 1e9) as u32) } fn delta_time(&self) -> Duration { self.delta_time } fn delta_time_seconds(&self) -> Scalar { self.delta_time_seconds } } #[derive(Default)] pub struct WebAppRunner; impl BackendAppRunner<JsValue> for WebAppRunner { fn run(&mut self, app: Rc<RefCell<App>>) -> Result<(), JsValue> { let f = Rc::new(RefCell::new(None)); let g = f.clone(); *g.borrow_mut() = Some(Closure::wrap(Box::new(move || { if !app.borrow().multiverse.is_running() { drop(f.borrow_mut().take()); return; } app.borrow_mut().process(); request_animation_frame(f.borrow().as_ref().unwrap()); }) as Box<dyn FnMut()>)); request_animation_frame(g.borrow().as_ref().unwrap()); Ok(()) } }
26.819277
76
0.575472
f5ebfd3853ca2a1b05d11f6f41e3ce18c71395dd
31,898
#[cfg(feature = "std")] use crate::ConstraintTrace; use crate::{ format, vec, BTreeMap, LcIndex, LinearCombination, Matrix, Rc, String, SynthesisError, Variable, Vec, }; use algebra_core::Field; use core::cell::{Ref, RefCell, RefMut}; use systemstat::*; /// Computations are expressed in terms of rank-1 constraint systems (R1CS). /// The `generate_constraints` method is called to generate constraints for /// both CRS generation and for proving. /// // TODO: Think: should we replace this with just a closure? pub trait ConstraintSynthesizer<F: Field> { /// Drives generation of new constraints inside `cs`. fn generate_constraints(self, cs: ConstraintSystemRef<F>) -> Result<(), SynthesisError>; } /// An Rank-One `ConstraintSystem`. Enforces constraints of the form /// `⟨a_i, z⟩ ⋅ ⟨b_i, z⟩ = ⟨c_i, z⟩`, where `a_i`, `b_i`, and `c_i` are linear /// combinations over variables, and `z` is the concrete assignment to these /// variables. #[derive(Debug, Clone)] pub struct ConstraintSystem<F: Field> { /// The mode in which the constraint system is operating. `self` can either /// be in setup mode (i.e., `self.mode == SynthesisMode::Setup`) or in proving mode /// (i.e., `self.mode == SynthesisMode::Prove`). If we are in proving mode, then we /// have the additional option of whether or not to construct the A, B, and /// C matrices of the constraint system (see below). pub mode: SynthesisMode, /// The number of variables that are "public inputs" to the constraint system. pub num_instance_variables: usize, /// The number of variables that are "private inputs" to the constraint system. pub num_witness_variables: usize, /// The number of constraints in the constraint system. pub num_constraints: usize, /// The number of linear combinations pub num_linear_combinations: usize, /// Assignments to the public input variables. This is empty if `self.mode == SynthesisMode::Setup`. pub instance_assignment: Vec<F>, /// Assignments to the private input variables. 
This is empty if `self.mode == SynthesisMode::Setup`. pub witness_assignment: Vec<F>, lc_map: BTreeMap<LcIndex, LinearCombination<F>>, #[cfg(feature = "std")] constraint_traces: Vec<Option<ConstraintTrace>>, a_constraints: Vec<LcIndex>, b_constraints: Vec<LcIndex>, c_constraints: Vec<LcIndex>, lc_assignment_cache: Rc<RefCell<BTreeMap<LcIndex, F>>>, } /// Defines the mode of operation of a `ConstraintSystem`. #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)] pub enum SynthesisMode { /// Indicate to the `ConstraintSystem` that it should only generate /// constraint matrices and not populate the variable assignments. Setup, /// Indicate to the `ConstraintSystem` that it populate the variable /// assignments. If additionally `construct_matrices == true`, then generate /// the matrices as in the `Setup` case. Prove { /// If `construct_matrices == true`, then generate /// the matrices as in the `Setup` case. construct_matrices: bool, }, } impl<F: Field> ConstraintSystem<F> { #[inline] fn make_row(&self, l: &LinearCombination<F>) -> Vec<(F, usize)> { let num_input = self.num_instance_variables; l.0.iter() .filter_map(|(coeff, var)| { if coeff.is_zero() { None } else { Some(( *coeff, var.get_index_unchecked(num_input).expect("no symbolic LCs"), )) } }) .collect() } /// Construct an ampty `ConstraintSystem`. pub fn new() -> Self { Self { num_instance_variables: 1, num_witness_variables: 0, num_constraints: 0, num_linear_combinations: 0, a_constraints: Vec::new(), b_constraints: Vec::new(), c_constraints: Vec::new(), instance_assignment: vec![F::one()], witness_assignment: Vec::new(), #[cfg(feature = "std")] constraint_traces: Vec::new(), lc_map: BTreeMap::new(), lc_assignment_cache: Rc::new(RefCell::new(BTreeMap::new())), mode: SynthesisMode::Prove { construct_matrices: true, }, } } /// Create a new `ConstraintSystemRef<F>`. pub fn new_ref() -> ConstraintSystemRef<F> { ConstraintSystemRef::new(Self::new()) } /// Set `self.mode` to `mode`. 
pub fn set_mode(&mut self, mode: SynthesisMode) { self.mode = mode; } /// Check whether `self.mode == SynthesisMode::Setup`. pub fn is_in_setup_mode(&self) -> bool { self.mode == SynthesisMode::Setup } /// Check whether or not `self` will construct matrices. pub fn should_construct_matrices(&self) -> bool { match self.mode { SynthesisMode::Setup => true, SynthesisMode::Prove { construct_matrices } => construct_matrices, } } /// Return a variable representing the constant "zero" inside the constraint /// system. #[inline] pub fn zero() -> Variable { Variable::Zero } /// Return a variable representing the constant "one" inside the constraint /// system. #[inline] pub fn one() -> Variable { Variable::One } /// Obtain a variable representing a new public instance input. #[inline] pub fn new_input_variable<Func>(&mut self, f: Func) -> Result<Variable, SynthesisError> where Func: FnOnce() -> Result<F, SynthesisError>, { let index = self.num_instance_variables; self.num_instance_variables += 1; if !self.is_in_setup_mode() { self.instance_assignment.push(f()?); } Ok(Variable::Instance(index)) } /// Obtain a variable representing a new private witness input. #[inline] pub fn new_witness_variable<Func>(&mut self, f: Func) -> Result<Variable, SynthesisError> where Func: FnOnce() -> Result<F, SynthesisError>, { let index = self.num_witness_variables; self.num_witness_variables += 1; if !self.is_in_setup_mode() { self.witness_assignment.push(f()?); } Ok(Variable::Witness(index)) } /// Obtain a variable representing a linear combination. #[inline] pub fn new_lc(&mut self, lc: LinearCombination<F>) -> Result<Variable, SynthesisError> { let index = LcIndex(self.num_linear_combinations); let var = Variable::SymbolicLc(index); self.lc_map.insert(index, lc); self.num_linear_combinations += 1; Ok(var) } /// Enforce a R1CS constraint with the name `name`. 
#[inline] pub fn enforce_constraint( &mut self, a: LinearCombination<F>, b: LinearCombination<F>, c: LinearCombination<F>, ) -> Result<(), SynthesisError> { if self.should_construct_matrices() { let a_index = self.new_lc(a)?.get_lc_index().unwrap(); let b_index = self.new_lc(b)?.get_lc_index().unwrap(); let c_index = self.new_lc(c)?.get_lc_index().unwrap(); self.a_constraints.push(a_index); self.b_constraints.push(b_index); self.c_constraints.push(c_index); } self.num_constraints += 1; #[cfg(feature = "std")] { let trace = ConstraintTrace::capture(); self.constraint_traces.push(trace); } Ok(()) } /// Count the number of times a given LC is used within another LC /// return the inDegree of each LC too. fn lc_num_times_used(&self, count_sinks: bool) -> (Vec<usize>, Vec<usize>) { // step 1: Identify all lcs that have been many times let mut num_times_used = vec![0; self.lc_map.len()]; let mut in_degree = vec![0; self.lc_map.len()]; for (index, lc) in self.lc_map.iter() { num_times_used[index.0] += count_sinks as usize; for &(_, var) in lc.iter() { if var.is_lc() { let lc_index = var.get_lc_index().expect("should be lc"); num_times_used[lc_index.0] += 1; in_degree[index.0] += 1; } } } (num_times_used, in_degree) } /// Naively inlines symbolic linear combinations into the linear combinations /// that use them. /// /// Useful for standard pairing-based SNARKs where addition gates are cheap. /// For example, in the SNARKs such as [[Groth16]](https://eprint.iacr.org/2016/260) and /// [[Groth-Maller17]](https://eprint.iacr.org/2017/540), addition gates /// do not contribute to the size of the multi-scalar multiplication, which /// is the dominating cost. pub fn inline_all_lcs(&mut self) { let mut inlined_lcs = BTreeMap::new(); let (mut num_times_used, in_degree) = self.lc_num_times_used(false); //println!("num times used {:?}", num_times_used.clone()); //TODO a concurrent queue to store LCs to be inlined in topological order. 
//TODO a worker abstraction to pull LCs from queue println!("before inlining, lc_map len {}", self.lc_map.len()); /* TODO remove current lc from self.lc_map to save more memory space? but the rust compiler will stop this action i guess. */ let map_len = self.lc_map.len(); for i in 0..map_len { let index = LcIndex(i); let lc: LinearCombination<F> = self.lc_map.remove(&index).unwrap(); let mut inlined_lc = LinearCombination::new(); for &(coeff, var) in lc.iter() { if var.is_lc() { let lc_index = var.get_lc_index().expect("should be lc"); // If `var` is a `SymbolicLc`, fetch the corresponding // inlined LC, and substitute it in. let lc = inlined_lcs.get(&lc_index).expect("should be inlined"); let tmp = (lc * coeff).0.into_iter(); inlined_lc.extend(tmp); num_times_used[lc_index.0] -= 1; if num_times_used[lc_index.0] == 0 { // This lc is not used any more, so remove it. inlined_lcs.remove(&lc_index); } } else { // Otherwise, it's a concrete variable and so we // substitute it in directly. inlined_lc.push((coeff, var)); } } inlined_lc.compactify(); inlined_lcs.insert(index, inlined_lc); } // for (&index, lc) in &self.lc_map { // let mut inlined_lc = LinearCombination::new(); // for &(coeff, var) in lc.iter() { // if var.is_lc() { // let lc_index = var.get_lc_index().expect("should be lc"); // // If `var` is a `SymbolicLc`, fetch the corresponding // // inlined LC, and substitute it in. // let lc = inlined_lcs.get(&lc_index).expect("should be inlined"); // let tmp = (lc * coeff).0.into_iter(); // inlined_lc.extend(tmp); // num_times_used[lc_index.0] -= 1; // if num_times_used[lc_index.0] == 0 { // // This lc is not used any more, so remove it. // inlined_lcs.remove(&lc_index); // } // } else { // // Otherwise, it's a concrete variable and so we // // substitute it in directly. 
// inlined_lc.push((coeff, var)); // } // } // inlined_lc.compactify(); // inlined_lcs.insert(index, inlined_lc); // } let sys = System::new(); match sys.memory() { Ok(mem) => println!( "\nMemory: {} used / {}", saturating_sub_bytes(mem.total, mem.free), mem.total ), Err(x) => println!("\nMemory: error: {}", x), } self.lc_map = inlined_lcs; println!("after inlining lcs, lcs map len is {}", self.lc_map.len()); } /// If a `SymbolicLc` is used in more than one location, this method makes a new /// variable for that `SymbolicLc`, adds a constraint ensuring the equality of /// the variable and the linear combination, and then uses that variable in every /// location the `SymbolicLc` is used. /// /// Useful for SNARKs like `Marlin` or `Fractal`, where addition gates /// are not cheap. pub fn outline_lcs(&mut self) { unimplemented!() } /// This step must be called after constraint generation has completed, and after /// all symbolic LCs have been inlined into the places that they are used. pub fn to_matrices(&self) -> Option<ConstraintMatrices<F>> { if let SynthesisMode::Prove { construct_matrices: false, } = self.mode { None } else { let a: Vec<_> = self .a_constraints .iter() .map(|index| self.make_row(self.lc_map.get(index).unwrap())) .collect(); let b: Vec<_> = self .b_constraints .iter() .map(|index| self.make_row(self.lc_map.get(index).unwrap())) .collect(); let c: Vec<_> = self .c_constraints .iter() .map(|index| self.make_row(self.lc_map.get(index).unwrap())) .collect(); let a_num_non_zero: usize = a.iter().map(|lc| lc.len()).sum(); let b_num_non_zero: usize = b.iter().map(|lc| lc.len()).sum(); let c_num_non_zero: usize = c.iter().map(|lc| lc.len()).sum(); let matrices = ConstraintMatrices { num_instance_variables: self.num_instance_variables, num_witness_variables: self.num_witness_variables, num_constraints: self.num_constraints, a_num_non_zero, b_num_non_zero, c_num_non_zero, a, b, c, }; Some(matrices) } } fn eval_lc(&self, lc: LcIndex) -> Option<F> { let lc = 
self.lc_map.get(&lc)?; let mut acc = F::zero(); for (coeff, var) in lc.iter() { acc += *coeff * &self.assigned_value(*var)?; } Some(acc) } /// If `self` is satisfied, outputs `Ok(true)`. /// If `self` is unsatisfied, outputs `Ok(false)`. /// If `self.is_in_setup_mode()`, outputs `Err(())`. pub fn is_satisfied(&self) -> Result<bool, SynthesisError> { self.which_is_unsatisfied().map(|s| s.is_none()) } /// If `self` is satisfied, outputs `Ok(None)`. /// If `self` is unsatisfied, outputs `Some(i)`, where `i` is the index of /// the first unsatisfied constraint. If `self.is_in_setup_mode()`, outputs /// `Err(())`. pub fn which_is_unsatisfied(&self) -> Result<Option<String>, SynthesisError> { if self.is_in_setup_mode() { Err(SynthesisError::AssignmentMissing) } else { for i in 0..self.num_constraints { let a = self .eval_lc(self.a_constraints[i]) .ok_or(SynthesisError::AssignmentMissing)?; let b = self .eval_lc(self.b_constraints[i]) .ok_or(SynthesisError::AssignmentMissing)?; let c = self .eval_lc(self.c_constraints[i]) .ok_or(SynthesisError::AssignmentMissing)?; if a * b != c { let trace; #[cfg(feature = "std")] { trace = self.constraint_traces[i].as_ref().map_or_else( || { eprintln!("Constraint trace requires enabling `ConstraintLayer`"); format!("{}", i) }, |t| format!("{}", t), ); } #[cfg(not(feature = "std"))] { trace = format!("{}", i); } return Ok(Some(trace)); } } Ok(None) } } /// Obtain the assignment corresponding to the `Variable` `v`. 
pub fn assigned_value(&self, v: Variable) -> Option<F> { match v { Variable::One => Some(F::one()), Variable::Zero => Some(F::zero()), Variable::Witness(idx) => self.witness_assignment.get(idx).copied(), Variable::Instance(idx) => self.instance_assignment.get(idx).copied(), Variable::SymbolicLc(idx) => { let value = self.lc_assignment_cache.borrow().get(&idx).copied(); if value.is_some() { value } else { let value = self.eval_lc(idx)?; self.lc_assignment_cache.borrow_mut().insert(idx, value); Some(value) } } } } } /// The A, B and C matrices of a Rank-One `ConstraintSystem`. /// Also contains metadata on the structure of the constraint system /// and the matrices. #[derive(Debug, Clone, PartialEq, Eq)] pub struct ConstraintMatrices<F: Field> { /// The number of variables that are "public instances" to the constraint system. pub num_instance_variables: usize, /// The number of variables that are "private witnesses" to the constraint system. pub num_witness_variables: usize, /// The number of constraints in the constraint system. pub num_constraints: usize, /// The number of non_zero entries in the A matrix. pub a_num_non_zero: usize, /// The number of non_zero entries in the B matrix. pub b_num_non_zero: usize, /// The number of non_zero entries in the C matrix. pub c_num_non_zero: usize, /// The A constraint matrix. This is empty when /// `self.mode == SynthesisMode::Prove { construct_matrices = false }`. pub a: Matrix<F>, /// The B constraint matrix. This is empty when /// `self.mode == SynthesisMode::Prove { construct_matrices = false }`. pub b: Matrix<F>, /// The C constraint matrix. This is empty when /// `self.mode == SynthesisMode::Prove { construct_matrices = false }`. pub c: Matrix<F>, } /// A shared reference to a constraint system that can be stored in high level /// variables. #[derive(Debug, Clone)] pub enum ConstraintSystemRef<F: Field> { /// Represents the case where we *don't* need to allocate variables or enforce /// constraints. 
Encountered when operating over constant values. None, /// Represents the case where we *do* allocate variables or enforce constraints. CS(Rc<RefCell<ConstraintSystem<F>>>), } impl<F: Field> PartialEq for ConstraintSystemRef<F> { fn eq(&self, other: &Self) -> bool { match (self, other) { (Self::None, Self::None) => true, (_, _) => false, } } } impl<F: Field> Eq for ConstraintSystemRef<F> {} /// A namespaced `ConstraintSystemRef`. #[derive(Debug, Clone)] pub struct Namespace<F: Field> { inner: ConstraintSystemRef<F>, #[cfg(feature = "std")] id: Option<tracing::Id>, } impl<F: Field> From<ConstraintSystemRef<F>> for Namespace<F> { fn from(other: ConstraintSystemRef<F>) -> Self { Self { inner: other, #[cfg(feature = "std")] id: None, } } } impl<F: Field> Namespace<F> { /// Construct a new `Namespace`. #[cfg(feature = "std")] pub fn new(inner: ConstraintSystemRef<F>, id: Option<tracing::Id>) -> Self { Self { inner, id } } /// Obtain the inner `ConstraintSystemRef<F>`. pub fn cs(&self) -> ConstraintSystemRef<F> { self.inner.clone() } /// Manually leave the namespace. pub fn leave_namespace(mut self) { drop(&mut self) } } #[cfg(feature = "std")] impl<F: Field> Drop for Namespace<F> { fn drop(&mut self) { if let Some(id) = self.id.as_ref() { tracing::dispatcher::get_default(|dispatch| dispatch.exit(id)) } drop(&mut self.inner) } } impl<F: Field> ConstraintSystemRef<F> { /// Returns `self` if `!self.is_none()`, otherwise returns `other`. pub fn or(self, other: Self) -> Self { match self { ConstraintSystemRef::None => other, _ => self, } } /// Returns `true` is `self == ConstraintSystemRef::None`. pub fn is_none(&self) -> bool { match self { ConstraintSystemRef::None => true, _ => false, } } /// Construct a `ConstraintSystemRef` from a `ConstraintSystem`. 
#[inline] pub fn new(inner: ConstraintSystem<F>) -> Self { Self::CS(Rc::new(RefCell::new(inner))) } fn inner(&self) -> Option<&Rc<RefCell<ConstraintSystem<F>>>> { match self { Self::CS(a) => Some(a), Self::None => None, } } /// Obtain an immutable reference to the underlying `ConstraintSystem`. /// /// # Panics /// This method panics if `self` is already mutably borrowed. #[inline] pub fn borrow(&self) -> Option<Ref<ConstraintSystem<F>>> { self.inner().map(|cs| cs.borrow()) } /// Obtain a mutable reference to the underlying `ConstraintSystem`. /// /// # Panics /// This method panics if `self` is already mutably borrowed. #[inline] pub fn borrow_mut(&self) -> Option<RefMut<ConstraintSystem<F>>> { self.inner().map(|cs| cs.borrow_mut()) } /// Set `self.mode` to `mode`. pub fn set_mode(&self, mode: SynthesisMode) { self.inner().map_or((), |cs| cs.borrow_mut().set_mode(mode)) } /// Check whether `self.mode == SynthesisMode::Setup`. #[inline] pub fn is_in_setup_mode(&self) -> bool { self.inner() .map_or(false, |cs| cs.borrow().is_in_setup_mode()) } /// Returns the number of constraints. #[inline] pub fn num_constraints(&self) -> usize { self.inner().map_or(0, |cs| cs.borrow().num_constraints) } /// Returns the number of instance variables. #[inline] pub fn num_instance_variables(&self) -> usize { self.inner() .map_or(0, |cs| cs.borrow().num_instance_variables) } /// Returns the number of witness variables. #[inline] pub fn num_witness_variables(&self) -> usize { self.inner() .map_or(0, |cs| cs.borrow().num_witness_variables) } /// Check whether or not `self` will construct matrices. #[inline] pub fn should_construct_matrices(&self) -> bool { self.inner() .map_or(false, |cs| cs.borrow().should_construct_matrices()) } /// Obtain a variable representing a new public instance input. 
#[inline] pub fn new_input_variable<Func>(&self, f: Func) -> Result<Variable, SynthesisError> where Func: FnOnce() -> Result<F, SynthesisError>, { self.inner() .ok_or(SynthesisError::MissingCS) .and_then(|cs| { if !self.is_in_setup_mode() { // This is needed to avoid double-borrows, because `f` // might itself mutably borrow `cs` (eg: `f = || g.value()`). let value = f(); cs.borrow_mut().new_input_variable(|| value) } else { cs.borrow_mut().new_input_variable(f) } }) } /// Obtain a variable representing a new private witness input. #[inline] pub fn new_witness_variable<Func>(&self, f: Func) -> Result<Variable, SynthesisError> where Func: FnOnce() -> Result<F, SynthesisError>, { self.inner() .ok_or(SynthesisError::MissingCS) .and_then(|cs| { if !self.is_in_setup_mode() { // This is needed to avoid double-borrows, because `f` // might itself mutably borrow `cs` (eg: `f = || g.value()`). let value = f(); cs.borrow_mut().new_witness_variable(|| value) } else { cs.borrow_mut().new_witness_variable(f) } }) } /// Obtain a variable representing a linear combination. #[inline] pub fn new_lc(&self, lc: LinearCombination<F>) -> Result<Variable, SynthesisError> { self.inner() .ok_or(SynthesisError::MissingCS) .and_then(|cs| cs.borrow_mut().new_lc(lc)) } /// Enforce a R1CS constraint with the name `name`. #[inline] pub fn enforce_constraint( &self, a: LinearCombination<F>, b: LinearCombination<F>, c: LinearCombination<F>, ) -> Result<(), SynthesisError> { self.inner() .ok_or(SynthesisError::MissingCS) .and_then(|cs| cs.borrow_mut().enforce_constraint(a, b, c)) } /// Naively inlines symbolic linear combinations into the linear combinations /// that use them. /// /// Useful for standard pairing-based SNARKs where addition gates are free, /// such as the SNARKs in [[Groth16]](https://eprint.iacr.org/2016/260) and /// [[Groth-Maller17]](https://eprint.iacr.org/2017/540). 
pub fn inline_all_lcs(&self) { if let Some(cs) = self.inner() { cs.borrow_mut().inline_all_lcs() } } /// If a `SymbolicLc` is used in more than one location, this method makes a new /// variable for that `SymbolicLc`, adds a constraint ensuring the equality of /// the variable and the linear combination, and then uses that variable in every /// location the `SymbolicLc` is used. /// /// Useful for SNARKs like `Marlin` or `Fractal`, where where addition gates /// are not (entirely) free. pub fn outline_lcs(&self) { if let Some(cs) = self.inner() { cs.borrow_mut().outline_lcs() } } /// This step must be called after constraint generation has completed, and after /// all symbolic LCs have been inlined into the places that they are used. #[inline] pub fn to_matrices(&self) -> Option<ConstraintMatrices<F>> { self.inner().map_or(None, |cs| cs.borrow().to_matrices()) } /// If `self` is satisfied, outputs `Ok(true)`. /// If `self` is unsatisfied, outputs `Ok(false)`. /// If `self.is_in_setup_mode()` or if `self == None`, outputs `Err(())`. pub fn is_satisfied(&self) -> Result<bool, SynthesisError> { self.inner() .map_or(Err(SynthesisError::AssignmentMissing), |cs| { cs.borrow().is_satisfied() }) } /// If `self` is satisfied, outputs `Ok(None)`. /// If `self` is unsatisfied, outputs `Some(i)`, where `i` is the index of /// the first unsatisfied constraint. /// If `self.is_in_setup_mode()` or `self == None`, outputs `Err(())`. pub fn which_is_unsatisfied(&self) -> Result<Option<String>, SynthesisError> { self.inner() .map_or(Err(SynthesisError::AssignmentMissing), |cs| { cs.borrow().which_is_unsatisfied() }) } /// Obtain the assignment corresponding to the `Variable` `v`. 
pub fn assigned_value(&self, v: Variable) -> Option<F> { self.inner() .map_or(None, |cs| cs.borrow().assigned_value(v)) } /// Get trace information about all constraints in the system pub fn constraint_names(&self) -> Option<Vec<String>> { #[cfg(feature = "std")] { self.inner().and_then(|cs| { cs.borrow() .constraint_traces .iter() .map(|trace| { let mut constraint_path = String::new(); let mut prev_module_path = ""; let mut prefixes = crate::BTreeSet::new(); for step in trace.as_ref()?.path() { let module_path = if prev_module_path == step.module_path { prefixes.insert(step.module_path.to_string()); String::new() } else { let mut parts = step .module_path .split("::") .filter(|&part| part != "r1cs_std" && part != "constraints"); let mut path_so_far = String::new(); for part in parts.by_ref() { if path_so_far.is_empty() { path_so_far += part; } else { path_so_far += &["::", part].join(""); } if prefixes.contains(&path_so_far) { continue; } else { prefixes.insert(path_so_far.clone()); break; } } parts.collect::<Vec<_>>().join("::") + "::" }; prev_module_path = step.module_path; constraint_path += &["/", &module_path, step.name].join(""); } Some(constraint_path) }) .collect::<Option<Vec<_>>>() }) } #[cfg(not(feature = "std"))] { None } } } #[cfg(test)] mod tests { use crate::*; use algebra::{bls12_381::Fr, One}; #[test] fn matrix_generation() -> Result<(), SynthesisError> { let cs = ConstraintSystem::<Fr>::new_ref(); let two = Fr::one() + Fr::one(); let a = cs.new_input_variable(|| Ok(Fr::one()))?; let b = cs.new_witness_variable(|| Ok(Fr::one()))?; let c = cs.new_witness_variable(|| Ok(two))?; cs.enforce_constraint(lc!() + a, lc!() + (two, b) + c, lc!() + c)?; let d = cs.new_lc(lc!() + a + b)?; cs.enforce_constraint(lc!() + a, lc!() + d, lc!() + d)?; let e = cs.new_lc(lc!() + d + d)?; cs.enforce_constraint(lc!() + Variable::One, lc!() + e, lc!() + e)?; cs.inline_all_lcs(); let matrices = cs.to_matrices().unwrap(); assert_eq!(matrices.a[0], vec![(Fr::one(), 1)]); 
assert_eq!(matrices.b[0], vec![(two, 2), (Fr::one(), 3)]); assert_eq!(matrices.c[0], vec![(Fr::one(), 3)]); // assert_eq!(matrices.a[1], vec![(Fr::one(), 1)]); // assert_eq!(matrices.b[1], vec![(Fr::one(), 1), (Fr::one(), 2)]); // assert_eq!(matrices.c[1], vec![(Fr::one(), 1), (Fr::one(), 2)]); // assert_eq!(matrices.a[2], vec![(Fr::one(), 0)]); // assert_eq!(matrices.b[2], vec![(two, 1), (two, 2)]); // assert_eq!(matrices.c[2], vec![(two, 1), (two, 2)]); Ok(()) } }
37.615566
127
0.541884
672e5b8d04985ab5000baac5e6edd0767646521f
7,180
use super::Result; #[derive(Debug, PartialEq)] pub struct DiskUsage { pub filesystem: Option<String>, pub one_k_blocks: u64, pub one_k_blocks_used: u64, pub one_k_blocks_free: u64, pub used_percentage: u32, pub mountpoint: String, } #[derive(Debug, PartialEq)] pub struct DiskInodeUsage { pub filesystem: Option<String>, pub inodes: u64, pub iused: u64, pub ifree: u64, pub iused_percentage: u32, pub mountpoint: String, } /// Read the current usage of all disks #[cfg(target_os = "linux")] pub fn read() -> Result<Vec<DiskUsage>> { os::read() } /// Read the current inode usage of all disks #[cfg(target_os = "linux")] pub fn read_inodes() -> Result<Vec<DiskInodeUsage>> { os::read_inodes() } #[cfg(target_os = "linux")] mod os { use super::super::{parse_u64, ProbeError, Result}; use super::{DiskInodeUsage, DiskUsage}; use std::process::Command; #[inline] pub fn read() -> Result<Vec<DiskUsage>> { let mut out: Vec<DiskUsage> = Vec::new(); let local_out = disk_fs_local_raw()?; let parsed = parse_df_output(&local_out)?; for segment in parsed.iter() { let usage = DiskUsage { filesystem: parse_filesystem(segment[0]), one_k_blocks: parse_u64(segment[1])?, one_k_blocks_used: parse_u64(segment[2])?, one_k_blocks_free: parse_u64(segment[3])?, used_percentage: parse_percentage_segment(segment[4])?, mountpoint: segment[5].to_string(), }; out.push(usage); } Ok(out) } #[inline] pub fn read_inodes() -> Result<Vec<DiskInodeUsage>> { let mut out: Vec<DiskInodeUsage> = Vec::new(); let inodes_out = disk_fs_inodes_raw()?; let parsed = parse_df_output(&inodes_out)?; for segment in parsed.iter() { let usage = DiskInodeUsage { filesystem: parse_filesystem(segment[0]), inodes: parse_u64(segment[1])?, iused: parse_u64(segment[2])?, ifree: parse_u64(segment[3])?, iused_percentage: parse_percentage_segment(segment[4])?, mountpoint: segment[5].to_string(), }; out.push(usage); } Ok(out) } #[inline] pub fn parse_df_output(output: &str) -> Result<Vec<Vec<&str>>> { let mut out: Vec<Vec<&str>> = 
Vec::new(); // Sometimes the filesystem is on a separate line let mut filesystem_on_previous_line: Option<&str> = None; for line in output.split("\n").skip(1) { let mut segments: Vec<&str> = line.split_whitespace().collect(); match segments.len() { 0 => { // Skip } 1 => filesystem_on_previous_line = Some(segments[0]), 5 => { // Filesystem should be on the previous line if let Some(fs) = filesystem_on_previous_line { // Get filesystem first let mut disk = vec![fs]; disk.append(&mut segments); out.push(disk); // Reset this to none filesystem_on_previous_line = None; } else { return Err(ProbeError::UnexpectedContent( "filesystem expected on previous line".to_owned(), )); } } 6 => { // All information is on 1 line out.push(segments); } _ => { return Err(ProbeError::UnexpectedContent( "Incorrect number of segments".to_owned(), )); } } } Ok(out) } #[inline] fn parse_percentage_segment(segment: &str) -> Result<u32> { // Strip % from the used value let segment_minus_percentage = &segment[..segment.len() - 1]; segment_minus_percentage.parse().map_err(|_| { ProbeError::UnexpectedContent("Could not parse percentage segment".to_owned()) }) } #[inline] fn parse_filesystem(segment: &str) -> Option<String> { match segment { "none" => None, value => Some(value.to_string()), } } #[inline] fn disk_fs_inodes_raw() -> Result<String> { let output = Command::new("df") .arg("-i") .output() .map_err(|e| ProbeError::IO(e, "df -i".to_owned()))? .stdout; Ok(String::from_utf8_lossy(&output).to_string()) } #[inline] fn disk_fs_local_raw() -> Result<String> { let output = Command::new("df") .arg("-l") .output() .map_err(|e| ProbeError::IO(e, "df -l".to_owned()))? 
.stdout; Ok(String::from_utf8_lossy(&output).to_string()) } } #[cfg(test)] #[cfg(target_os = "linux")] mod tests { use super::super::file_to_string; use super::super::ProbeError; use std::path::Path; #[test] fn test_read_disks() { assert!(super::read().is_ok()); assert!(!super::read().unwrap().is_empty()); } #[test] fn test_parse_df_output() { let expected = vec![ vec![ "/dev/mapper/lucid64-root", "81234688", "2344444", "74763732", "4%", "/", ], vec!["none", "183176", "180", "182996", "1%", "/dev"], vec!["/dev/sda1", "233191", "17217", "203533", "8%", "/boot"], ]; let df = file_to_string(Path::new("fixtures/linux/disk_usage/df")).unwrap(); let disks = super::os::parse_df_output(&df).unwrap(); assert_eq!(expected, disks); } #[test] fn test_parse_df_i_output() { let expected = vec![ vec!["overlay", "2097152", "122591", "1974561", "6%", "/"], vec!["tmpfs", "254863", "16", "254847", "1%", "/dev"], vec!["tmpfs", "254863", "15", "254848", "1%", "/sys/fs/cgroup"], ]; let df = file_to_string(Path::new("fixtures/linux/disk_usage/df_i")).unwrap(); let disks = super::os::parse_df_output(&df).unwrap(); assert_eq!(expected, disks); } #[test] fn test_parse_df_output_incomplete() { let df = file_to_string(Path::new("fixtures/linux/disk_usage/df_incomplete")).unwrap(); match super::os::parse_df_output(&df) { Err(ProbeError::UnexpectedContent(_)) => (), r => panic!("Unexpected result: {:?}", r), } } #[test] fn test_parse_df_output_garbage() { let df = file_to_string(Path::new("fixtures/linux/disk_usage/df_garbage")).unwrap(); match super::os::parse_df_output(&df) { Err(ProbeError::UnexpectedContent(_)) => (), r => panic!("Unexpected result: {:?}", r), } } }
29.792531
95
0.515181
22b38c19bbcd011e792550382e3aa40396ab84a4
834
// Copyright 2013-2014 The Rust Project Developers. // Copyright 2018 The Uuid Project Developers. // // See the COPYRIGHT file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use prelude::*; pub fn new() -> Uuid { Uuid::from_bytes([ 0xF9, 0x16, 0x8C, 0x5E, 0xCE, 0xB2, 0x4F, 0xAA, 0xB6, 0xBF, 0x32, 0x9B, 0xF3, 0x9F, 0xA1, 0xE4, ]) } pub fn new2() -> Uuid { Uuid::from_bytes([ 0xF9, 0x16, 0x8C, 0x5E, 0xCE, 0xB2, 0x4F, 0xAB, 0xB6, 0xBF, 0x32, 0x9B, 0xF3, 0x9F, 0xA1, 0xE4, ]) }
30.888889
79
0.660671
75485fc095f0cf14a168103a016342017c0ee176
3,487
/* Several function to retrieve version information. */ use crate::pyobject::PyStructSequence; use chrono::prelude::DateTime; use chrono::Local; use std::time::{Duration, UNIX_EPOCH}; const MAJOR: usize = 3; const MINOR: usize = 9; const MICRO: usize = 0; const RELEASELEVEL: &str = "alpha"; const RELEASELEVEL_N: usize = 0xA; const SERIAL: usize = 0; pub const VERSION_HEX: usize = (MAJOR << 24) | (MINOR << 16) | (MICRO << 8) | (RELEASELEVEL_N << 4) | SERIAL; #[pyclass(module = "sys", name = "version_info")] #[derive(Default, Debug, PyStructSequence)] pub struct VersionInfo { major: usize, minor: usize, micro: usize, releaselevel: &'static str, serial: usize, } pub fn get_version() -> String { format!( "{:.80} ({:.80}) \n[{:.80}]", get_version_number(), get_build_info(), get_compiler() ) } #[pyimpl(with(PyStructSequence))] impl VersionInfo { pub const VERSION: VersionInfo = VersionInfo { major: MAJOR, minor: MINOR, micro: MICRO, releaselevel: RELEASELEVEL, serial: SERIAL, }; #[pyslot] fn tp_new( _cls: crate::builtins::pytype::PyTypeRef, _args: crate::function::FuncArgs, vm: &crate::VirtualMachine, ) -> crate::pyobject::PyResult { Err(vm.new_type_error("cannot create 'sys.version_info' instances".to_owned())) } } pub fn get_version_number() -> String { format!("{}.{}.{}{}", MAJOR, MINOR, MICRO, RELEASELEVEL) } pub fn get_compiler() -> String { let rustc_version = rustc_version_runtime::version_meta(); format!("rustc {}", rustc_version.semver) } pub fn get_build_info() -> String { // See: https://reproducible-builds.org/docs/timestamps/ let git_revision = get_git_revision(); let separator = if git_revision.is_empty() { "" } else { ":" }; let git_identifier = get_git_identifier(); format!( "{id}{sep}{revision}, {date:.20}, {time:.9}", id = if git_identifier.is_empty() { "default".to_owned() } else { git_identifier }, sep = separator, revision = git_revision, date = get_git_date(), time = get_git_time(), ) } pub fn get_git_revision() -> String { 
option_env!("RUSTPYTHON_GIT_HASH").unwrap_or("").to_owned() } pub fn get_git_tag() -> String { option_env!("RUSTPYTHON_GIT_TAG").unwrap_or("").to_owned() } pub fn get_git_branch() -> String { option_env!("RUSTPYTHON_GIT_BRANCH") .unwrap_or("") .to_owned() } pub fn get_git_identifier() -> String { let git_tag = get_git_tag(); let git_branch = get_git_branch(); if git_tag.is_empty() || git_tag == "undefined" { git_branch } else { git_tag } } fn get_git_timestamp_datetime() -> DateTime<Local> { let timestamp = option_env!("RUSTPYTHON_GIT_TIMESTAMP") .unwrap_or("") .to_owned(); let timestamp = timestamp.parse::<u64>().unwrap_or(0); let datetime = UNIX_EPOCH + Duration::from_secs(timestamp); datetime.into() } pub fn get_git_date() -> String { let datetime = get_git_timestamp_datetime(); datetime.format("%b %e %Y").to_string() } pub fn get_git_time() -> String { let datetime = get_git_timestamp_datetime(); datetime.format("%H:%M:%S").to_string() } pub fn get_git_datetime() -> String { let date = get_git_date(); let time = get_git_time(); format!("{} {}", date, time) }
24.730496
87
0.618583
618013f4430e6951f9d8e606babf9d34e6f8a877
231
// Filesystem utility module: re-exports the std types callers need plus the
// contents of the `access`, `crawling`, and `helpers` submodules, so users
// can import everything from this one module.

pub use std::fs::DirEntry;
pub use std::path::{Path, PathBuf};

/// Convenience alias for a list of owned filesystem paths.
pub type PathList = Vec<PathBuf>;

mod access;
pub use self::access::*;
mod crawling;
pub use self::crawling::*;
mod helpers;
pub use self::helpers::*;
16.5
36
0.645022
508ee01d3220e63e6504c01c24ff8d9d82bd275b
2,171
use geom::{Speed, Time}; use widgetry::{GeomBatch, Prerender}; pub struct Vehicle { pub name: String, pub speed: Speed, pub max_energy: usize, // Paths to SVGs to draw in sequence draw_frames: Vec<&'static str>, scale: f64, } impl Vehicle { pub fn get(name: &str) -> Vehicle { match name { "bike" => Vehicle { name: "bike".to_string(), speed: Speed::miles_per_hour(30.0), max_energy: 100, draw_frames: vec!["bike1.svg", "bike2.svg", "bike1.svg", "bike3.svg"], scale: 0.05, }, "sleigh" => Vehicle { name: "sleigh".to_string(), speed: Speed::miles_per_hour(25.0), max_energy: 300, draw_frames: vec!["sleigh.svg"], scale: 0.08, }, "cargo bike" => Vehicle { name: "cargo bike".to_string(), speed: Speed::miles_per_hour(40.0), max_energy: 150, draw_frames: vec![ "cargo_bike1.svg", "cargo_bike2.svg", "cargo_bike1.svg", "cargo_bike3.svg", ], scale: 0.05, }, x => panic!("Don't know vehicle {}", x), } } pub fn animate(&self, prerender: &Prerender, time: Time) -> GeomBatch { // TODO I don't know what I'm doing let rate = 0.1; let frame = (time.inner_seconds() / rate) as usize; let path = format!( "system/assets/santa/{}", self.draw_frames[frame % self.draw_frames.len()] ); GeomBatch::load_svg(prerender, &path).scale(self.scale) } /// (max speed, max energy) pub fn max_stats() -> (Speed, usize) { let mut speed = Speed::ZERO; let mut energy = 0; for x in vec!["bike", "cargo bike", "sleigh"] { let vehicle = Vehicle::get(x); speed = speed.max(vehicle.speed); energy = energy.max(vehicle.max_energy); } (speed, energy) } }
27.833333
86
0.474896
b9c7d54a56aa950f476866655baa13c922852605
4,067
use std::cell::RefCell;
use std::fmt::Display;
use std::rc::Rc;

/// Shared, mutable handle to a tree node.
type NodeRef<T> = Rc<RefCell<Node<T>>>;

/// An unbalanced binary search tree over `Copy` + ordered values.
struct BinaryTree<T> {
    head: Option<NodeRef<T>>,
}

struct Node<T> {
    data: T,
    left: Option<NodeRef<T>>,
    right: Option<NodeRef<T>>,
}

impl<T> BinaryTree<T>
where
    T: std::cmp::PartialEq + std::cmp::PartialOrd + std::marker::Copy,
{
    /// Create an empty tree.
    fn new() -> Self {
        Self { head: None }
    }

    /// Insert `value` and return a handle to the freshly created node.
    fn insert(&mut self, value: T) -> NodeRef<T> {
        let node = Rc::new(RefCell::new(Node {
            data: value,
            left: None,
            right: None,
        }));
        match self.head {
            None => {
                self.head = Some(Rc::clone(&node));
                node
            }
            Some(_) => {
                let mut root = self.head.as_mut().unwrap().clone();
                self.insert_at(&mut root, node)
            }
        }
    }

    /// Walk down from `parent_node` and hang `new_node` off the first free
    /// slot: values strictly smaller than the current node go left,
    /// everything else (including duplicates) goes right.
    fn insert_at(&mut self, parent_node: &mut NodeRef<T>, new_node: NodeRef<T>) -> NodeRef<T> {
        let goes_left = new_node.borrow().data < parent_node.borrow().data;
        let existing_child = if goes_left {
            parent_node.borrow_mut().left.as_mut().map(|c| c.clone())
        } else {
            parent_node.borrow_mut().right.as_mut().map(|c| c.clone())
        };
        match existing_child {
            // Slot already taken: recurse into that subtree.
            Some(mut child) => self.insert_at(&mut child, new_node),
            // Free slot: attach the new node here.
            None => {
                if goes_left {
                    parent_node.borrow_mut().left = Some(new_node.clone());
                } else {
                    parent_node.borrow_mut().right = Some(new_node.clone());
                }
                new_node
            }
        }
    }

    /// Pre-order traversal (node, then left subtree, then right subtree)
    /// starting at `parent_node`.
    fn visit_from<F>(&self, parent_node: &NodeRef<T>, f: &mut F)
    where
        F: FnMut(&NodeRef<T>),
    {
        f(parent_node);
        let node = parent_node.borrow();
        for child in [node.left.as_ref(), node.right.as_ref()].iter().flatten() {
            self.visit_from(child, f);
        }
    }

    /// Apply `f` to every node in pre-order; a no-op on an empty tree.
    fn visit_all<F>(&self, mut f: F)
    where
        F: FnMut(&NodeRef<T>),
    {
        if let Some(head) = self.head.as_ref() {
            self.visit_from(head, &mut f);
        }
    }

    /// Bulk-insert a sorted slice so the tree stays roughly balanced:
    /// insert the middle element first, then recurse on both halves.
    fn add_vector(&mut self, arr: &[T]) {
        match arr.len() {
            // Two or fewer elements: plain in-order inserts suffice.
            0 | 1 | 2 => {
                for value in arr {
                    self.insert(*value);
                }
            }
            len => {
                let middle = len / 2;
                self.insert(arr[middle]);
                self.add_vector(&arr[..middle]);
                self.add_vector(&arr[middle + 1..]);
            }
        }
    }

    /// Depth of the deepest node under `node`, where `node` itself sits at
    /// depth `height`.
    fn height_inner(&self, node: &NodeRef<T>, height: usize) -> usize {
        let node = node.borrow();
        let left_depth = node
            .left
            .as_ref()
            .map_or(height, |l| self.height_inner(l, height + 1));
        let right_depth = node
            .right
            .as_ref()
            .map_or(height, |r| self.height_inner(r, height + 1));
        height.max(left_depth).max(right_depth)
    }

    /// Height of the tree in nodes; 0 for an empty tree.
    fn height(&self) -> usize {
        self.head
            .as_ref()
            .map_or(0, |head| self.height_inner(head, 1))
    }
}

impl<T: Display> Display for BinaryTree<T>
where
    T: std::cmp::PartialEq + std::cmp::PartialOrd + std::marker::Copy,
{
    /// Render the tree as `[a, b, c, ]` in pre-order.
    fn fmt(&self, w: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
        write!(w, "[")?;
        self.visit_all(|node| {
            write!(w, "{}, ", node.borrow().data).unwrap();
        });
        write!(w, "]")
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_minimal_tree() {
        let mut binary_tree = BinaryTree::<i32>::new();
        let values: Vec<i32> = (0..10).collect();
        binary_tree.add_vector(&values);
        assert_eq!(binary_tree.height(), 4);
    }
}

fn main() {
    let mut binary_tree = BinaryTree::<i32>::new();
    let values: Vec<i32> = (0..10).collect();
    binary_tree.add_vector(&values);
    binary_tree.height();
}
28.048276
95
0.522006
33efd049b710bc75a82cbe39e40be5e4f546474a
6,810
// SPDX-License-Identifier: MIT OR Apache-2.0
//
// Copyright (c) 2020 Andre Richter <[email protected]>

//! GICD Driver - GIC Distributor.
//!
//! # Glossary
//!   - SPI - Shared Peripheral Interrupt.

use crate::{
    bsp::device_driver::common::MMIODerefWrapper, state, synchronization,
    synchronization::IRQSafeNullLock,
};
use register::{mmio::*, register_bitfields, register_structs};

//--------------------------------------------------------------------------------------------------
// Private Definitions
//--------------------------------------------------------------------------------------------------

register_bitfields! {
    u32,

    /// Distributor Control Register
    CTLR [
        Enable OFFSET(0) NUMBITS(1) []
    ],

    /// Interrupt Controller Type Register
    TYPER [
        ITLinesNumber OFFSET(0) NUMBITS(5) []
    ],

    /// Interrupt Processor Targets Registers
    ITARGETSR [
        Offset3 OFFSET(24) NUMBITS(8) [],
        Offset2 OFFSET(16) NUMBITS(8) [],
        Offset1 OFFSET(8) NUMBITS(8) [],
        Offset0 OFFSET(0) NUMBITS(8) []
    ]
}

register_structs! {
    #[allow(non_snake_case)]
    SharedRegisterBlock {
        (0x000 => CTLR: ReadWrite<u32, CTLR::Register>),
        (0x004 => TYPER: ReadOnly<u32, TYPER::Register>),
        (0x008 => _reserved1),
        // NOTE(review): 0x104 + 31 * 4 = 0x180, which does not line up with
        // the following 0x108 offset; same pattern below for ITARGETSR.
        // Confirm against the `register` crate's padding/size rules and the
        // GICv2 register map before touching these declarations.
        (0x104 => ISENABLER: [ReadWrite<u32>; 31]),
        (0x108 => _reserved2),
        (0x820 => ITARGETSR: [ReadWrite<u32, ITARGETSR::Register>; 248]),
        (0x824 => @END),
    }
}

register_structs! {
    #[allow(non_snake_case)]
    BankedRegisterBlock {
        (0x000 => _reserved1),
        // Banked per-core enable register for private IRQs 0-31.
        (0x100 => ISENABLER: ReadWrite<u32>),
        (0x104 => _reserved2),
        (0x800 => ITARGETSR: [ReadOnly<u32, ITARGETSR::Register>; 8]),
        (0x804 => @END),
    }
}

/// Abstraction for the non-banked parts of the associated MMIO registers.
type SharedRegisters = MMIODerefWrapper<SharedRegisterBlock>;

/// Abstraction for the banked parts of the associated MMIO registers.
type BankedRegisters = MMIODerefWrapper<BankedRegisterBlock>;

//--------------------------------------------------------------------------------------------------
// Public Definitions
//--------------------------------------------------------------------------------------------------

/// Representation of the GIC Distributor.
pub struct GICD {
    /// Access to shared registers is guarded with a lock.
    shared_registers: IRQSafeNullLock<SharedRegisters>,

    /// Access to banked registers is unguarded.
    banked_registers: BankedRegisters,
}

//--------------------------------------------------------------------------------------------------
// Private Code
//--------------------------------------------------------------------------------------------------

impl SharedRegisters {
    /// Return the number of IRQs that this HW implements.
    #[inline(always)]
    fn num_irqs(&mut self) -> usize {
        // Query number of implemented IRQs.
        //
        // Refer to GICv2 Architecture Specification, Section 4.3.2.
        ((self.TYPER.read(TYPER::ITLinesNumber) as usize) + 1) * 32
    }

    /// Return a slice of the implemented ITARGETSR.
    #[inline(always)]
    fn implemented_itargets_slice(&mut self) -> &[ReadWrite<u32, ITARGETSR::Register>] {
        assert!(self.num_irqs() >= 36);

        // Calculate the max index of the shared ITARGETSR array.
        //
        // The first 32 IRQs are private, so not included in `shared_registers`. Each ITARGETS
        // register has four entries, so shift right by two. Subtract one because we start
        // counting at zero.
        let spi_itargetsr_max_index = ((self.num_irqs() - 32) >> 2) - 1;

        // Rust automatically inserts slice range sanity check, i.e. max >= min.
        &self.ITARGETSR[0..spi_itargetsr_max_index]
    }
}

//--------------------------------------------------------------------------------------------------
// Public Code
//--------------------------------------------------------------------------------------------------
use synchronization::interface::Mutex;

impl GICD {
    /// Create an instance.
    ///
    /// # Safety
    ///
    /// - The user must ensure to provide a correct MMIO start address.
    pub const unsafe fn new(mmio_start_addr: usize) -> Self {
        Self {
            shared_registers: IRQSafeNullLock::new(SharedRegisters::new(mmio_start_addr)),
            banked_registers: BankedRegisters::new(mmio_start_addr),
        }
    }

    /// Use a banked ITARGETSR to retrieve the executing core's GIC target mask.
    ///
    /// Quoting the GICv2 Architecture Specification:
    ///
    ///   "GICD_ITARGETSR0 to GICD_ITARGETSR7 are read-only, and each field returns a value that
    ///   corresponds only to the processor reading the register."
    fn local_gic_target_mask(&self) -> u32 {
        self.banked_registers.ITARGETSR[0].read(ITARGETSR::Offset0)
    }

    /// Route all SPIs to the boot core and enable the distributor.
    pub fn boot_core_init(&self) {
        assert!(
            state::state_manager().is_init(),
            "Only allowed during kernel init phase"
        );

        // Target all SPIs to the boot core only.
        let mask = self.local_gic_target_mask();

        // NOTE(review): the `mut` on this shared reference looks unnecessary
        // (`lock` is called through `&`); confirm against the IRQSafeNullLock API.
        let mut r = &self.shared_registers;
        r.lock(|regs| {
            for i in regs.implemented_itargets_slice().iter() {
                // Write the boot-core mask into all four byte fields of each
                // implemented ITARGETSR.
                i.write(
                    ITARGETSR::Offset3.val(mask)
                        + ITARGETSR::Offset2.val(mask)
                        + ITARGETSR::Offset1.val(mask)
                        + ITARGETSR::Offset0.val(mask),
                );
            }

            regs.CTLR.write(CTLR::Enable::SET);
        });
    }

    /// Enable an interrupt.
    pub fn enable(&self, irq_num: super::IRQNumber) {
        let irq_num = irq_num.get();

        // Each bit in the u32 enable register corresponds to one IRQ number. Shift right by 5
        // (division by 32) and arrive at the index for the respective ISENABLER[i].
        let enable_reg_index = irq_num >> 5;
        let enable_bit: u32 = 1u32 << (irq_num % 32);

        // Check if we are handling a private or shared IRQ.
        match irq_num {
            // Private IRQs (0-31) live in the per-core banked register.
            0..=31 => {
                let enable_reg = &self.banked_registers.ISENABLER;
                enable_reg.set(enable_reg.get() | enable_bit);
            }
            // Shared IRQs (32+): the shared ISENABLER array starts at
            // GICD_ISENABLER1 (IRQ 32), hence the `- 1` on the index.
            _ => {
                let enable_reg_index_shared = enable_reg_index - 1;

                let mut r = &self.shared_registers;
                r.lock(|regs| {
                    let enable_reg = &regs.ISENABLER[enable_reg_index_shared];
                    enable_reg.set(enable_reg.get() | enable_bit);
                });
            }
        }
    }
}
34.393939
100
0.532012
5b56c37aa6da804ad016b6f0087de2f96616bea0
803
//! Crate root: wires up the source modules and hosts the integration test.

#[path = "simple_info_text.rs"]
pub mod simple_info_text;
#[path = "file_helper.rs"]
pub mod file_helper;

#[cfg(test)]
mod tests {
    use crate::file_helper::read_file;
    use crate::simple_info_text::{get_info_text, InfoText};
    use std::io::Result;
    use std::path::Path;

    /// Fixture file whose statistics are known in advance.
    const PATH_TO_TEST_FILE: &str = "test.txt";

    /// `get_info_text` must report the known statistics of the fixture file.
    #[test]
    fn check_get_info_text() {
        let contents = read_file(Path::new(PATH_TO_TEST_FILE)).unwrap();

        let expected: Result<InfoText> = Ok(InfoText {
            lines: 3,
            printable_ascii_symbols: 14,
            size_in_bytes_ascii: 21,
        });

        // Compare via Debug formatting because `std::io::Error` is not `PartialEq`.
        assert_eq!(
            format!("{:?}", get_info_text(&contents)),
            format!("{:?}", expected)
        );
    }
}
26.766667
71
0.60523
500227303d3924c3219cd44485b44b8ee8cd1dcd
1,002
extern crate sys_info;

use sys_info::*;

/// Print a one-shot snapshot of host system information to stdout.
/// Panics if any probe fails (acceptable for a demo binary).
fn main() {
    let os = os_type().unwrap();
    let release = os_release().unwrap();
    println!("os: {} {}", os, release);

    let cores = cpu_num().unwrap();
    let mhz = cpu_speed().unwrap();
    println!("cpu: {} cores, {} MHz", cores, mhz);

    println!("proc total: {}", proc_total().unwrap());

    let load = loadavg().unwrap();
    println!("load: {} {} {}", load.one, load.five, load.fifteen);

    let mem = mem_info().unwrap();
    println!(
        "mem: total {} KB, free {} KB, avail {} KB, buffers {} KB, cached {} KB",
        mem.total, mem.free, mem.avail, mem.buffers, mem.cached
    );
    println!("swap: total {} KB, free {} KB", mem.swap_total, mem.swap_free);

    // Disk statistics are not available on Solaris.
    #[cfg(not(target_os = "solaris"))]
    {
        let disk = disk_info().unwrap();
        println!("disk: total {} KB, free {} KB", disk.total, disk.free);
    }

    println!("hostname: {}", hostname().unwrap());

    // Boot time is not available on Windows.
    #[cfg(not(target_os = "windows"))]
    {
        let t = boottime().unwrap();
        println!("boottime {} sec, {} usec", t.tv_sec, t.tv_usec);
    }
}
35.785714
86
0.556886
eb8632bcf5c4878547f93d186a0e3e7e552d1725
6,979
extern crate clap; use clap::{App, Arg, ArgMatches}; use embedded_hal::{ blocking::delay::{DelayMs, DelayUs}, digital::v2::{InputPin, OutputPin}, }; use rppal::gpio::{Gpio, IoPin, Mode}; use spin_sleep; use std::time; use void; use tmledkey_hal_drv::{self as tm, demo}; /** * Raspberry pi does not have open drain pins so we have to emulate it. * rppal unfortunately is not able to emulate such pins. */ struct OpenPin { iopin: IoPin, mode: Mode, } impl OpenPin { fn new(mut pin: IoPin) -> OpenPin { pin.set_mode(Mode::Input); OpenPin { iopin: pin, mode: Mode::Input, } } fn switch_input(&mut self) { if self.mode != Mode::Input { self.mode = Mode::Input; self.iopin.set_mode(Mode::Input); } } fn switch_output(&mut self) { if self.mode != Mode::Output { self.mode = Mode::Output; self.iopin.set_mode(Mode::Output); } } } impl InputPin for OpenPin { type Error = void::Void; fn is_high(&self) -> Result<bool, Self::Error> { Ok(self.iopin.is_high()) } /// Is the input pin low? fn is_low(&self) -> Result<bool, Self::Error> { Ok(self.iopin.is_low()) } } impl OutputPin for OpenPin { type Error = void::Void; fn set_low(&mut self) -> Result<(), Self::Error> { self.switch_output(); self.iopin.set_low(); Ok(()) } fn set_high(&mut self) -> Result<(), Self::Error> { self.iopin.set_high(); self.switch_input(); Ok(()) } } fn cli_matches() -> ArgMatches<'static> { App::new("DHT tester") .author("Rumato Estorsky") .about("TM 163xx tests") .arg( Arg::with_name("clk") .long("clk") .value_name("PIN") .help("CLK pin number") .required(true), ) .arg( Arg::with_name("dio") .long("dio") .value_name("PIN") .help("DIO pin number") .required(true), ) .arg( Arg::with_name("stb") .long("stb") .value_name("PIN") .help("STB pin number for 3 wire interface") .required(false), ) .get_matches() } /** * rppal delays would not work because it use thread::sleep that does not provide accurate delays */ struct Delayer; impl DelayUs<u16> for Delayer { fn delay_us(&mut self, us: u16) { 
spin_sleep::sleep(time::Duration::from_micros(us as u64)); } } impl DelayMs<u16> for Delayer { fn delay_ms(&mut self, ms: u16) { spin_sleep::sleep(time::Duration::from_millis(ms as u64)); } } fn main() { let matches = cli_matches(); let clk_num = matches .value_of("clk") .expect("Wrong CLK input") .parse::<u8>() .expect("Can not parse CLI as int"); let dio_num = matches .value_of("dio") .expect("Wrong DIO input") .parse::<u8>() .expect("Can not parse DIO as int"); let stb = matches.value_of("stb"); println!( "Initialized using CLK:{} DIO:{}, STB:{:?}", clk_num, dio_num, stb ); match stb { Some(sstb) => { let stb_num = sstb.parse::<u8>().expect("Can not parse STB as int"); demo_3_wire_start(dio_num, clk_num, stb_num); } None => { demo_2_wire_start(dio_num, clk_num); } } } fn demo_2_wire_start(dio_num: u8, clk_num: u8) { let gpio = Gpio::new().expect("Can not init Gpio structure"); let mut clk = gpio .get(clk_num) .expect("Was not able to get CLK pin") .into_output(); let dio = gpio .get(dio_num) .expect("Was not able to get CLK pin") .into_io(Mode::Input); let mut tm_dio = OpenPin::new(dio); let mut delay = Delayer {}; demo_2_wire_run(&mut tm_dio, &mut clk, &mut delay); } fn demo_2_wire_run<DIO, CLK, D>(dio: &mut DIO, clk: &mut CLK, delay: &mut D) where DIO: InputPin + OutputPin, CLK: OutputPin, D: DelayMs<u16> + DelayUs<u16>, { let delay_time = tm::TM1637_BUS_DELAY_US; println!("Starting 3 wire demo (TM1637)"); let mut demo = demo::Demo::new(4); let init_res = demo.init_2wire(dio, clk, &mut |d: u16| delay.delay_us(d), delay_time); println!("Display initialized {:?}", init_res); let mut last_read = 0; loop { let read = demo.next_2wire(dio, clk, &mut |d: u16| delay.delay_us(d), delay_time); match read { Ok(byte) => { if byte != last_read { last_read = byte; println!("Key scan read: {:04b}_{:04b}", byte >> 4, byte & 0xF) } } Err(e) => { println!("Key scan read error {:?}", e); } }; delay.delay_ms(75_u16); } } fn demo_3_wire_start(dio_num: u8, clk_num: u8, stb_num: u8) 
{ let gpio = Gpio::new().expect("Can not init Gpio structure"); let mut clk = gpio .get(clk_num) .expect("Was not able to get CLK pin") .into_output(); let dio = gpio .get(dio_num) .expect("Was not able to get CLK pin") .into_io(Mode::Input); let mut stb = gpio .get(stb_num) .expect("Was not able to get STB pin") .into_output(); let mut tm_dio = OpenPin::new(dio); let mut delayer = Delayer {}; demo_3_wire_run(&mut tm_dio, &mut clk, &mut stb, &mut delayer); } fn demo_3_wire_run<DIO, CLK, STB, D>(dio: &mut DIO, clk: &mut CLK, stb: &mut STB, delay: &mut D) where DIO: InputPin + OutputPin, CLK: OutputPin, STB: OutputPin, D: DelayMs<u16> + DelayUs<u16>, { let delay_time = tm::TM1638_BUS_DELAY_US; println!("Starting 3 wire demo (TM1638)"); let mut demo = demo::Demo::new(8); let init_res = demo.init_3wire(dio, clk, stb, &mut |d: u16| delay.delay_us(d), delay_time); println!("Display initialized {:?}", init_res); let mut last_read = [0_u8; 4]; loop { let read = demo.next_3wire(dio, clk, stb, &mut |d: u16| delay.delay_us(d), delay_time); match read { Ok(bytes) => { if bytes != last_read { last_read = bytes; println!( "Key scan read: {}", last_read .clone() .into_iter() .map(|b| format!("{:04b}_{:04b}", b >> 4, b & 0xF)) .collect::<Vec<String>>() .join(", ") ) } } Err(e) => { println!("Key scan read error {:?}", e); } }; delay.delay_ms(100_u16); } }
25.658088
97
0.515547
2983a352ba7568cc9d71e8ebe6352d9e5be13a85
7,040
//! Opcode tables for the binary proof format: raw byte values, typed enums,
//! and fallible conversions from raw bytes back to the enums.

const OPCODE_END: u8 = 0x00;
const OPCODE_STMT_AXIOM: u8 = 0x02;
const OPCODE_STMT_SORT: u8 = 0x04;
const OPCODE_STMT_TERM_DEF: u8 = 0x05;
const OPCODE_STMT_THM: u8 = 0x06;
const OPCODE_STMT_LOCAL_DEF: u8 = 0x0D;
const OPCODE_STMT_LOCAL_TERM: u8 = 0x0E;
const OPCODE_PROOF_TERM: u8 = 0x10;
const OPCODE_PROOF_TERM_SAVE: u8 = 0x11;
const OPCODE_PROOF_REF: u8 = 0x12;
const OPCODE_PROOF_DUMMY: u8 = 0x13;
const OPCODE_PROOF_THM: u8 = 0x14;
const OPCODE_PROOF_THM_SAVE: u8 = 0x15;
const OPCODE_PROOF_HYP: u8 = 0x16;
const OPCODE_PROOF_CONV: u8 = 0x17;
const OPCODE_PROOF_REFL: u8 = 0x18;
const OPCODE_PROOF_SYMM: u8 = 0x19;
const OPCODE_PROOF_CONG: u8 = 0x1A;
const OPCODE_PROOF_UNFOLD: u8 = 0x1B;
const OPCODE_PROOF_CONV_CUT: u8 = 0x1C;
const OPCODE_PROOF_CONV_REF: u8 = 0x1D;
const OPCODE_PROOF_CONV_SAVE: u8 = 0x1E;
const OPCODE_PROOF_SAVE: u8 = 0x1F;
const OPCODE_UNIFY_TERM: u8 = 0x30;
const OPCODE_UNIFY_TERM_SAVE: u8 = 0x31;
const OPCODE_UNIFY_REF: u8 = 0x32;
const OPCODE_UNIFY_DUMMY: u8 = 0x33;
const OPCODE_UNIFY_HYP: u8 = 0x36;

/// Every opcode of the format, across statement, proof and unify streams.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u8)]
pub enum Opcode {
    End = OPCODE_END,
    StAxiom = OPCODE_STMT_AXIOM,
    StSort = OPCODE_STMT_SORT,
    StTermDef = OPCODE_STMT_TERM_DEF,
    StThm = OPCODE_STMT_THM,
    StLocalDef = OPCODE_STMT_LOCAL_DEF,
    StLocalTerm = OPCODE_STMT_LOCAL_TERM,
    PrTerm = OPCODE_PROOF_TERM,
    PrTermSave = OPCODE_PROOF_TERM_SAVE,
    PrRef = OPCODE_PROOF_REF,
    PrDummy = OPCODE_PROOF_DUMMY,
    PrThm = OPCODE_PROOF_THM,
    PrThmSave = OPCODE_PROOF_THM_SAVE,
    PrHyp = OPCODE_PROOF_HYP,
    PrConv = OPCODE_PROOF_CONV,
    PrRefl = OPCODE_PROOF_REFL,
    PrSymm = OPCODE_PROOF_SYMM,
    PrCong = OPCODE_PROOF_CONG,
    PrUnfold = OPCODE_PROOF_UNFOLD,
    PrConvCut = OPCODE_PROOF_CONV_CUT,
    PrConvRef = OPCODE_PROOF_CONV_REF,
    PrConvSave = OPCODE_PROOF_CONV_SAVE,
    PrSave = OPCODE_PROOF_SAVE,
    UnTerm = OPCODE_UNIFY_TERM,
    UnTermSave = OPCODE_UNIFY_TERM_SAVE,
    UnRef = OPCODE_UNIFY_REF,
    UnDummy = OPCODE_UNIFY_DUMMY,
    UnHyp = OPCODE_UNIFY_HYP,
}

/// An opcode (of any of the enum types above) paired with its operand.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Command<T> {
    pub opcode: T,
    pub operand: u32,
}

impl<T: Copy> From<&Command<T>> for Command<T> {
    // A `Command` of a `Copy` opcode is itself `Copy`; converting from a
    // reference is just a dereference.
    fn from(c: &Command<T>) -> Command<T> {
        *c
    }
}

/// Opcodes legal in the statement stream.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u8)]
pub enum Statement {
    End = OPCODE_END,
    Axiom = OPCODE_STMT_AXIOM,
    Sort = OPCODE_STMT_SORT,
    TermDef = OPCODE_STMT_TERM_DEF,
    Thm = OPCODE_STMT_THM,
    LocalDef = OPCODE_STMT_LOCAL_DEF,
    LocalTerm = OPCODE_STMT_LOCAL_TERM,
}

/// Opcodes legal in the unify stream.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u8)]
pub enum Unify {
    End = OPCODE_END,
    Term = OPCODE_UNIFY_TERM,
    TermSave = OPCODE_UNIFY_TERM_SAVE,
    Ref = OPCODE_UNIFY_REF,
    Dummy = OPCODE_UNIFY_DUMMY,
    Hyp = OPCODE_UNIFY_HYP,
}

/// Opcodes legal in the proof stream.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u8)]
pub enum Proof {
    End = OPCODE_END,
    Term = OPCODE_PROOF_TERM,
    TermSave = OPCODE_PROOF_TERM_SAVE,
    Ref = OPCODE_PROOF_REF,
    Dummy = OPCODE_PROOF_DUMMY,
    Thm = OPCODE_PROOF_THM,
    ThmSave = OPCODE_PROOF_THM_SAVE,
    Hyp = OPCODE_PROOF_HYP,
    Conv = OPCODE_PROOF_CONV,
    Refl = OPCODE_PROOF_REFL,
    Symm = OPCODE_PROOF_SYMM,
    Cong = OPCODE_PROOF_CONG,
    Unfold = OPCODE_PROOF_UNFOLD,
    ConvCut = OPCODE_PROOF_CONV_CUT,
    ConvRef = OPCODE_PROOF_CONV_REF,
    ConvSave = OPCODE_PROOF_CONV_SAVE,
    Save = OPCODE_PROOF_SAVE,
}

use core::convert::TryFrom;

/// Generates a `TryFrom<u8>` impl mapping each opcode constant to the
/// corresponding variant; every unlisted byte is rejected with `Err(())`.
macro_rules! impl_try_from_u8 {
    ($ty:ident { $($code:ident => $variant:ident,)* }) => {
        impl TryFrom<u8> for $ty {
            type Error = ();

            fn try_from(value: u8) -> Result<$ty, ()> {
                match value {
                    $($code => Ok($ty::$variant),)*
                    _ => Err(()),
                }
            }
        }
    };
}

impl_try_from_u8!(Opcode {
    OPCODE_END => End,
    OPCODE_STMT_AXIOM => StAxiom,
    OPCODE_STMT_SORT => StSort,
    OPCODE_STMT_TERM_DEF => StTermDef,
    OPCODE_STMT_THM => StThm,
    OPCODE_STMT_LOCAL_DEF => StLocalDef,
    OPCODE_STMT_LOCAL_TERM => StLocalTerm,
    OPCODE_PROOF_TERM => PrTerm,
    OPCODE_PROOF_TERM_SAVE => PrTermSave,
    OPCODE_PROOF_REF => PrRef,
    OPCODE_PROOF_DUMMY => PrDummy,
    OPCODE_PROOF_THM => PrThm,
    OPCODE_PROOF_THM_SAVE => PrThmSave,
    OPCODE_PROOF_HYP => PrHyp,
    OPCODE_PROOF_CONV => PrConv,
    OPCODE_PROOF_REFL => PrRefl,
    OPCODE_PROOF_SYMM => PrSymm,
    OPCODE_PROOF_CONG => PrCong,
    OPCODE_PROOF_UNFOLD => PrUnfold,
    OPCODE_PROOF_CONV_CUT => PrConvCut,
    OPCODE_PROOF_CONV_REF => PrConvRef,
    OPCODE_PROOF_CONV_SAVE => PrConvSave,
    OPCODE_PROOF_SAVE => PrSave,
    OPCODE_UNIFY_TERM => UnTerm,
    OPCODE_UNIFY_TERM_SAVE => UnTermSave,
    OPCODE_UNIFY_REF => UnRef,
    OPCODE_UNIFY_DUMMY => UnDummy,
    OPCODE_UNIFY_HYP => UnHyp,
});

impl_try_from_u8!(Statement {
    OPCODE_END => End,
    OPCODE_STMT_AXIOM => Axiom,
    OPCODE_STMT_SORT => Sort,
    OPCODE_STMT_TERM_DEF => TermDef,
    OPCODE_STMT_THM => Thm,
    OPCODE_STMT_LOCAL_DEF => LocalDef,
    OPCODE_STMT_LOCAL_TERM => LocalTerm,
});

impl_try_from_u8!(Unify {
    OPCODE_END => End,
    OPCODE_UNIFY_TERM => Term,
    OPCODE_UNIFY_TERM_SAVE => TermSave,
    OPCODE_UNIFY_REF => Ref,
    OPCODE_UNIFY_DUMMY => Dummy,
    OPCODE_UNIFY_HYP => Hyp,
});

impl_try_from_u8!(Proof {
    OPCODE_END => End,
    OPCODE_PROOF_TERM => Term,
    OPCODE_PROOF_TERM_SAVE => TermSave,
    OPCODE_PROOF_REF => Ref,
    OPCODE_PROOF_DUMMY => Dummy,
    OPCODE_PROOF_THM => Thm,
    OPCODE_PROOF_THM_SAVE => ThmSave,
    OPCODE_PROOF_HYP => Hyp,
    OPCODE_PROOF_CONV => Conv,
    OPCODE_PROOF_REFL => Refl,
    OPCODE_PROOF_SYMM => Symm,
    OPCODE_PROOF_CONG => Cong,
    OPCODE_PROOF_UNFOLD => Unfold,
    OPCODE_PROOF_CONV_CUT => ConvCut,
    OPCODE_PROOF_CONV_REF => ConvRef,
    OPCODE_PROOF_CONV_SAVE => ConvSave,
    OPCODE_PROOF_SAVE => Save,
});
31.569507
67
0.631392
fe2b8d40cb77592ee7819fa11d0be67184e9ba53
6,132
use cargo_metadata::{ camino::{Utf8Path, Utf8PathBuf}, CargoOpt, Metadata, MetadataCommand, }; use std::{ collections::HashSet, env, fs, path::PathBuf, process::{Command, Stdio}, }; use structopt::StructOpt; mod target_config; #[derive(Debug, StructOpt)] struct Opt { /// Space or comma separated list of features to activate #[structopt(long)] features: Vec<String>, /// Activate all available features #[structopt(long)] all_features: bool, /// Do not activate the `default` feature #[structopt(long)] no_default_features: bool, /// Path to Cargo.toml #[structopt(long, parse(from_os_str))] manifest_path: Option<PathBuf>, /// TITLEID passed to the vita-mksfoex #[structopt(long)] title: String, /// Arguments for cargo build build_args: Vec<String>, } fn main() { let opt = Opt::from_args(); let metadata = get_metadata(&opt); exists_or_create_dir(&metadata.target_directory); copy_target_configuration(&metadata.target_directory); let executable_elfs = build(&opt, &metadata); for elf in executable_elfs { let out_dir = elf.parent().unwrap(); let stem = elf.file_stem().unwrap(); generate_velf(out_dir, stem); generate_eboot(out_dir, stem); make_sfo(out_dir, stem, &opt.title); eprintln!("Produced vpk: {}", pack_vpk(out_dir, stem)); } } fn get_metadata(opt: &Opt) -> Metadata { let mut cmd = MetadataCommand::new(); cmd.features(CargoOpt::SomeFeatures(opt.features.clone())); if opt.all_features { cmd.features(CargoOpt::AllFeatures); } if opt.no_default_features { cmd.features(CargoOpt::NoDefaultFeatures); } if let Some(manifest_path) = &opt.manifest_path { cmd.manifest_path(manifest_path); } cmd.no_deps() .exec() .expect("could not get the crate's metadata") } fn exists_or_create_dir(dir: &Utf8Path) { if !dir.exists() { fs::create_dir_all(&dir).expect("could not create target directory"); } assert!(dir.is_dir()); } fn copy_target_configuration(target_dir: &Utf8Path) { fs::write(target_dir.join(target_config::NAME), target_config::CONTENT) .expect("could not copy target 
configuration to the target directory"); } fn build(opt: &Opt, metadata: &Metadata) -> Vec<Utf8PathBuf> { let rustflags = env::var("RUSTFLAGS").unwrap_or_default(); let rustflags = format!( "{} -L {}", rustflags, vitasdk() .join("arm-vita-eabi") .join("lib") .to_str() .unwrap() ); let out = Command::new(env::var("CARGO").unwrap_or(String::from("cargo"))) .arg("build") .args(&[ "--target", metadata.target_directory.join(target_config::NAME).as_str(), ]) .args(&["-Z", "build-std=core"]) .args(&["--message-format", "json-render-diagnostics"]) .args(&opt.features) .args(opt.all_features.then(|| "--all-features")) .args(opt.no_default_features.then(|| "--no-default-features")) .args(&opt.manifest_path) .args(&opt.build_args) .env("RUSTFLAGS", rustflags) .stderr(Stdio::inherit()) .output() .expect("could not execute cargo-build"); assert!(out.status.success(), "cargo-build failed"); // Package ids within workspace (due to `no_deps` argument) let local_pkg_ids = metadata .packages .iter() .map(|pkg| &pkg.id) .collect::<HashSet<_>>(); cargo_metadata::Message::parse_stream(out.stdout.as_slice()) .filter_map(|msg| match msg.expect("error while parsing message") { cargo_metadata::Message::CompilerArtifact(a) => Some(a), _ => None, }) .filter(|artifact| local_pkg_ids.contains(&artifact.package_id)) .filter_map(|artifact| artifact.executable) .collect() } fn generate_velf(out_dir: &Utf8Path, stem: &str) -> Utf8PathBuf { let elf = out_dir.join(format!("{}.elf", stem)); let output = out_dir.join(format!("{}.velf", stem)); assert!( Command::new(vitasdk_bin().join("vita-elf-create")) .args(&[&elf, &output]) .status() .expect("could not execute vita-elf-create") .success(), "failed while executing vita-elf-create" ); output } fn generate_eboot(out_dir: &Utf8Path, stem: &str) -> Utf8PathBuf { let velf = out_dir.join(format!("{}.velf", stem)); let output = out_dir.join(format!("{}.eboot.bin", stem)); assert!( Command::new(vitasdk_bin().join("vita-make-fself")) .arg("-s") 
.args(&[&velf, &output]) .status() .expect("could not execute vita-make-fself") .success(), "failed while executing vita-make-fself" ); output } fn make_sfo(out_dir: &Utf8Path, stem: &str, title: &str) -> Utf8PathBuf { let output = out_dir.join(format!("{}.sfo", stem)); assert!( Command::new(vitasdk_bin().join("vita-mksfoex")) .arg(title) .arg(&output) .status() .expect("could not execute vita-mksfoex") .success(), "failed while executing vita-mksfoex" ); output } fn pack_vpk(out_dir: &Utf8Path, stem: &str) -> Utf8PathBuf { let sfo = out_dir.join(format!("{}.sfo", stem)); let eboot = out_dir.join(format!("{}.eboot.bin", stem)); let output = out_dir.join(format!("{}.vpk", stem)); assert!( Command::new(vitasdk_bin().join("vita-pack-vpk")) .arg("--sfo") .arg(sfo) .arg("--eboot") .arg(eboot) .arg(&output) .status() .expect("could not execute vita-pack-vpk") .success(), "failed while executing vita-pack-vpk" ); output } fn vitasdk() -> PathBuf { PathBuf::from(env::var_os("VITASDK").expect("could not find VITASDK environment variable")) } fn vitasdk_bin() -> PathBuf { vitasdk().join("bin") }
29.2
95
0.590835
f49604e19288eff28c0f1eb5a48f3551c825734f
4,535
//! Contract storage layout: global state, challenge and game maps with
//! secondary indexes, plus a utility for merging sorted index ranges.

use cosmwasm_std::{Addr, StdResult, Storage};
use cw_storage_plus::{Index, IndexList, IndexedMap, Item, MultiIndex};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::iter::Peekable;

use crate::cwchess::{CwChessColor, CwChessGame};

// STATE

/// Global contract state; only the owner address is tracked here.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct State {
    pub owner: Addr,
}

/// Singleton storage slot holding the contract `State`.
pub const STATE: Item<State> = Item::new("state");

// CHALLENGES

/// A pending challenge waiting to be accepted and turned into a game.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub struct Challenge {
    // Block height at which the challenge was created.
    pub block_created: u64,
    // Optional block limit — presumably a time control for the game; confirm against caller.
    pub block_limit: Option<u64>,
    pub challenge_id: u64,
    pub created_by: Addr,
    // Color the creator wishes to play as; None leaves it unspecified.
    pub play_as: Option<CwChessColor>,
    // Designated opponent, or None for an open challenge anyone may accept.
    pub opponent: Option<Addr>,
}

/// Monotonically increasing counter backing `next_challenge_id`.
pub const CHALLENGE_ID: Item<u64> = Item::new("challenge_id");

/// Return the next unused challenge id and persist the incremented counter.
/// Ids start at 1 (a missing counter reads as 0).
pub fn next_challenge_id(store: &mut dyn Storage) -> StdResult<u64> {
    let id: u64 = CHALLENGE_ID.may_load(store)?.unwrap_or_default() + 1;
    CHALLENGE_ID.save(store, &id)?;
    Ok(id)
}

/// Secondary indexes over challenges: by creator and by opponent address.
pub struct ChallengeIndexes<'a> {
    pub created_by: MultiIndex<'a, Addr, Challenge, u64>,
    pub opponent: MultiIndex<'a, Addr, Challenge, u64>,
}

impl<'a> IndexList<Challenge> for ChallengeIndexes<'a> {
    // Expose both indexes to cw-storage-plus so they are maintained on writes.
    fn get_indexes(&'_ self) -> Box<dyn Iterator<Item = &'_ dyn Index<Challenge>> + '_> {
        let v: Vec<&dyn Index<Challenge>> = vec![&self.created_by, &self.opponent];
        Box::new(v.into_iter())
    }
}

/// Build the indexed challenges map (primary key: challenge id).
///
/// Challenges without a designated opponent are indexed under the sentinel
/// address "none" so they still appear in the opponent index.
pub fn get_challenges_map<'a>() -> IndexedMap<'a, u64, Challenge, ChallengeIndexes<'a>> {
    let indexes = ChallengeIndexes {
        created_by: MultiIndex::new(
            |c: &Challenge| c.created_by.clone(),
            "challenges",
            "challenges__created_by",
        ),
        opponent: MultiIndex::new(
            |c: &Challenge| {
                c.opponent
                    .clone()
                    .unwrap_or_else(|| Addr::unchecked("none"))
            },
            "challenges",
            "challenges__opponent",
        ),
    };
    IndexedMap::new("challenges", indexes)
}

// GAMES

/// Monotonically increasing counter backing `next_game_id`.
pub const GAME_ID: Item<u64> = Item::new("game_id");

/// Return the next unused game id and persist the incremented counter.
/// Ids start at 1 (a missing counter reads as 0).
pub fn next_game_id(store: &mut dyn Storage) -> StdResult<u64> {
    let id: u64 = GAME_ID.may_load(store)?.unwrap_or_default() + 1;
    GAME_ID.save(store, &id)?;
    Ok(id)
}

/// Secondary indexes over games: by each player's address.
pub struct GameIndexes<'a> {
    pub player1: MultiIndex<'a, Addr, CwChessGame, u64>,
    pub player2: MultiIndex<'a, Addr, CwChessGame, u64>,
}

impl<'a> IndexList<CwChessGame> for GameIndexes<'a> {
    // Expose both player indexes to cw-storage-plus.
    fn get_indexes(&'_ self) -> Box<dyn Iterator<Item = &'_ dyn Index<CwChessGame>> + '_> {
        let v: Vec<&dyn Index<CwChessGame>> = vec![&self.player1, &self.player2];
        Box::new(v.into_iter())
    }
}

/// Build the indexed games map (primary key: game id).
pub fn get_games_map<'a>() -> IndexedMap<'a, u64, CwChessGame, GameIndexes<'a>> {
    let indexes = GameIndexes {
        player1: MultiIndex::new(
            |c: &CwChessGame| c.player1.clone(),
            "games",
            "games__player1",
        ),
        player2: MultiIndex::new(
            |c: &CwChessGame| c.player2.clone(),
            "games",
            "games__player2",
        ),
    };
    IndexedMap::new("games", indexes)
}

/// Lazily merge two iterators, using `is_less_than` to decide which head
/// element to yield next. When `is_less_than` returns false (including ties),
/// the element from `iter2` is yielded. If both inputs are sorted w.r.t. the
/// predicate, the merged output is sorted as well.
pub fn merge_iters<I, J, K>(
    iter1: I,
    iter2: J,
    is_less_than: fn(&I::Item, &J::Item) -> bool,
) -> IterMerge<I, J, K>
where
    I: Iterator<Item = K>,
    J: Iterator<Item = K>,
{
    IterMerge {
        iter1: iter1.peekable(),
        iter2: iter2.peekable(),
        is_less_than,
    }
}

/**
 * Utility to merge multiple index ranges.
 *
 * Inspired by itertools 0.10.0 merge_join_by.
 */
pub struct IterMerge<I, J, K>
where
    I: Iterator<Item = K>,
    J: Iterator<Item = K>,
{
    // Peekable wrappers let us compare the heads without consuming them.
    iter1: Peekable<I>,
    iter2: Peekable<J>,
    // return true to return first item, false for second item
    is_less_than: fn(&K, &K) -> bool,
}

impl<I, J, K> Iterator for IterMerge<I, J, K>
where
    I: Iterator<Item = K>,
    J: Iterator<Item = K>,
{
    type Item = K;

    // Yield from whichever iterator has the "smaller" head; once one side is
    // exhausted, drain the other.
    fn next(&mut self) -> Option<Self::Item> {
        let item1 = self.iter1.peek();
        let item2 = self.iter2.peek();
        match (item1, item2) {
            (None, None) => None,
            (Some(_), None) => self.iter1.next(),
            (None, Some(_)) => self.iter2.next(),
            (Some(item1), Some(item2)) => {
                let is_less_than = self.is_less_than;
                if is_less_than(item1, item2) {
                    self.iter1.next()
                } else {
                    self.iter2.next()
                }
            }
        }
    }
}
27.155689
91
0.582359
1837d192c89655e89e50bd584817131a0b2cc001
3,272
//! Sink implementations and adapters: plain and capacity-unchecked `Vec`
//! sinks, reference-to-value adapters, tuple unwrappers, and a counting
//! wrapper.

use base::Sink;
use std::ptr;
use std::marker::PhantomData;

// A `Vec` is the canonical sink: consuming an item pushes it.
impl<T> Sink<T> for Vec<T> {
    #[inline(always)]
    fn consume(&mut self, x: T) {
        self.push(x);
    }
}

/// Sink over an owned `Vec` that skips the capacity check on push.
/// The user must guarantee sufficient capacity before consuming.
#[derive(new)]
pub struct UncheckedVecSink<T> {
    output: Vec<T>
}

impl<T> Sink<T> for UncheckedVecSink<T> {
    #[inline(always)]
    fn consume(&mut self, item: T) {
        consume_unchecked(&mut self.output, item);
    }
}

/// Like `UncheckedVecSink`, but borrows the destination `Vec`.
#[derive(new)]
pub struct UncheckedVecRefSink<'a, T: 'a> {
    pub output: &'a mut Vec<T>
}

impl<'a, T: 'a> Sink<T> for UncheckedVecRefSink<'a, T> {
    #[inline(always)]
    fn consume(&mut self, item: T) {
        consume_unchecked(&mut self.output, item);
    }
}

/// Adapter: accepts `&T` and forwards a bitwise copy to the inner sink.
pub struct RefCopyingSink<T: Copy, S: Sink<T>> {
    pub sink: S,
    // Marks the logical item type; no `T` is stored directly.
    _ph: PhantomData<T>
}

impl<T, S> RefCopyingSink<T, S>
    where T: Copy,
          S: Sink<T>
{
    pub fn new(sink: S) -> Self {
        RefCopyingSink {
            sink:sink,
            _ph: PhantomData
        }
    }
}

impl<'a, T, S> Sink<&'a T> for RefCopyingSink<T, S>
    where T: Copy+'a,
          S: Sink<T>
{
    #[inline(always)]
    fn consume(&mut self, x: &'a T) {
        self.sink.consume(*x);
    }
}

/// Adapter: accepts `&T` and forwards a clone to the inner sink.
pub struct RefCloningSink<T: Clone, S: Sink<T>> {
    pub sink: S,
    // Marks the logical item type; no `T` is stored directly.
    _ph: PhantomData<T>
}

impl<T, S> RefCloningSink<T, S>
    where T: Clone,
          S: Sink<T>
{
    pub fn new(sink: S) -> Self {
        RefCloningSink {
            sink:sink,
            _ph: PhantomData
        }
    }
}

impl<'a, T, S> Sink<&'a T> for RefCloningSink<T, S>
    where T: Clone+'a,
          S: Sink<T>
{
    #[inline(always)]
    fn consume(&mut self, x: &'a T) {
        self.sink.consume(x.clone());
    }
}

/// Adapter: accepts `(T, ())` pairs and forwards only the first element.
pub struct SinkAdapter<T, S: Sink<T>> {
    sink: S,
    // Marks the logical item type; no `T` is stored directly.
    _ph: PhantomData<T>
}

impl<T, S: Sink<T>> SinkAdapter<T, S> {
    #[inline]
    pub fn new(sink: S) -> Self {
        SinkAdapter {
            sink: sink,
            _ph: PhantomData
        }
    }
}

impl<T, S: Sink<T>> Sink<(T, ())> for SinkAdapter<T, S> {
    #[inline(always)]
    fn consume(&mut self, entry: (T, ())) {
        self.sink.consume(entry.0)
    }
}

/// Adapter: accepts `&(T, ())` pairs and forwards a reference to the first
/// element.
pub struct RefSinkAdapter<'a, T: 'a, S: Sink<&'a T>> {
    sink: S,
    // Ties the adapter to the borrowed item lifetime.
    _ph: PhantomData<&'a T>
}

impl<'a, T: 'a, S: Sink<&'a T>> RefSinkAdapter<'a, T, S> {
    #[inline]
    pub fn new(sink: S) -> Self {
        RefSinkAdapter {
            sink: sink,
            _ph: PhantomData
        }
    }
}

impl<'a, T: 'a, S: Sink<&'a T>> Sink<&'a (T, ())> for RefSinkAdapter<'a, T, S> {
    #[inline]
    fn consume(&mut self, entry: &'a (T, ())) {
        self.sink.consume(&entry.0)
    }
}

/// Wrapper that counts how many items passed through to the inner sink.
pub struct CountingSink<T, S: Sink<T>> {
    sink: S,
    // Number of items consumed so far.
    count: usize,
    // Marks the logical item type; no `T` is stored directly.
    _ph: PhantomData<T>
}

impl<T, S: Sink<T>> CountingSink<T, S> {
    #[inline]
    pub fn new(sink: S) -> Self {
        CountingSink {
            sink: sink,
            count: 0,
            _ph: PhantomData
        }
    }

    /// Number of items consumed so far.
    pub fn count(&self) -> usize {
        self.count
    }
}

impl<T, S: Sink<T>> Sink<T> for CountingSink<T, S> {
    #[inline]
    fn consume(&mut self, entry: T) {
        self.count += 1;
        self.sink.consume(entry)
    }
}

// Push without growing: write `item` into the next free slot and bump `len`.
// The caller must make sure output.len() < output.capacity().
// In release builds only the debug_assert guards this precondition, so a
// caller violating it writes out of bounds (undefined behavior).
#[inline(always)]
pub fn consume_unchecked<T>(output: &mut Vec<T>, item: T) {
    unsafe {
        // SAFETY: requires len < capacity (caller contract, checked in debug
        // builds below). Then index `len` is within the allocation, the slot
        // is uninitialized, and `ptr::write` avoids dropping stale memory.
        let len = output.len();
        debug_assert!(len < output.capacity());
        output.set_len(len + 1);
        let p = output.get_unchecked_mut(len);
        ptr::write(p, item);
    }
}
19.47619
80
0.555318
3947ee9b4e4bccebcca6374613834d7c9b1c70dd
14,720
// Generated from definition io.k8s.api.core.v1.PodStatus /// PodStatus represents information about the status of a pod. Status may trail the actual state of a system, especially if the node that hosts the pod cannot contact the control plane. #[derive(Clone, Debug, Default, PartialEq)] pub struct PodStatus { /// Current service state of pod. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#pod-conditions pub conditions: Option<Vec<crate::v1_16::api::core::v1::PodCondition>>, /// The list has one entry per container in the manifest. Each entry is currently the output of `docker inspect`. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#pod-and-container-status pub container_statuses: Option<Vec<crate::v1_16::api::core::v1::ContainerStatus>>, /// Status for any ephemeral containers that have run in this pod. This field is alpha-level and is only populated by servers that enable the EphemeralContainers feature. pub ephemeral_container_statuses: Option<Vec<crate::v1_16::api::core::v1::ContainerStatus>>, /// IP address of the host to which the pod is assigned. Empty if not yet scheduled. pub host_ip: Option<String>, /// The list has one entry per init container in the manifest. The most recent successful init container will have ready = true, the most recently started container will have startTime set. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#pod-and-container-status pub init_container_statuses: Option<Vec<crate::v1_16::api::core::v1::ContainerStatus>>, /// A human readable message indicating details about why the pod is in this condition. pub message: Option<String>, /// nominatedNodeName is set only when this pod preempts other pods on the node, but it cannot be scheduled right away as preemption victims receive their graceful termination periods. This field does not guarantee that the pod will be scheduled on this node. 
Scheduler may decide to place the pod elsewhere if other nodes become available sooner. Scheduler may also decide to give the resources on this node to a higher priority pod that is created after preemption. As a result, this field may be different than PodSpec.nodeName when the pod is scheduled. pub nominated_node_name: Option<String>, /// The phase of a Pod is a simple, high-level summary of where the Pod is in its lifecycle. The conditions array, the reason and message fields, and the individual container status arrays contain more detail about the pod's status. There are five possible phase values: /// /// Pending: The pod has been accepted by the Kubernetes system, but one or more of the container images has not been created. This includes time before being scheduled as well as time spent downloading images over the network, which could take a while. Running: The pod has been bound to a node, and all of the containers have been created. At least one container is still running, or is in the process of starting or restarting. Succeeded: All containers in the pod have terminated in success, and will not be restarted. Failed: All containers in the pod have terminated, and at least one container has terminated in failure. The container either exited with non-zero status or was terminated by the system. Unknown: For some reason the state of the pod could not be obtained, typically due to an error in communicating with the host of the pod. /// /// More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#pod-phase pub phase: Option<String>, /// IP address allocated to the pod. Routable at least within the cluster. Empty if not yet allocated. pub pod_ip: Option<String>, /// podIPs holds the IP addresses allocated to the pod. If this field is specified, the 0th entry must match the podIP field. Pods may be allocated at most 1 value for each of IPv4 and IPv6. This list is empty if no IPs have been allocated yet. 
pub pod_ips: Option<Vec<crate::v1_16::api::core::v1::PodIP>>, /// The Quality of Service (QOS) classification assigned to the pod based on resource requirements See PodQOSClass type for available QOS classes More info: https://git.k8s.io/community/contributors/design-proposals/node/resource-qos.md pub qos_class: Option<String>, /// A brief CamelCase message indicating details about why the pod is in this state. e.g. 'Evicted' pub reason: Option<String>, /// RFC 3339 date and time at which the object was acknowledged by the Kubelet. This is before the Kubelet pulled the container image(s) for the pod. pub start_time: Option<crate::v1_16::apimachinery::pkg::apis::meta::v1::Time>, } impl<'de> serde::Deserialize<'de> for PodStatus { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> { #[allow(non_camel_case_types)] enum Field { Key_conditions, Key_container_statuses, Key_ephemeral_container_statuses, Key_host_ip, Key_init_container_statuses, Key_message, Key_nominated_node_name, Key_phase, Key_pod_ip, Key_pod_ips, Key_qos_class, Key_reason, Key_start_time, Other, } impl<'de> serde::Deserialize<'de> for Field { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> { struct Visitor; impl<'de> serde::de::Visitor<'de> for Visitor { type Value = Field; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "field identifier") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error { Ok(match v { "conditions" => Field::Key_conditions, "containerStatuses" => Field::Key_container_statuses, "ephemeralContainerStatuses" => Field::Key_ephemeral_container_statuses, "hostIP" => Field::Key_host_ip, "initContainerStatuses" => Field::Key_init_container_statuses, "message" => Field::Key_message, "nominatedNodeName" => Field::Key_nominated_node_name, "phase" => Field::Key_phase, "podIP" => Field::Key_pod_ip, "podIPs" => Field::Key_pod_ips, 
"qosClass" => Field::Key_qos_class, "reason" => Field::Key_reason, "startTime" => Field::Key_start_time, _ => Field::Other, }) } } deserializer.deserialize_identifier(Visitor) } } struct Visitor; impl<'de> serde::de::Visitor<'de> for Visitor { type Value = PodStatus; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "struct PodStatus") } fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> { let mut value_conditions: Option<Vec<crate::v1_16::api::core::v1::PodCondition>> = None; let mut value_container_statuses: Option<Vec<crate::v1_16::api::core::v1::ContainerStatus>> = None; let mut value_ephemeral_container_statuses: Option<Vec<crate::v1_16::api::core::v1::ContainerStatus>> = None; let mut value_host_ip: Option<String> = None; let mut value_init_container_statuses: Option<Vec<crate::v1_16::api::core::v1::ContainerStatus>> = None; let mut value_message: Option<String> = None; let mut value_nominated_node_name: Option<String> = None; let mut value_phase: Option<String> = None; let mut value_pod_ip: Option<String> = None; let mut value_pod_ips: Option<Vec<crate::v1_16::api::core::v1::PodIP>> = None; let mut value_qos_class: Option<String> = None; let mut value_reason: Option<String> = None; let mut value_start_time: Option<crate::v1_16::apimachinery::pkg::apis::meta::v1::Time> = None; while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? 
{ match key { Field::Key_conditions => value_conditions = serde::de::MapAccess::next_value(&mut map)?, Field::Key_container_statuses => value_container_statuses = serde::de::MapAccess::next_value(&mut map)?, Field::Key_ephemeral_container_statuses => value_ephemeral_container_statuses = serde::de::MapAccess::next_value(&mut map)?, Field::Key_host_ip => value_host_ip = serde::de::MapAccess::next_value(&mut map)?, Field::Key_init_container_statuses => value_init_container_statuses = serde::de::MapAccess::next_value(&mut map)?, Field::Key_message => value_message = serde::de::MapAccess::next_value(&mut map)?, Field::Key_nominated_node_name => value_nominated_node_name = serde::de::MapAccess::next_value(&mut map)?, Field::Key_phase => value_phase = serde::de::MapAccess::next_value(&mut map)?, Field::Key_pod_ip => value_pod_ip = serde::de::MapAccess::next_value(&mut map)?, Field::Key_pod_ips => value_pod_ips = serde::de::MapAccess::next_value(&mut map)?, Field::Key_qos_class => value_qos_class = serde::de::MapAccess::next_value(&mut map)?, Field::Key_reason => value_reason = serde::de::MapAccess::next_value(&mut map)?, Field::Key_start_time => value_start_time = serde::de::MapAccess::next_value(&mut map)?, Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; }, } } Ok(PodStatus { conditions: value_conditions, container_statuses: value_container_statuses, ephemeral_container_statuses: value_ephemeral_container_statuses, host_ip: value_host_ip, init_container_statuses: value_init_container_statuses, message: value_message, nominated_node_name: value_nominated_node_name, phase: value_phase, pod_ip: value_pod_ip, pod_ips: value_pod_ips, qos_class: value_qos_class, reason: value_reason, start_time: value_start_time, }) } } deserializer.deserialize_struct( "PodStatus", &[ "conditions", "containerStatuses", "ephemeralContainerStatuses", "hostIP", "initContainerStatuses", "message", "nominatedNodeName", "phase", "podIP", "podIPs", 
"qosClass", "reason", "startTime", ], Visitor, ) } } impl serde::Serialize for PodStatus { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer { let mut state = serializer.serialize_struct( "PodStatus", self.conditions.as_ref().map_or(0, |_| 1) + self.container_statuses.as_ref().map_or(0, |_| 1) + self.ephemeral_container_statuses.as_ref().map_or(0, |_| 1) + self.host_ip.as_ref().map_or(0, |_| 1) + self.init_container_statuses.as_ref().map_or(0, |_| 1) + self.message.as_ref().map_or(0, |_| 1) + self.nominated_node_name.as_ref().map_or(0, |_| 1) + self.phase.as_ref().map_or(0, |_| 1) + self.pod_ip.as_ref().map_or(0, |_| 1) + self.pod_ips.as_ref().map_or(0, |_| 1) + self.qos_class.as_ref().map_or(0, |_| 1) + self.reason.as_ref().map_or(0, |_| 1) + self.start_time.as_ref().map_or(0, |_| 1), )?; if let Some(value) = &self.conditions { serde::ser::SerializeStruct::serialize_field(&mut state, "conditions", value)?; } if let Some(value) = &self.container_statuses { serde::ser::SerializeStruct::serialize_field(&mut state, "containerStatuses", value)?; } if let Some(value) = &self.ephemeral_container_statuses { serde::ser::SerializeStruct::serialize_field(&mut state, "ephemeralContainerStatuses", value)?; } if let Some(value) = &self.host_ip { serde::ser::SerializeStruct::serialize_field(&mut state, "hostIP", value)?; } if let Some(value) = &self.init_container_statuses { serde::ser::SerializeStruct::serialize_field(&mut state, "initContainerStatuses", value)?; } if let Some(value) = &self.message { serde::ser::SerializeStruct::serialize_field(&mut state, "message", value)?; } if let Some(value) = &self.nominated_node_name { serde::ser::SerializeStruct::serialize_field(&mut state, "nominatedNodeName", value)?; } if let Some(value) = &self.phase { serde::ser::SerializeStruct::serialize_field(&mut state, "phase", value)?; } if let Some(value) = &self.pod_ip { serde::ser::SerializeStruct::serialize_field(&mut state, "podIP", value)?; } 
if let Some(value) = &self.pod_ips { serde::ser::SerializeStruct::serialize_field(&mut state, "podIPs", value)?; } if let Some(value) = &self.qos_class { serde::ser::SerializeStruct::serialize_field(&mut state, "qosClass", value)?; } if let Some(value) = &self.reason { serde::ser::SerializeStruct::serialize_field(&mut state, "reason", value)?; } if let Some(value) = &self.start_time { serde::ser::SerializeStruct::serialize_field(&mut state, "startTime", value)?; } serde::ser::SerializeStruct::end(state) } }
59.354839
851
0.611753
bbf6502cdf6e128434c88b6929e1e6ba48d0fece
2,436
// Copyright 2015 Pierre Talbot (IRCAM) // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use rust; use rust::Token as rtok; use rust::{TokenAndSpan, Span}; pub fn generate_rust_code<'a, 'b>(cx: &'a rust::ExtCtxt<'b>, tokens: Vec<TokenAndSpan>) -> Box<rust::MacResult + 'a> { let reader = Box::new(TokenAndSpanArray::new( &cx.parse_sess().span_diagnostic, tokens)); let mut parser = rust::Parser::new(cx.parse_sess(), cx.cfg(), reader); let expr = parser.parse_expr().unwrap(); cx.parse_sess.span_diagnostic.note_without_error( &rust::expr_to_string(&expr)); rust::MacEager::expr(expr) } /// TokenAndSpanArray is used to feed the parser with tokens. struct TokenAndSpanArray<'a> { sp_diag: &'a rust::Handler, tokens: Vec<TokenAndSpan>, current_idx: usize } impl<'a> TokenAndSpanArray<'a> { fn new(sp_diag: &'a rust::Handler, tokens: Vec<TokenAndSpan>) -> TokenAndSpanArray<'a> { TokenAndSpanArray { sp_diag: sp_diag, tokens: tokens, current_idx: 0 } } fn current(&self) -> TokenAndSpan { self.tokens[self.current_idx].clone() } fn current_span(&self) -> Span { self.current().sp } } impl<'a> rust::lexer::Reader for TokenAndSpanArray<'a> { fn is_eof(&self) -> bool { self.current().tok == rtok::Eof } fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> { // See `Reader::try_next_token` impl of `TtReader`, it cannot fail. Hypothesis: Probably because EOF is itself a token. 
Ok(self.next_token()) } fn next_token(&mut self) -> TokenAndSpan { let cur = self.current(); self.current_idx = self.current_idx + 1; cur } fn fatal(&self, m: &str) -> rust::FatalError { self.sp_diag.span_fatal(self.current_span(), m) } fn emit_fatal_errors(&mut self) {} fn err(&self, m: &str) { self.sp_diag.span_err(self.current_span(), m); } fn peek(&self) -> TokenAndSpan { self.current() } }
27.066667
123
0.675698
6a6b75fdfe8f00af131b4581ad57c65eba79c9e5
8,120
extern crate arraymap; use arraymap::ArrayMap; mod bindings_raw { #![allow(non_upper_case_globals, non_camel_case_types, non_snake_case, dead_code)] include!(concat!(env!("OUT_DIR"), "/bindings.rs")); } use bindings_raw::root as bindings; pub struct SDK; pub struct Effect { id: bindings::RZEFFECTID, } impl Effect { fn new(id: bindings::RZEFFECTID) -> Effect { Effect{ id: id, } } } impl Drop for Effect { fn drop(&mut self) { unsafe { bindings::ChromaSDKDeleteEffect(self.id); } } } #[derive(Clone, Copy, Debug)] pub struct Color(bindings::RZCOLOR); impl Color { pub fn from_rgb(r: u8, g: u8, b: u8) -> Color { Color((r as bindings::RZCOLOR) | ((g as bindings::RZCOLOR) << 8) | ((b as bindings::RZCOLOR) << 16)) } } pub struct Error { pub code: bindings::RZRESULT, } pub enum ChromaLinkEffect { Static{color: Color}, Custom{color: [Color; bindings::ChromaSDK::ChromaLink::MAX_LEDS]}, } pub enum HeadsetEffect { Static{color: Color}, Custom{color: [Color; bindings::ChromaSDK::Headset::MAX_LEDS]}, } pub enum KeyboardEffect { Static{color: Color}, Custom{color: [[Color; bindings::ChromaSDK::Keyboard::MAX_COLUMN]; bindings::ChromaSDK::Keyboard::MAX_ROW]}, } pub enum KeypadEffect { Static{color: Color}, Custom{color: [[Color; bindings::ChromaSDK::Keypad::MAX_COLUMN]; bindings::ChromaSDK::Keypad::MAX_ROW]}, } pub enum MouseEffect { Static{color: Color}, Custom{color: [[Color; bindings::ChromaSDK::Mouse::MAX_COLUMN]; bindings::ChromaSDK::Mouse::MAX_ROW]}, } pub enum MousepadEffect { Static{color: Color}, Custom{color: [Color; bindings::ChromaSDK::Mousepad::MAX_LEDS]}, } impl SDK { pub fn new() -> Option<SDK> { unsafe { if bindings::Load() && bindings::ChromaSDKInit() == bindings::ERROR_SUCCESS as _ { Some(SDK{}) } else { None } } } pub fn create_chroma_link_effect(&mut self, effect: ChromaLinkEffect) -> Result<Effect, Error> { let mut effect_id = bindings::RZEFFECTID{Data1: 0, Data2: 0, Data3: 0, Data4: [0, 0, 0, 0, 0, 0, 0, 0]}; let code = unsafe { match effect { 
ChromaLinkEffect::Static{color} => { let mut param = bindings::ChromaSDK::ChromaLink::STATIC_EFFECT_TYPE{Color: color.0}; bindings::ChromaSDKCreateChromaLinkEffect(bindings::ChromaSDK::ChromaLink::EFFECT_TYPE_CHROMA_STATIC, &mut param as *mut _ as *mut std::ffi::c_void, &mut effect_id) } ChromaLinkEffect::Custom{color} => { let mut param = bindings::ChromaSDK::ChromaLink::CUSTOM_EFFECT_TYPE{Color: color.map(|v| v.0)}; bindings::ChromaSDKCreateChromaLinkEffect(bindings::ChromaSDK::ChromaLink::EFFECT_TYPE_CHROMA_CUSTOM, &mut param as *mut _ as *mut std::ffi::c_void, &mut effect_id) } } }; if code == bindings::ERROR_SUCCESS as _ { Ok(Effect::new(effect_id)) } else { Err(Error{code: code}) } } pub fn create_headset_effect(&mut self, effect: HeadsetEffect) -> Result<Effect, Error> { let mut effect_id = bindings::RZEFFECTID{Data1: 0, Data2: 0, Data3: 0, Data4: [0, 0, 0, 0, 0, 0, 0, 0]}; let code = unsafe { match effect { HeadsetEffect::Static{color} => { let mut param = bindings::ChromaSDK::Headset::STATIC_EFFECT_TYPE{Color: color.0}; bindings::ChromaSDKCreateHeadsetEffect(bindings::ChromaSDK::Headset::EFFECT_TYPE_CHROMA_STATIC, &mut param as *mut _ as *mut std::ffi::c_void, &mut effect_id) } HeadsetEffect::Custom{color} => { let mut param = bindings::ChromaSDK::Headset::CUSTOM_EFFECT_TYPE{Color: color.map(|v| v.0)}; bindings::ChromaSDKCreateHeadsetEffect(bindings::ChromaSDK::Headset::EFFECT_TYPE_CHROMA_CUSTOM, &mut param as *mut _ as *mut std::ffi::c_void, &mut effect_id) } } }; if code == bindings::ERROR_SUCCESS as _ { Ok(Effect::new(effect_id)) } else { Err(Error{code: code}) } } pub fn create_keyboard_effect(&mut self, effect: KeyboardEffect) -> Result<Effect, Error> { let mut effect_id = bindings::RZEFFECTID{Data1: 0, Data2: 0, Data3: 0, Data4: [0, 0, 0, 0, 0, 0, 0, 0]}; let code = unsafe { match effect { KeyboardEffect::Static{color} => { let mut param = bindings::ChromaSDK::Keyboard::STATIC_EFFECT_TYPE{Color: color.0}; 
bindings::ChromaSDKCreateKeyboardEffect(bindings::ChromaSDK::Keyboard::EFFECT_TYPE_CHROMA_STATIC, &mut param as *mut _ as *mut std::ffi::c_void, &mut effect_id) } KeyboardEffect::Custom{color} => { let mut param = bindings::ChromaSDK::Keyboard::CUSTOM_EFFECT_TYPE{Color: color.map(|v| v.map(|v| v.0))}; bindings::ChromaSDKCreateKeyboardEffect(bindings::ChromaSDK::Keyboard::EFFECT_TYPE_CHROMA_CUSTOM, &mut param as *mut _ as *mut std::ffi::c_void, &mut effect_id) } } }; if code == bindings::ERROR_SUCCESS as _ { Ok(Effect::new(effect_id)) } else { Err(Error{code: code}) } } pub fn create_keypad_effect(&mut self, effect: KeypadEffect) -> Result<Effect, Error> { let mut effect_id = bindings::RZEFFECTID{Data1: 0, Data2: 0, Data3: 0, Data4: [0, 0, 0, 0, 0, 0, 0, 0]}; let code = unsafe { match effect { KeypadEffect::Static{color} => { let mut param = bindings::ChromaSDK::Keypad::STATIC_EFFECT_TYPE{Color: color.0}; bindings::ChromaSDKCreateKeypadEffect(bindings::ChromaSDK::Keypad::EFFECT_TYPE_CHROMA_STATIC, &mut param as *mut _ as *mut std::ffi::c_void, &mut effect_id) } KeypadEffect::Custom{color} => { let mut param = bindings::ChromaSDK::Keypad::CUSTOM_EFFECT_TYPE{Color: color.map(|v| v.map(|v| v.0))}; bindings::ChromaSDKCreateKeypadEffect(bindings::ChromaSDK::Keypad::EFFECT_TYPE_CHROMA_CUSTOM, &mut param as *mut _ as *mut std::ffi::c_void, &mut effect_id) } } }; if code == bindings::ERROR_SUCCESS as _ { Ok(Effect::new(effect_id)) } else { Err(Error{code: code}) } } pub fn create_mouse_effect(&mut self, effect: MouseEffect) -> Result<Effect, Error> { let mut effect_id = bindings::RZEFFECTID{Data1: 0, Data2: 0, Data3: 0, Data4: [0, 0, 0, 0, 0, 0, 0, 0]}; let code = unsafe { match effect { MouseEffect::Static{color} => { let mut param = bindings::ChromaSDK::Mouse::STATIC_EFFECT_TYPE{LEDId: bindings::ChromaSDK::Mouse::RZLED_RZLED_ALL, Color: color.0}; bindings::ChromaSDKCreateMouseEffect(bindings::ChromaSDK::Mouse::EFFECT_TYPE_CHROMA_STATIC, &mut param as *mut _ as *mut 
std::ffi::c_void, &mut effect_id) } MouseEffect::Custom{color} => { let mut param = bindings::ChromaSDK::Mouse::CUSTOM_EFFECT_TYPE2{Color: color.map(|v| v.map(|v| v.0))}; bindings::ChromaSDKCreateMouseEffect(bindings::ChromaSDK::Mouse::EFFECT_TYPE_CHROMA_CUSTOM, &mut param as *mut _ as *mut std::ffi::c_void, &mut effect_id) } } }; if code == bindings::ERROR_SUCCESS as _ { Ok(Effect::new(effect_id)) } else { Err(Error{code: code}) } } pub fn create_mousepad_effect(&mut self, effect: MousepadEffect) -> Result<Effect, Error> { let mut effect_id = bindings::RZEFFECTID{Data1: 0, Data2: 0, Data3: 0, Data4: [0, 0, 0, 0, 0, 0, 0, 0]}; let code = unsafe { match effect { MousepadEffect::Static{color} => { let mut param = bindings::ChromaSDK::Mousepad::STATIC_EFFECT_TYPE{Color: color.0}; bindings::ChromaSDKCreateMousepadEffect(bindings::ChromaSDK::Mousepad::EFFECT_TYPE_CHROMA_STATIC, &mut param as *mut _ as *mut std::ffi::c_void, &mut effect_id) } MousepadEffect::Custom{color} => { let mut param = bindings::ChromaSDK::Mousepad::CUSTOM_EFFECT_TYPE{Color: color.map(|v| v.0)}; bindings::ChromaSDKCreateMousepadEffect(bindings::ChromaSDK::Mousepad::EFFECT_TYPE_CHROMA_CUSTOM, &mut param as *mut _ as *mut std::ffi::c_void, &mut effect_id) } } }; if code == bindings::ERROR_SUCCESS as _ { Ok(Effect::new(effect_id)) } else { Err(Error{code: code}) } } pub fn set_effect(&mut self, effect: &Effect) -> Result<(), Error> { let code = unsafe { bindings::ChromaSDKSetEffect(effect.id) }; if code == bindings::ERROR_SUCCESS as _ { Ok(()) } else { Err(Error{code: code}) } } } impl Drop for SDK { fn drop(&mut self) { unsafe { bindings::ChromaSDKUnInit(); } } }
34.849785
169
0.678571
e9ca583005c12f5d43480260b4f4635b321c2768
209
enum option_<T> { none_, some_(T), } impl<T> option_<T> { fn foo() -> bool { true } } enum option__ { none__, some__(int) } impl option__ { fn foo() -> bool { true } } fn main() { }
10.45
29
0.511962
fec0868c540a8de49d93ee285f9bf31e27d4b2dd
743
// Copyright 2012-2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(const_let)] trait Trt {} struct Str {} impl Trt for Str {} const _ : () = { //~^ ERROR is unstable use std::marker::PhantomData; struct ImplementsTrait<T: Trt>(PhantomData<T>); let _ = ImplementsTrait::<Str>(PhantomData); () }; fn main() {}
28.576923
69
0.697174
e5ad397474d17e72c7f6d7ae92cdc9931491d9bd
35,138
// Copyright 2016 PingCAP, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. // Copyright 2015 CoreOS, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
use crate::test_util::*; use harness::*; use raft::eraftpb::*; use raft::storage::MemStorage; use raft::*; use slog::Logger; pub fn commit_noop_entry(r: &mut Interface, s: &MemStorage) { assert_eq!(r.state, StateRole::Leader); r.bcast_append(); // simulate the response of MsgAppend let msgs = r.read_messages(); for m in msgs { assert_eq!(m.get_msg_type(), MessageType::MsgAppend); assert_eq!(m.entries.len(), 1); assert!(m.entries[0].data.is_empty()); r.step(accept_and_reply(&m)).expect(""); } // ignore further messages to refresh followers' commit index r.read_messages(); s.wl() .append(r.raft_log.unstable_entries().unwrap_or(&[])) .expect(""); let committed = r.raft_log.committed; r.commit_apply(committed); let (last_index, last_term) = (r.raft_log.last_index(), r.raft_log.last_term()); r.raft_log.stable_to(last_index, last_term); } fn accept_and_reply(m: &Message) -> Message { assert_eq!(m.get_msg_type(), MessageType::MsgAppend); let mut reply = new_message(m.to, m.from, MessageType::MsgAppendResponse, 0); reply.term = m.term; reply.index = m.index + m.entries.len() as u64; reply } #[test] fn test_follower_update_term_from_message() { let l = default_logger(); test_update_term_from_message(StateRole::Follower, &l); } #[test] fn test_candidate_update_term_from_message() { let l = default_logger(); test_update_term_from_message(StateRole::Candidate, &l); } #[test] fn test_leader_update_term_from_message() { let l = default_logger(); test_update_term_from_message(StateRole::Leader, &l); } // test_update_term_from_message tests that if one server’s current term is // smaller than the other’s, then it updates its current term to the larger // value. If a candidate or leader discovers that its term is out of date, // it immediately reverts to follower state. 
// Reference: section 5.1 fn test_update_term_from_message(state: StateRole, l: &Logger) { let mut r = new_test_raft(1, vec![1, 2, 3], 10, 1, new_storage(), &l); match state { StateRole::Follower => r.become_follower(1, 2), StateRole::PreCandidate => r.become_pre_candidate(), StateRole::Candidate => r.become_candidate(), StateRole::Leader => { r.become_candidate(); r.become_leader(); } } let mut m = new_message(0, 0, MessageType::MsgAppend, 0); m.term = 3; r.step(m).expect(""); assert_eq!(r.term, 3); assert_eq!(r.state, StateRole::Follower); } // test_start_as_follower tests that when servers start up, they begin as followers. // Reference: section 5.2 #[test] fn test_start_as_follower() { let l = default_logger(); let r = new_test_raft(1, vec![1, 2, 3], 10, 1, new_storage(), &l); assert_eq!(r.state, StateRole::Follower); } // test_leader_bcast_beat tests that if the leader receives a heartbeat tick, // it will send a msgApp with m.Index = 0, m.LogTerm=0 and empty entries as // heartbeat to all followers. 
// Reference: section 5.2 #[test] fn test_leader_bcast_beat() { let l = default_logger(); // heartbeat interval let hi = 1; let mut r = new_test_raft(1, vec![1, 2, 3], 10, hi, new_storage(), &l); r.become_candidate(); r.become_leader(); for i in 0..10 { r.append_entry(&mut [empty_entry(0, i as u64 + 1)]); } for _ in 0..hi { r.tick(); } let mut msgs = r.read_messages(); msgs.sort_by_key(|m| format!("{:?}", m)); let new_message_ext = |f, to| { let mut m = new_message(f, to, MessageType::MsgHeartbeat, 0); m.term = 2; m.commit = 0; m }; let expect_msgs = vec![new_message_ext(1, 2), new_message_ext(1, 3)]; assert_eq!(msgs, expect_msgs); } #[test] fn test_follower_start_election() { let l = default_logger(); test_nonleader_start_election(StateRole::Follower, &l); } #[test] fn test_candidate_start_new_election() { let l = default_logger(); test_nonleader_start_election(StateRole::Candidate, &l); } // test_nonleader_start_election tests that if a follower receives no communication // over election timeout, it begins an election to choose a new leader. It // increments its current term and transitions to candidate state. It then // votes for itself and issues RequestVote RPCs in parallel to each of the // other servers in the cluster. // Reference: section 5.2 // Also if a candidate fails to obtain a majority, it will time out and // start a new election by incrementing its term and initiating another // round of RequestVote RPCs. 
// Reference: section 5.2 fn test_nonleader_start_election(state: StateRole, l: &Logger) { // election timeout let et = 10; let mut r = new_test_raft(1, vec![1, 2, 3], et, 1, new_storage(), l); match state { StateRole::Follower => r.become_follower(2, 2), StateRole::Candidate => r.become_candidate(), _ => panic!("Only non-leader role is accepted."), } for _ in 1..2 * et { r.tick(); } assert_eq!(r.term, 3); assert_eq!(r.state, StateRole::Candidate); assert!(r.votes[&r.id]); let mut msgs = r.read_messages(); msgs.sort_by_key(|m| format!("{:?}", m)); let new_message_ext = |f, to| { let mut m = new_message(f, to, MessageType::MsgRequestVote, 0); m.term = 3; m.log_term = 1; m.index = 1; m }; let expect_msgs = vec![new_message_ext(1, 2), new_message_ext(1, 3)]; assert_eq!(msgs, expect_msgs); } // test_leader_election_in_one_round_rpc tests all cases that may happen in // leader election during one round of RequestVote RPC: // a) it wins the election // b) it loses the election // c) it is unclear about the result // Reference: section 5.2 #[test] fn test_leader_election_in_one_round_rpc() { let l = default_logger(); let mut tests = vec![ // win the election when receiving votes from a majority of the servers (1, map!(), StateRole::Leader), (3, map!(2 => true, 3 => true), StateRole::Leader), (3, map!(2 => true), StateRole::Leader), ( 5, map!(2 => true, 3 => true, 4 => true, 5 => true), StateRole::Leader, ), (5, map!(2 => true, 3 => true, 4 => true), StateRole::Leader), (5, map!(2 => true, 3 => true), StateRole::Leader), // return to follower state if it receives vote denial from a majority (3, map!(2 => false, 3 => false), StateRole::Follower), ( 5, map!(2 => false, 3 => false, 4 => false, 5 => false), StateRole::Follower, ), ( 5, map!(2 => true, 3 => false, 4 => false, 5 => false), StateRole::Follower, ), // stay in candidate if it does not obtain the majority (3, map!(), StateRole::Candidate), (5, map!(2 => true), StateRole::Candidate), (5, map!(2 => false, 3 => false), 
StateRole::Candidate), (5, map!(), StateRole::Candidate), ]; for (i, (size, votes, state)) in tests.drain(..).enumerate() { let mut r = new_test_raft(1, (1..=size as u64).collect(), 10, 1, new_storage(), &l); r.step(new_message(1, 1, MessageType::MsgHup, 0)).expect(""); for (id, vote) in votes { let mut m = new_message(id, 1, MessageType::MsgRequestVoteResponse, 0); m.term = r.term; m.reject = !vote; r.step(m).expect(""); } if r.state != state { panic!("#{}: state = {:?}, want {:?}", i, r.state, state); } if r.term != 2 { panic!("#{}: term = {}, want {}", i, r.term, 2); } } } // test_follower_vote tests that each follower will vote for at most one // candidate in a given term, on a first-come-first-served basis. // Reference: section 5.2 #[test] fn test_follower_vote() { let l = default_logger(); let mut tests = vec![ (INVALID_ID, 1, false), (INVALID_ID, 2, false), (1, 1, false), (2, 2, false), (1, 2, true), (2, 1, true), ]; for (i, (vote, nvote, wreject)) in tests.drain(..).enumerate() { let mut r = new_test_raft(1, vec![1, 2, 3], 10, 1, new_storage(), &l); r.load_state(&hard_state(1, 1, vote)); let mut m = new_message(nvote, 1, MessageType::MsgRequestVote, 0); m.term = 1; m.log_term = 1; m.index = 1; r.step(m).expect(""); let msgs = r.read_messages(); let mut m = new_message(1, nvote, MessageType::MsgRequestVoteResponse, 0); m.term = 1; m.reject = wreject; let expect_msgs = vec![m]; if msgs != expect_msgs { panic!("#{}: msgs = {:?}, want {:?}", i, msgs, expect_msgs); } } } // test_candidate_fallback tests that while waiting for votes, // if a candidate receives an AppendEntries RPC from another server claiming // to be leader whose term is at least as large as the candidate's current term, // it recognizes the leader as legitimate and returns to follower state. 
// Reference: section 5.2 #[test] fn test_candidate_fallback() { let l = default_logger(); let new_message_ext = |f, to, term| { let mut m = new_message(f, to, MessageType::MsgAppend, 0); m.term = term; m }; let mut tests = vec![new_message_ext(2, 1, 2), new_message_ext(2, 1, 3)]; for (i, m) in tests.drain(..).enumerate() { let mut r = new_test_raft(1, vec![1, 2, 3], 10, 1, new_storage(), &l); r.step(new_message(1, 1, MessageType::MsgHup, 0)).expect(""); assert_eq!(r.state, StateRole::Candidate); let term = m.term; r.step(m).expect(""); if r.state != StateRole::Follower { panic!( "#{}: state = {:?}, want {:?}", i, r.state, StateRole::Follower ); } if r.term != term { panic!("#{}: term = {}, want {}", i, r.term, term); } } } #[test] fn test_follower_election_timeout_randomized() { let l = default_logger(); test_non_leader_election_timeout_randomized(StateRole::Follower, &l); } #[test] fn test_candidate_election_timeout_randomized() { let l = default_logger(); test_non_leader_election_timeout_randomized(StateRole::Candidate, &l); } // test_non_leader_election_timeout_randomized tests that election timeout for // follower or candidate is randomized. 
// Reference: section 5.2 fn test_non_leader_election_timeout_randomized(state: StateRole, l: &Logger) { let et = 10; let mut r = new_test_raft(1, vec![1, 2, 3], et, 1, new_storage(), l); let mut timeouts = map!(); for _ in 0..1000 * et { let term = r.term; match state { StateRole::Follower => r.become_follower(term + 1, 2), StateRole::Candidate => r.become_candidate(), _ => panic!("only non leader state is accepted!"), } let mut time = 0; while r.read_messages().is_empty() { r.tick(); time += 1; } timeouts.insert(time, true); } assert!(timeouts.len() <= et && timeouts.len() >= et - 1); for d in et + 1..2 * et { assert!(timeouts[&d]); } } #[test] fn test_follower_election_timeout_nonconflict() { let l = default_logger(); test_nonleaders_election_timeout_nonconfict(StateRole::Follower, &l); } #[test] fn test_candidates_election_timeout_nonconf() { let l = default_logger(); test_nonleaders_election_timeout_nonconfict(StateRole::Candidate, &l); } // test_nonleaders_election_timeout_nonconfict tests that in most cases only a // single server(follower or candidate) will time out, which reduces the // likelihood of split vote in the new election. 
// Reference: section 5.2 fn test_nonleaders_election_timeout_nonconfict(state: StateRole, l: &Logger) { let et = 10; let size = 5; let mut rs = Vec::with_capacity(size); let ids: Vec<u64> = (1..=size as u64).collect(); for id in ids.iter().take(size) { rs.push(new_test_raft(*id, ids.clone(), et, 1, new_storage(), l)); } let mut conflicts = 0; for _ in 0..1000 { for r in &mut rs { let term = r.term; match state { StateRole::Follower => r.become_follower(term + 1, INVALID_ID), StateRole::Candidate => r.become_candidate(), _ => panic!("non leader state is expect!"), } } let mut timeout_num = 0; while timeout_num == 0 { for r in &mut rs { r.tick(); if !r.read_messages().is_empty() { timeout_num += 1; } } } // several rafts time out at the same tick if timeout_num > 1 { conflicts += 1; } } assert!(f64::from(conflicts) / 1000.0 <= 0.3); } // test_leader_start_replication tests that when receiving client proposals, // the leader appends the proposal to its log as a new entry, then issues // AppendEntries RPCs in parallel to each of the other servers to replicate // the entry. Also, when sending an AppendEntries RPC, the leader includes // the index and term of the entry in its log that immediately precedes // the new entries. // Also, it writes the new entry into stable storage. 
// Reference: section 5.3 #[test] fn test_leader_start_replication() { let l = default_logger(); let s = new_storage(); let mut r = new_test_raft(1, vec![1, 2, 3], 10, 1, s.clone(), &l); r.become_candidate(); r.become_leader(); commit_noop_entry(&mut r, &s); let li = r.raft_log.last_index(); r.step(new_message(1, 1, MessageType::MsgPropose, 1)) .expect(""); assert_eq!(r.raft_log.last_index(), li + 1); assert_eq!(r.raft_log.committed, li); let mut msgs = r.read_messages(); msgs.sort_by_key(|m| format!("{:?}", m)); let wents = vec![new_entry(2, li + 1, SOME_DATA)]; let new_message_ext = |f, to, ents| { let mut m = new_message(f, to, MessageType::MsgAppend, 0); m.term = 2; m.index = li; m.log_term = 2; m.commit = li; m.entries = ents; m }; let expect_msgs = vec![ new_message_ext(1, 2, wents.clone().into()), new_message_ext(1, 3, wents.clone().into()), ]; assert_eq!(msgs, expect_msgs); assert_eq!(r.raft_log.unstable_entries(), Some(&*wents)); } // test_leader_commit_entry tests that when the entry has been safely replicated, // the leader gives out the applied entries, which can be applied to its state // machine. // Also, the leader keeps track of the highest index it knows to be committed, // and it includes that index in future AppendEntries RPCs so that the other // servers eventually find out. 
// Reference: section 5.3 #[test] fn test_leader_commit_entry() { let l = default_logger(); let s = new_storage(); let mut r = new_test_raft(1, vec![1, 2, 3], 10, 1, s.clone(), &l); r.become_candidate(); r.become_leader(); commit_noop_entry(&mut r, &s); let li = r.raft_log.last_index(); r.step(new_message(1, 1, MessageType::MsgPropose, 1)) .expect(""); for m in r.read_messages() { r.step(accept_and_reply(&m)).expect(""); } assert_eq!(r.raft_log.committed, li + 1); let wents = vec![new_entry(2, li + 1, SOME_DATA)]; assert_eq!(r.raft_log.next_entries(), Some(wents)); let mut msgs = r.read_messages(); msgs.sort_by_key(|m| format!("{:?}", m)); for (i, m) in msgs.drain(..).enumerate() { assert_eq!(i as u64 + 2, m.to); assert_eq!(m.get_msg_type(), MessageType::MsgAppend); assert_eq!(m.commit, li + 1); } } // test_leader_acknowledge_commit tests that a log entry is committed once the // leader that created the entry has replicated it on a majority of the servers. // Reference: section 5.3 #[test] fn test_leader_acknowledge_commit() { let l = default_logger(); let mut tests = vec![ (1, map!(), true), (3, map!(), false), (3, map!(2 => true), true), (3, map!(2 => true, 3 => true), true), (5, map!(), false), (5, map!(2 => true), false), (5, map!(2 => true, 3 => true), true), (5, map!(2 => true, 3 => true, 4 => true), true), (5, map!(2 => true, 3 => true, 4 => true, 5 => true), true), ]; for (i, (size, acceptors, wack)) in tests.drain(..).enumerate() { let s = new_storage(); let mut r = new_test_raft(1, (1..=size).collect(), 10, 1, s.clone(), &l); r.become_candidate(); r.become_leader(); commit_noop_entry(&mut r, &s); let li = r.raft_log.last_index(); r.step(new_message(1, 1, MessageType::MsgPropose, 1)) .expect(""); for m in r.read_messages() { if acceptors.contains_key(&m.to) && acceptors[&m.to] { r.step(accept_and_reply(&m)).expect(""); } } let g = r.raft_log.committed > li; if g ^ wack { panic!("#{}: ack commit = {}, want {}", i, g, wack); } } } // 
test_leader_commit_preceding_entries tests that when leader commits a log entry, // it also commits all preceding entries in the leader’s log, including // entries created by previous leaders. // Also, it applies the entry to its local state machine (in log order). // Reference: section 5.3 #[test] fn test_leader_commit_preceding_entries() { let l = default_logger(); let mut tests = vec![ vec![], vec![empty_entry(2, 2)], vec![empty_entry(1, 2), empty_entry(2, 3)], vec![empty_entry(1, 2)], ]; for (i, mut tt) in tests.drain(..).enumerate() { let mut r = { let store = MemStorage::new_with_conf_state((vec![1, 2, 3], vec![])); store.wl().append(&tt).unwrap(); let cfg = new_test_config(1, 10, 1); new_test_raft_with_config(&cfg, store, &l) }; r.load_state(&hard_state(2, 1, 0)); r.become_candidate(); r.become_leader(); r.step(new_message(1, 1, MessageType::MsgPropose, 1)) .expect(""); for m in r.read_messages() { r.step(accept_and_reply(&m)).expect(""); } let li = tt.len() as u64; tt.append(&mut vec![ empty_entry(3, li + 2), new_entry(3, li + 3, SOME_DATA), ]); let g = r.raft_log.next_entries(); let wg = Some(tt); if g != wg { panic!("#{}: ents = {:?}, want {:?}", i, g, wg); } } } // test_follower_commit_entry tests that once a follower learns that a log entry // is committed, it applies the entry to its local state machine (in log order). 
// Reference: section 5.3 #[test] fn test_follower_commit_entry() { let l = default_logger(); let mut tests = vec![ (vec![new_entry(1, 2, SOME_DATA)], 2), ( vec![ new_entry(1, 2, SOME_DATA), new_entry(1, 3, Some("somedata2")), ], 3, ), ( vec![ new_entry(1, 2, Some("somedata2")), new_entry(1, 3, SOME_DATA), ], 3, ), ( vec![ new_entry(1, 2, SOME_DATA), new_entry(1, 3, Some("somedata2")), ], 2, ), ]; for (i, (ents, commit)) in tests.drain(..).enumerate() { let mut r = new_test_raft(1, vec![1, 2, 3], 10, 1, new_storage(), &l); r.become_follower(1, 2); let mut m = new_message(2, 1, MessageType::MsgAppend, 0); m.term = 1; m.log_term = 1; m.index = 1; m.commit = commit; m.entries = ents.clone().into(); r.step(m).expect(""); if r.raft_log.committed != commit { panic!( "#{}: committed = {}, want {}", i, r.raft_log.committed, commit ); } let wents = Some(ents[..commit as usize - 1].to_vec()); let g = r.raft_log.next_entries(); if g != wents { panic!("#{}: next_ents = {:?}, want {:?}", i, g, wents); } } } // test_follower_check_msg_append tests that if the follower does not find an // entry in its log with the same index and term as the one in AppendEntries RPC, // then it refuses the new entries. Otherwise it replies that it accepts the // append entries. 
// Reference: section 5.3 #[test] fn test_follower_check_msg_append() { let l = default_logger(); let ents = vec![empty_entry(1, 2), empty_entry(2, 3)]; let mut tests = vec![ // match with committed entries (1, 2, 2, false, 0), (ents[0].term, ents[0].index, 2, false, 0), // match with uncommitted entries (ents[1].term, ents[1].index, 3, false, 0), // unmatch with existing entry (ents[0].term, ents[1].index, ents[1].index, true, 3), // unexisting entry ( ents[1].term + 1, ents[1].index + 1, ents[1].index + 1, true, 3, ), ]; for (i, (term, index, windex, wreject, wreject_hint)) in tests.drain(..).enumerate() { let mut r = { let store = MemStorage::new_with_conf_state((vec![1, 2, 3], vec![])); store.wl().append(&ents).unwrap(); let cfg = new_test_config(1, 10, 1); new_test_raft_with_config(&cfg, store, &l) }; r.load_state(&hard_state(1, 1, 0)); r.become_follower(2, 2); let mut m = new_message(2, 1, MessageType::MsgAppend, 0); m.term = 2; m.log_term = term; m.index = index; r.step(m).expect(""); let msgs = r.read_messages(); let mut wm = new_message(1, 2, MessageType::MsgAppendResponse, 0); wm.term = 2; wm.index = windex; if wreject { wm.reject = wreject; wm.reject_hint = wreject_hint; } let expect_msgs = vec![wm]; if msgs != expect_msgs { panic!("#{}: msgs = {:?}, want {:?}", i, msgs, expect_msgs); } } } // test_follower_append_entries tests that when AppendEntries RPC is valid, // the follower will delete the existing conflict entry and all that follow it, // and append any new entries not already in the log. // Also, it writes the new entry into stable storage. 
// Reference: section 5.3 #[test] fn test_follower_append_entries() { let l = default_logger(); let mut tests = vec![ ( 3, 2, vec![empty_entry(3, 4)], vec![empty_entry(1, 2), empty_entry(2, 3), empty_entry(3, 4)], vec![empty_entry(3, 4)], ), ( 2, 1, vec![empty_entry(3, 3), empty_entry(4, 4)], vec![empty_entry(1, 2), empty_entry(3, 3), empty_entry(4, 4)], vec![empty_entry(3, 3), empty_entry(4, 4)], ), ( 1, 1, vec![empty_entry(1, 2)], vec![empty_entry(1, 2), empty_entry(2, 3)], vec![], ), ( 1, 1, vec![empty_entry(3, 2)], vec![empty_entry(3, 2)], vec![empty_entry(3, 2)], ), ]; for (i, (index, term, ents, wents, wunstable)) in tests.drain(..).enumerate() { let mut r = { let store = MemStorage::new_with_conf_state((vec![1, 2, 3], vec![])); store .wl() .append(&[empty_entry(1, 2), empty_entry(2, 3)]) .unwrap(); let cfg = new_test_config(1, 10, 1); new_test_raft_with_config(&cfg, store, &l) }; r.become_follower(2, 2); let mut m = new_message(2, 1, MessageType::MsgAppend, 0); m.term = 2; m.log_term = term; m.index = index; m.entries = ents.into(); r.step(m).expect(""); let g = r.raft_log.all_entries(); if g != wents { panic!("#{}: ents = {:?}, want {:?}", i, g, wents); } let g = r.raft_log.unstable_entries(); let wunstable = if wunstable.is_empty() { None } else { Some(&*wunstable) }; if g != wunstable { panic!("#{}: unstable_entries = {:?}, want {:?}", i, g, wunstable); } } } // test_leader_sync_follower_log tests that the leader could bring a follower's log // into consistency with its own. 
// Reference: section 5.3, figure 7 #[test] fn test_leader_sync_follower_log() { let l = default_logger(); let ents = vec![ empty_entry(1, 2), empty_entry(1, 3), empty_entry(4, 4), empty_entry(4, 5), empty_entry(5, 6), empty_entry(5, 7), empty_entry(6, 8), empty_entry(6, 9), empty_entry(6, 10), ]; let term = 8u64; let mut tests = vec![ vec![ empty_entry(1, 2), empty_entry(1, 3), empty_entry(4, 4), empty_entry(4, 5), empty_entry(5, 6), empty_entry(5, 7), empty_entry(6, 8), empty_entry(6, 9), ], vec![empty_entry(1, 2), empty_entry(1, 3), empty_entry(4, 4)], vec![ empty_entry(1, 2), empty_entry(1, 3), empty_entry(4, 4), empty_entry(4, 5), empty_entry(5, 6), empty_entry(5, 7), empty_entry(6, 8), empty_entry(6, 9), empty_entry(6, 10), empty_entry(6, 11), ], vec![ empty_entry(1, 2), empty_entry(1, 3), empty_entry(4, 4), empty_entry(4, 5), empty_entry(5, 6), empty_entry(5, 7), empty_entry(6, 8), empty_entry(6, 9), empty_entry(6, 10), empty_entry(7, 11), empty_entry(7, 12), ], vec![ empty_entry(1, 2), empty_entry(1, 3), empty_entry(4, 4), empty_entry(4, 5), empty_entry(4, 6), empty_entry(4, 7), ], vec![ empty_entry(1, 2), empty_entry(1, 3), empty_entry(2, 4), empty_entry(2, 5), empty_entry(2, 6), empty_entry(3, 7), empty_entry(3, 8), empty_entry(3, 9), empty_entry(3, 10), empty_entry(3, 11), ], ]; for (i, tt) in tests.drain(..).enumerate() { let mut lead = { let store = MemStorage::new_with_conf_state((vec![1, 2, 3], vec![])); store.wl().append(&ents).unwrap(); let cfg = new_test_config(1, 10, 1); new_test_raft_with_config(&cfg, store, &l) }; let last_index = lead.raft_log.last_index(); lead.load_state(&hard_state(term, last_index, 0)); let mut follower = { let store = MemStorage::new_with_conf_state((vec![1, 2, 3], vec![])); store.wl().append(&tt).unwrap(); let cfg = new_test_config(2, 10, 1); new_test_raft_with_config(&cfg, store, &l) }; follower.load_state(&hard_state(term - 1, 1, 0)); // It is necessary to have a three-node cluster. 
// The second may have more up-to-date log than the first one, so the // first node needs the vote from the third node to become the leader. let mut n = Network::new(vec![Some(lead), Some(follower), NOP_STEPPER], &l); n.send(vec![new_message(1, 1, MessageType::MsgHup, 0)]); // The election occurs in the term after the one we loaded with // lead.load_state above. let mut m = new_message(3, 1, MessageType::MsgRequestVoteResponse, 0); m.term = term + 1; n.send(vec![m]); let mut m = new_message(1, 1, MessageType::MsgPropose, 0); m.entries = vec![Entry::default()].into(); n.send(vec![m]); let lead_str = ltoa(&n.peers[&1].raft_log); let follower_str = ltoa(&n.peers[&2].raft_log); if lead_str != follower_str { panic!( "#{}: lead str: {}, follower_str: {}", i, lead_str, follower_str ); } } } // test_vote_request tests that the vote request includes information about the candidate’s log // and are sent to all of the other nodes. // Reference: section 5.4.1 #[test] fn test_vote_request() { let l = default_logger(); let mut tests = vec![ (vec![empty_entry(1, 2)], 2), (vec![empty_entry(1, 2), empty_entry(2, 3)], 3), ]; for (j, (ents, wterm)) in tests.drain(..).enumerate() { let mut r = new_test_raft(1, vec![1, 2, 3], 10, 1, new_storage(), &l); let mut m = new_message(2, 1, MessageType::MsgAppend, 0); m.term = wterm - 1; m.log_term = 1; // log-term must be greater than 0. 
m.index = 1; m.entries = ents.clone().into(); r.step(m).expect(""); r.read_messages(); for _ in 1..r.election_timeout() * 2 { r.tick_election(); } let mut msgs = r.read_messages(); msgs.sort_by_key(|m| format!("{:?}", m)); if msgs.len() != 2 { panic!("#{}: msg count = {}, want 2", j, msgs.len()); } for (i, m) in msgs.iter().enumerate() { if m.get_msg_type() != MessageType::MsgRequestVote { panic!( "#{}.{}: msg_type = {:?}, want {:?}", j, i, m.get_msg_type(), MessageType::MsgRequestVote ); } if m.to != i as u64 + 2 { panic!("#{}.{}: to = {}, want {}", j, i, m.to, i + 2); } if m.term != wterm { panic!("#{}.{}: term = {}, want {}", j, i, m.term, wterm); } let windex = ents.last().unwrap().index; let wlogterm = ents.last().unwrap().term; if m.index != windex { panic!("#{}.{}: index = {}, want {}", j, i, m.index, windex); } if m.log_term != wlogterm { panic!("#{}.{}: log_term = {}, want {}", j, i, m.log_term, wlogterm); } } } } // test_voter tests the voter denies its vote if its own log is more up-to-date // than that of the candidate. 
// Reference: section 5.4.1 #[test] fn test_voter() { let l = default_logger(); let mut tests = vec![ // same logterm (vec![empty_entry(1, 2)], 1, 2, false), (vec![empty_entry(1, 2)], 1, 3, false), (vec![empty_entry(1, 2), empty_entry(1, 3)], 1, 1, true), // candidate higher logterm (vec![empty_entry(1, 2)], 2, 2, false), (vec![empty_entry(1, 2)], 2, 3, false), (vec![empty_entry(1, 2), empty_entry(1, 3)], 2, 2, false), // voter higher logterm (vec![empty_entry(2, 2)], 1, 2, true), (vec![empty_entry(2, 2)], 1, 3, true), (vec![empty_entry(2, 2), empty_entry(1, 3)], 1, 2, true), ]; for (i, (ents, log_term, index, wreject)) in tests.drain(..).enumerate() { let s = MemStorage::new_with_conf_state((vec![1, 2], vec![])); s.wl().append(&ents).unwrap(); let cfg = new_test_config(1, 10, 1); let mut r = new_test_raft_with_config(&cfg, s, &l); let mut m = new_message(2, 1, MessageType::MsgRequestVote, 0); m.term = 3; m.log_term = log_term; m.index = index; r.step(m).expect(""); let msgs = r.read_messages(); if msgs.len() != 1 { panic!("#{}: msg count = {}, want {}", i, msgs.len(), 1); } if msgs[0].get_msg_type() != MessageType::MsgRequestVoteResponse { panic!( "#{}: msg_type = {:?}, want {:?}", i, msgs[0].get_msg_type(), MessageType::MsgRequestVoteResponse ); } if msgs[0].reject != wreject { panic!("#{}: reject = {}, want {}", i, msgs[0].reject, wreject); } } } // TestLeaderOnlyCommitsLogFromCurrentTerm tests that only log entries from the leader’s // current term are committed by counting replicas. 
// Reference: section 5.4.2 #[test] fn test_leader_only_commits_log_from_current_term() { let l = default_logger(); let ents = vec![empty_entry(1, 2), empty_entry(2, 3)]; let mut tests = vec![ // do not commit log entries in previous terms (1, 1), (2, 1), // commit log in current term (4, 4), ]; for (i, (index, wcommit)) in tests.drain(..).enumerate() { let mut r = { let store = MemStorage::new_with_conf_state((vec![1, 2], vec![])); store.wl().append(&ents).unwrap(); let cfg = new_test_config(1, 10, 1); new_test_raft_with_config(&cfg, store, &l) }; r.load_state(&hard_state(2, 1, 0)); // become leader at term 3 r.become_candidate(); r.become_leader(); r.read_messages(); // propose a entry to current term r.step(new_message(1, 1, MessageType::MsgPropose, 1)) .expect(""); let mut m = new_message(2, 1, MessageType::MsgAppendResponse, 0); m.term = r.term; m.index = index; r.step(m).expect(""); if r.raft_log.committed != wcommit { panic!( "#{}: commit = {}, want {}", i, r.raft_log.committed, wcommit ); } } }
33.464762
95
0.554898
23b1823925258b4d6ca5116c18bfbbcd231e9ecc
3,181
// Copyright 2018 Stefan Kroboth // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. //! TODO Documentation #![recursion_limit = "1024"] #![cfg_attr(feature = "clippy", feature(plugin))] #![cfg_attr(feature = "clippy", plugin(clippy))] #![warn(missing_docs)] #[macro_use] extern crate diesel; extern crate dotenv; #[macro_use] extern crate error_chain; extern crate futures; #[macro_use] extern crate serde_derive; // #[macro_use] extern crate serde_json; extern crate tokio_core; extern crate tokio_io; extern crate tokio_serde_json; /// Errors mod errors; /// Database mod db; use std::net::SocketAddr; // use futures::{Sink, Stream}; use futures::Stream; use tokio_core::reactor::{Core, Handle}; use tokio_core::net::{TcpListener, TcpStream}; use tokio_io::codec::length_delimited; // use serde_json::Value; // use tokio_serde_json::{ReadJson, WriteJson}; use tokio_serde_json::ReadJson; use errors::*; use db::*; #[derive(Serialize, Deserialize, Debug)] enum Message { NewData(DataPoint), Request(Request), } /// Request message #[derive(Serialize, Deserialize, Debug)] pub struct Request { req: String, } /// TODO: use the one from models #[derive(Serialize, Deserialize, Debug)] pub struct DataPoint { /// plot id pub plot_id: i32, /// data pub data: Vec<f64>, } /// Process a socket fn process(socket: TcpStream, _addr: SocketAddr, handle: &Handle) { // delimit frames using a length header let length_delimited = length_delimited::FramedRead::new(socket); // deserialize frames let deserialized = ReadJson::<_, Message>::new(length_delimited).map_err(|e| println!("Err: {:?}", e)); handle.spawn(deserialized.for_each(|msg: Message| { println!("Got: {:?}", msg); match msg { Message::NewData(DataPoint { plot_id, data }) => { 
insert_dataset(plot_id, data[0], data[1]).unwrap(); print_data(1).unwrap(); } Message::Request(Request { req: _req }) => { // println!("{:?}: {:?}", socket, req); // let ld = length_delimited::FramedWrite::new(socket); // let serialized = WriteJson::new(ld); // serialized.send(json!({ // "plot_id": 1_u32, // "data": [ // 1, // 2 // ] // })); } } Ok(()) })); } fn run() -> Result<()> { let mut core = Core::new()?; let handle = core.handle(); // bind a server socket let listener = TcpListener::bind(&"127.0.0.1:17653".parse()?, &handle)?; println!("Listening on {:?}", listener.local_addr()); core.run(listener.incoming().for_each(|(socket, addr)| { process(socket, addr, &handle); Ok(()) })).unwrap(); Ok(()) } fn main() { if let Err(ref e) = run() { println!("error: {}", e); } }
26.289256
92
0.598554
9c7cd3b537130e9dee837cd3d6c9b94dabf5281c
15,047
use cglinalg::{ Vector3, Vector4, Matrix4x4, Quaternion, Radians, ScalarFloat, Unit, }; use core::fmt; pub type PointLight<S> = Light<S, PointLightModel<S>>; pub type SpotLight<S> = Light<S, SpotLightModel<S>>; /// A type with this trait can be used as a lighting model. /// /// A lighting model is the model that a light uses to illuminate objects /// in a scene. pub trait IlluminationModel { /// The type containing the parameters for constructing the lighting model. type Spec; /// Construct a camera model from a description of the /// camera model's parameters. fn from_spec(spec: &Self::Spec) -> Self; } /// This type carries all the information describing the change in attitude of /// a light in a scene in Euclidean space. #[repr(C)] #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct DeltaAttitude<S> { /// The change in the position of the light. pub delta_position: Vector3<S>, /// The change in the orientation of the light about the **negative z-axis**. pub roll: Radians<S>, /// The change in the orientation of the light about the **positive y-axis**. pub yaw: Radians<S>, /// The change in the orientation of the light about the **positive x-axis**. pub pitch: Radians<S>, } impl<S> DeltaAttitude<S> where S: ScalarFloat { /// Construct a new change in attitude. #[inline] pub fn new<A: Into<Radians<S>>>(delta_position: Vector3<S>, roll: A, yaw: A, pitch: A) -> Self { Self { delta_position: delta_position, roll: roll.into(), yaw: yaw.into(), pitch: pitch.into(), } } /// Construct zero change in attitude. 
#[inline] pub fn zero() -> Self { Self { delta_position: Vector3::zero(), roll: Radians::zero(), yaw: Radians::zero(), pitch: Radians::zero(), } } } impl<S> fmt::Display for DeltaAttitude<S> where S: fmt::Display { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!( formatter, "DeltaAttitude [x={}, y={}, z={}, roll={}, yaw={}, pitch={}]", self.delta_position[0], self.delta_position[1], self.delta_position[2], self.roll, self.yaw, self.pitch ) } } #[derive(Clone,)] pub struct PointLightModelSpec<S> { pub ambient: Vector3<S>, pub diffuse: Vector3<S>, pub specular: Vector3<S>, } impl<S> PointLightModelSpec<S> where S: ScalarFloat { /// Construct a new point light specification. #[inline] pub fn new( ambient: Vector3<S>, diffuse: Vector3<S>, specular: Vector3<S>) -> PointLightModelSpec<S> { PointLightModelSpec { ambient: ambient, diffuse: diffuse, specular: specular, } } } #[derive(Copy, Clone, Debug, PartialEq)] pub struct PointLightModel<S> { pub ambient: Vector3<S>, pub diffuse: Vector3<S>, pub specular: Vector3<S>, } impl<S> IlluminationModel for PointLightModel<S> where S: ScalarFloat { type Spec = PointLightModelSpec<S>; #[inline] fn from_spec(spec: &Self::Spec) -> Self { PointLightModel { ambient: spec.ambient, diffuse: spec.diffuse, specular: spec.specular, } } } #[derive(Copy, Clone, Debug, PartialEq)] pub struct SpotLightModelSpec<S> { cutoff: S, outer_cutoff: S, /// The spotlight illumination parameters. ambient: Vector3<S>, diffuse: Vector3<S>, specular: Vector3<S>, /// The spotlight attenuation parameters. 
constant: S, linear: S, quadratic: S, } impl<S> SpotLightModelSpec<S> where S: ScalarFloat { #[inline] pub fn new( cutoff: S, outer_cutoff: S, ambient: Vector3<S>, diffuse: Vector3<S>, specular: Vector3<S>, constant: S, linear: S, quadratic: S) -> SpotLightModelSpec<S> { SpotLightModelSpec { cutoff: cutoff, outer_cutoff: outer_cutoff, ambient: ambient, diffuse: diffuse, specular: specular, constant: constant, linear: linear, quadratic: quadratic, } } } #[derive(Copy, Clone, Debug, PartialEq)] pub struct SpotLightModel<S> { pub cutoff: S, pub outer_cutoff: S, /// The spotlight illumination parameters. pub ambient: Vector3<S>, pub diffuse: Vector3<S>, pub specular: Vector3<S>, /// The spotlight attenuation parameters. pub constant: S, pub linear: S, pub quadratic: S, } impl<S> IlluminationModel for SpotLightModel<S> where S: ScalarFloat { type Spec = SpotLightModelSpec<S>; #[inline] fn from_spec(spec: &Self::Spec) -> Self { SpotLightModel { cutoff: spec.cutoff, outer_cutoff: spec.outer_cutoff, ambient: spec.ambient, diffuse: spec.diffuse, specular: spec.specular, constant: spec.constant, linear: spec.linear, quadratic: spec.quadratic, } } } /// A specification describing a rigid body transformation for the attitude /// (position and orientation) of a spotlight. The spec describes the location, /// local coordinate system, and rotation axis for the light in world space. /// The coordinate transformation is right-handed orthonormal transformation. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct LightAttitudeSpec<S> { /// The location of the light in world space. position: Vector3<S>, /// The direction of the **negative z-axis** (forward axis) of the light. forward: Vector3<S>, /// The direction of the **positive x-axis** (right axis) of the light. right: Vector3<S>, /// The direction of the **positive y-axis** (up axis) of the light. up: Vector3<S>, /// The **axis of rotation** of the light. 
It is not necessary that /// the axis of rotation of the light be the same as one of the coordinate /// axes. axis: Vector3<S>, } impl<S> LightAttitudeSpec<S> where S: ScalarFloat { /// Construct a new camera attitude specification. #[inline] pub fn new( position: Vector3<S>, forward: Vector3<S>, right: Vector3<S>, up: Vector3<S>, axis: Vector3<S>) -> Self { LightAttitudeSpec { position: position, forward: forward, right: right, up: up, axis: axis, } } } impl<S> fmt::Display for LightAttitudeSpec<S> where S: fmt::Display { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!( formatter, "LightAttitudeSpec [position={}, forward={}, right={} up={}, axis={}]", self.position, self.forward, self.right, self.up, self.axis ) } } /// This type contains all the data for tracking the position and orientation /// of a light in world space. The light attitude here uses a right-handed /// coordinate system facing along the light's **negative z-axis**. /// The coordinate system is a right-handed coordinate system with orthonormal /// basis vectors. #[repr(C)] #[derive(Clone, Debug)] struct LightAttitude<S> { /// The world space position of the light. position: Vector3<S>, /// The distance from the light perpendicular to the light's **xy-plane**. forward: Vector4<S>, /// The horizontal axis of the light's coordinate system. right: Vector4<S>, /// The vertical axis of the light's coordinate system. up: Vector4<S>, /// The **axis of rotation** of the light. It is not necessary that /// the axis of rotation of the light be the same as one of the coordinate /// axes. axis: Quaternion<S>, /// The translation matrix mapping objects from the world space coordinate /// frame to the coordinate frame centered at the eye position of the camera. translation_matrix: Matrix4x4<S>, /// The rotation matrix rotating the a vector in world space to the coordinate /// system of the camera's view space. 
rotation_matrix: Matrix4x4<S>, /// The viewing matrix of the camera mapping the complete translation + rotation /// of the camera. view_matrix: Matrix4x4<S>, } impl<S> LightAttitude<S> where S: ScalarFloat { /// Construct the camera's viewing transformation from its specification. #[inline] fn from_spec(spec: &LightAttitudeSpec<S>) -> Self { let axis = Quaternion::from_parts(S::zero(), spec.axis); let translation_matrix = Matrix4x4::from_affine_translation( &(-spec.position) ); let rotation_matrix = Matrix4x4::from(&axis); let view_matrix = rotation_matrix * translation_matrix; Self { position: spec.position, forward: spec.forward.to_homogeneous(), right: spec.right.to_homogeneous(), up: spec.up.to_homogeneous(), axis: axis, translation_matrix: translation_matrix, rotation_matrix: rotation_matrix, view_matrix: view_matrix, } } /// Get the camera's up direction in camera space. #[inline] fn up_axis_eye(&self) -> Vector3<S> { let zero = S::zero(); let one = S::one(); Vector3::new(zero, one, zero) } /// Get the camera's right axis in camera space. #[inline] fn right_axis_eye(&self) -> Vector3<S> { let zero = S::zero(); let one = S::one(); Vector3::new(one, zero ,zero) } /// Get the camera's forward axis in camera space. #[inline] fn forward_axis_eye(&self) -> Vector3<S> { let zero = S::zero(); let one = S::one(); Vector3::new(zero, zero, -one) } /// Update the light position based on the change in the camera's /// attitude. #[inline] fn update_position_eye(&mut self, delta_attitude: &DeltaAttitude<S>) { self.position += self.forward.contract() * -delta_attitude.delta_position.z; self.position += self.up.contract() * delta_attitude.delta_position.y; self.position += self.right.contract() * delta_attitude.delta_position.x; let translation_inv = Matrix4x4::from_affine_translation( &self.position ); self.translation_matrix = translation_inv.inverse().unwrap(); } /// Update the light axes so we can rotate the camera about the new rotation axes. 
#[inline] fn update_orientation_eye(&mut self, delta_attitude: &DeltaAttitude<S>) { let axis_yaw = Unit::from_value(self.up.contract()); let q_yaw = Quaternion::from_axis_angle( &axis_yaw, delta_attitude.yaw ); self.axis = q_yaw * self.axis; let axis_pitch = Unit::from_value(self.right.contract()); let q_pitch = Quaternion::from_axis_angle( &axis_pitch, delta_attitude.pitch ); self.axis = q_pitch * self.axis; let axis_roll = Unit::from_value(self.forward.contract()); let q_roll = Quaternion::from_axis_angle( &axis_roll, delta_attitude.roll, ); self.axis = q_roll * self.axis; let rotation_matrix_inv = Matrix4x4::from(&self.axis); self.forward = rotation_matrix_inv * Vector4::new(S::zero(), S::zero(), -S::one(), S::zero()); self.right = rotation_matrix_inv * Vector4::new(S::one(), S::zero(), S::zero(), S::zero()); self.up = rotation_matrix_inv * Vector4::new(S::zero(), S::one(), S::zero(), S::zero()); self.rotation_matrix = rotation_matrix_inv.inverse().unwrap(); } #[inline] fn update_position_world(&mut self, new_position: &Vector3<S>) { self.position = new_position.clone(); let translation_inv = Matrix4x4::from_affine_translation( &self.position ); self.translation_matrix = translation_inv.inverse().unwrap(); self.view_matrix = self.rotation_matrix * self.translation_matrix; } /// Update the light's attitude based on the input change in light /// attitude. #[inline] fn update(&mut self, delta_attitude: &DeltaAttitude<S>) { self.update_orientation_eye(delta_attitude); self.update_position_eye(delta_attitude); self.view_matrix = self.rotation_matrix * self.translation_matrix; } } pub struct Light<S, M> { model: M, attitude: LightAttitude<S>, } impl<S, M> Light<S, M> where S: ScalarFloat, M: IlluminationModel, { pub fn new( model_spec: &M::Spec, attitude_spec: &LightAttitudeSpec<S>) -> Self { Light { model: M::from_spec(model_spec), attitude: LightAttitude::from_spec(attitude_spec), } } /// Update the light's attitude (i.e. 
position and orientation) in /// the light's local coordinate frame. #[inline] pub fn update_attitude_eye(&mut self, delta_attitude: &DeltaAttitude<S>) { self.attitude.update(delta_attitude); } #[inline] pub fn update_position_world(&mut self, new_position: &Vector3<S>) { self.attitude.update_position_world(new_position); } #[inline] pub fn model(&self) -> &M { &self.model } /// Get the camera's position in world space. #[inline] pub fn position(&self) -> Vector3<S> { self.attitude.position } /// Get the camera's up direction in world space. #[inline] pub fn up_axis(&self) -> Vector3<S> { self.attitude.up.contract() } /// Get the camera's right axis in world space. #[inline] pub fn right_axis(&self) -> Vector3<S> { self.attitude.right.contract() } /// Get the camera's forward axis in world space. #[inline] pub fn forward_axis(&self) -> Vector3<S> { self.attitude.forward.contract() } /// Get the camera's **vertical y-axis** in camera view space. #[inline] pub fn up_axis_eye(&self) -> Vector3<S> { self.attitude.up_axis_eye() } /// Get the camera's **horizontal x-axis** in camera view space. #[inline] pub fn right_axis_eye(&self) -> Vector3<S> { self.attitude.right_axis_eye() } /// Get the camera's **forward z-axis** in camera view space. #[inline] pub fn forward_axis_eye(&self) -> Vector3<S> { self.attitude.forward_axis_eye() } /// Get the camera's axis of rotation. #[inline] pub fn rotation_axis(&self) -> Vector3<S> { self.attitude.axis.v } #[inline] pub fn view_matrix(&self) -> &Matrix4x4<S> { &self.attitude.view_matrix } #[inline] pub fn model_matrix(&self) -> Matrix4x4<S> { Matrix4x4::from_affine_translation(&self.position()) } }
30.645621
102
0.605968
9c470cfd3e240a325a131b58f4a0feef194d8286
2,116
use super::{ParseError, a, b}; #[test] fn parser_star() { rule!(rule:Vec<u32> = a*); assert_eq!(rule("aaa"), Ok((vec![1, 1, 1], ""))); assert_eq!(rule("b"), Ok((vec![], "b"))); assert_eq!(rule(""), Ok((vec![], ""))); } #[test] fn subexpression_star() { rule!(rule:Vec<u32> = (a)*); assert_eq!(rule("aaa"), Ok((vec![1, 1, 1], ""))); } #[test] fn labeled_parser_star() { rule!(rule:Vec<u32> = x:a*); assert_eq!(rule("aaa"), Ok((vec![1, 1, 1], ""))); } #[test] fn labeled_subexpression_star() { rule!(rule:Vec<u32> = x:(a)*); assert_eq!(rule("aaa"), Ok((vec![1, 1, 1], ""))); } #[test] fn parser_star_in_subexpression() { rule!(rule:Vec<u32> = (a*)); assert_eq!(rule("aaa"), Ok((vec![1, 1, 1], ""))); } #[test] fn parser_star_first_in_pair() { rule!(rule:u32 = a* b); assert_eq!(rule("aaab"), Ok((2, ""))); } #[test] fn subexpression_star_first_in_pair() { rule!(rule:u32 = (a)* b); assert_eq!(rule("aaab"), Ok((2, ""))); } #[test] fn string_star() { rule!(rule:Vec<&str> = ["a"]*); assert_eq!(rule("aaa"), Ok((vec!["a", "a", "a"], ""))); } #[test] fn parser_star_into_choice() { rule!(rule:Vec<u32> = a* / b*); assert_eq!(rule("aaa"), Ok((vec![1, 1, 1], ""))); } #[test] fn string_star_into_choice() { rule!(rule:Vec<&str> = ["a"]* / ["b"]*); assert_eq!(rule("aaa"), Ok((vec!["a", "a", "a"], ""))); assert_eq!(rule(""), Ok((vec![], ""))); } #[test] fn subexpression_star_into_choice() { rule!(rule:Vec<u32> = (a)* / (b)*); assert_eq!(rule("aaa"), Ok((vec![1, 1, 1], ""))); } #[test] fn labeled_parser_star_into_action() { rule!(rule:Vec<u32> = x:a* => { x }); assert_eq!(rule("aaa"), Ok((vec![1, 1, 1], ""))); assert_eq!(rule("b"), Ok((vec![], "b"))); assert_eq!(rule(""), Ok((vec![], ""))); } #[test] fn labeled_parser_star_into_sequence_into_action() { rule!(rule:Vec<u32> = x:a* b => { x }); assert_eq!(rule("aaab"), Ok((vec![1, 1, 1], ""))); assert_eq!(rule("aaa"), Err(ParseError)); assert_eq!(rule("c"), Err(ParseError)); assert_eq!(rule(""), Err(ParseError)); }
24.045455
59
0.520794
2fe69a7c4662562cc623a6cc4534bd3b0623fb86
6,494
use chrono::Utc; use deltalake::action::*; use deltalake::*; use maplit::hashmap; use std::collections::HashMap; use std::fs; use std::path::{Path, PathBuf}; use uuid::Uuid; // NOTE: The below is a useful external command for inspecting the written checkpoint schema visually: // parquet-tools inspect tests/data/checkpoints/_delta_log/00000000000000000005.checkpoint.parquet // TODO Add Remove actions to checkpoints tests as well! #[tokio::test] async fn write_simple_checkpoint() { let table_location = "./tests/data/checkpoints"; let table_path = PathBuf::from(table_location); let log_path = table_path.join("_delta_log"); // Delete checkpoint files from previous runs cleanup_checkpoint_files(log_path.as_path()); // Load the delta table at version 5 let mut table = deltalake::open_table_with_version(table_location, 5) .await .unwrap(); // Write a checkpoint checkpoints::create_checkpoint_from_table(&table) .await .unwrap(); // checkpoint should exist let checkpoint_path = log_path.join("00000000000000000005.checkpoint.parquet"); assert!(checkpoint_path.as_path().exists()); // _last_checkpoint should exist and point to the correct version let version = get_last_checkpoint_version(&log_path); assert_eq!(5, version); table.load_version(10).await.unwrap(); checkpoints::create_checkpoint_from_table(&table) .await .unwrap(); // checkpoint should exist let checkpoint_path = log_path.join("00000000000000000010.checkpoint.parquet"); assert!(checkpoint_path.as_path().exists()); // _last_checkpoint should exist and point to the correct version let version = get_last_checkpoint_version(&log_path); assert_eq!(10, version); // delta table should load just fine with the checkpoint in place let table_result = deltalake::open_table(table_location).await.unwrap(); let table = table_result; let files = table.get_files(); assert_eq!(12, files.len()); } fn get_last_checkpoint_version(log_path: &PathBuf) -> i64 { let last_checkpoint_path = log_path.join("_last_checkpoint"); 
assert!(last_checkpoint_path.as_path().exists()); let last_checkpoint_content = fs::read_to_string(last_checkpoint_path.as_path()).unwrap(); let last_checkpoint_content: serde_json::Value = serde_json::from_str(last_checkpoint_content.trim()).unwrap(); last_checkpoint_content .get("version") .unwrap() .as_i64() .unwrap() } fn cleanup_checkpoint_files(log_path: &Path) { let paths = fs::read_dir(log_path).unwrap(); for p in paths { match p { Ok(d) => { let path = d.path(); if path.file_name().unwrap() == "_last_checkpoint" || (path.extension().is_some() && path.extension().unwrap() == "parquet") { fs::remove_file(path).unwrap(); } } _ => {} } } } mod fs_common; #[tokio::test] async fn test_checkpoints_with_tombstones() { let main_branch = false; if main_branch { test_checkpoints_with_tombstones_main().await } else { test_checkpoints_with_tombstones_map_support().await } } async fn test_checkpoints_with_tombstones_main() {} async fn test_checkpoints_with_tombstones_map_support() { let path = "./tests/data/checkpoints_rw"; let log_dir = Path::new(path).join("_delta_log"); fs::create_dir_all(&log_dir).unwrap(); fs_common::cleanup_dir_except(log_dir, vec![]); let schema = Schema::new(vec![SchemaField::new( "id".to_string(), SchemaDataType::primitive("integer".to_string()), true, HashMap::new(), )]); let config = hashmap! 
{ delta_config::TOMBSTONE_RETENTION.key.clone() => Some("interval 1 minute".to_string()) }; let mut table = fs_common::create_test_table(path, schema, config).await; let a1 = add(3 * 60 * 1000); // 3 mins ago, let a2 = add(2 * 60 * 1000); // 2 mins ago, assert_eq!(1, commit_add(&mut table, &a1).await); assert_eq!(2, commit_add(&mut table, &a2).await); checkpoints::create_checkpoint_from_table(&table) .await .unwrap(); table.update().await.unwrap(); // make table to read the checkpoint assert_eq!(table.get_files(), vec![a1.path.as_str(), a2.path.as_str()]); let (removes1, opt1) = pseudo_optimize(&mut table, 5 * 59 * 1000).await; assert_eq!(table.get_files(), vec![opt1.path.as_str()]); assert_eq!(table.get_state().all_tombstones(), &removes1); checkpoints::create_checkpoint_from_table(&table) .await .unwrap(); table.update().await.unwrap(); // make table to read the checkpoint assert_eq!(table.get_files(), vec![opt1.path.as_str()]); assert_eq!(table.get_state().all_tombstones(), &vec![]); // stale removes are deleted from the state } async fn pseudo_optimize(table: &mut DeltaTable, offset_millis: i64) -> (Vec<Remove>, Add) { let removes: Vec<Remove> = table .get_files() .iter() .map(|p| Remove { path: p.to_string(), deletion_timestamp: Some(Utc::now().timestamp_millis() - offset_millis), data_change: false, extended_file_metadata: None, partition_values: None, size: None, tags: None, }) .collect(); let add = Add { data_change: false, ..add(offset_millis) }; let actions = removes .iter() .cloned() .map(Action::remove) .chain(std::iter::once(Action::add(add.clone()))) .collect(); commit_actions(table, actions).await; (removes, add) } fn add(offset_millis: i64) -> Add { Add { path: Uuid::new_v4().to_string(), size: 100, partition_values: Default::default(), partition_values_parsed: None, modification_time: Utc::now().timestamp_millis() - offset_millis, data_change: true, stats: None, stats_parsed: None, tags: None, } } async fn commit_add(table: &mut DeltaTable, add: 
&Add) -> i64 { commit_actions(table, vec![Action::add(add.clone())]).await } async fn commit_actions(table: &mut DeltaTable, actions: Vec<Action>) -> i64 { let mut tx = table.create_transaction(None); tx.add_actions(actions); tx.commit(None).await.unwrap() }
31.990148
104
0.643055
e25099f0921a1db23046f2c005220ddc150b6359
37,775
//! Releases web handlers use super::error::Nope; use super::page::Page; use super::{duration_to_str, match_version, redirect_base}; use crate::db::Pool; use crate::BuildQueue; use chrono::{DateTime, NaiveDateTime, Utc}; use iron::prelude::*; use iron::status; use postgres::Connection; use router::Router; use serde::ser::{Serialize, SerializeStruct, Serializer}; use serde_json::Value; /// Number of release in home page const RELEASES_IN_HOME: i64 = 15; /// Releases in /releases page const RELEASES_IN_RELEASES: i64 = 30; /// Releases in recent releases feed const RELEASES_IN_FEED: i64 = 150; #[derive(Debug, Clone, PartialEq, Eq)] pub struct Release { pub(crate) name: String, pub(crate) version: String, description: Option<String>, target_name: Option<String>, rustdoc_status: bool, pub(crate) release_time: DateTime<Utc>, stars: i32, } impl Default for Release { fn default() -> Release { Release { name: String::new(), version: String::new(), description: None, target_name: None, rustdoc_status: false, release_time: Utc::now(), stars: 0, } } } impl Serialize for Release { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut state = serializer.serialize_struct("Release", 8)?; state.serialize_field("name", &self.name)?; state.serialize_field("version", &self.version)?; state.serialize_field("description", &self.description)?; state.serialize_field("target_name", &self.target_name)?; state.serialize_field("rustdoc_status", &self.rustdoc_status)?; state.serialize_field("release_time", &duration_to_str(self.release_time))?; state.serialize_field( "release_time_rfc3339", &self.release_time.format("%+").to_string(), )?; state.serialize_field("stars", &self.stars)?; state.end() } } #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub(crate) enum Order { ReleaseTime, // this is default order GithubStars, RecentFailures, FailuresByGithubStars, } impl Default for Order { fn default() -> Self { Self::ReleaseTime } } pub(crate) fn 
get_releases(conn: &Connection, page: i64, limit: i64, order: Order) -> Vec<Release> { let offset = (page - 1) * limit; // TODO: This function changed so much during development and current version have code // repeats for queries. There is definitely room for improvements. let query = match order { Order::ReleaseTime => { "SELECT crates.name, releases.version, releases.description, releases.target_name, releases.release_time, releases.rustdoc_status, crates.github_stars FROM crates INNER JOIN releases ON crates.id = releases.crate_id ORDER BY releases.release_time DESC LIMIT $1 OFFSET $2" } Order::GithubStars => { "SELECT crates.name, releases.version, releases.description, releases.target_name, releases.release_time, releases.rustdoc_status, crates.github_stars FROM crates INNER JOIN releases ON releases.id = crates.latest_version_id ORDER BY crates.github_stars DESC LIMIT $1 OFFSET $2" } Order::RecentFailures => { "SELECT crates.name, releases.version, releases.description, releases.target_name, releases.release_time, releases.rustdoc_status, crates.github_stars FROM crates INNER JOIN releases ON crates.id = releases.crate_id WHERE releases.build_status = FALSE AND releases.is_library = TRUE ORDER BY releases.release_time DESC LIMIT $1 OFFSET $2" } Order::FailuresByGithubStars => { "SELECT crates.name, releases.version, releases.description, releases.target_name, releases.release_time, releases.rustdoc_status, crates.github_stars FROM crates INNER JOIN releases ON releases.id = crates.latest_version_id WHERE releases.build_status = FALSE AND releases.is_library = TRUE ORDER BY crates.github_stars DESC LIMIT $1 OFFSET $2" } }; let query = conn.query(&query, &[&limit, &offset]).unwrap(); query .into_iter() .map(|row| Release { name: row.get(0), version: row.get(1), description: row.get(2), target_name: row.get(3), release_time: DateTime::from_utc(row.get::<_, NaiveDateTime>(4), Utc), rustdoc_status: row.get(5), stars: row.get(6), }) .collect() } fn 
get_releases_by_author( conn: &Connection, page: i64, limit: i64, author: &str, ) -> (String, Vec<Release>) { let offset = (page - 1) * limit; let query = " SELECT crates.name, releases.version, releases.description, releases.target_name, releases.release_time, releases.rustdoc_status, crates.github_stars, authors.name FROM crates INNER JOIN releases ON releases.id = crates.latest_version_id INNER JOIN author_rels ON releases.id = author_rels.rid INNER JOIN authors ON authors.id = author_rels.aid WHERE authors.slug = $1 ORDER BY crates.github_stars DESC LIMIT $2 OFFSET $3"; let query = conn.query(&query, &[&author, &limit, &offset]).unwrap(); let mut author_name = None; let packages = query .into_iter() .map(|row| { if author_name.is_none() { author_name = Some(row.get(7)); } Release { name: row.get(0), version: row.get(1), description: row.get(2), target_name: row.get(3), release_time: DateTime::from_utc(row.get::<_, NaiveDateTime>(4), Utc), rustdoc_status: row.get(5), stars: row.get(6), } }) .collect(); (author_name.unwrap_or_default(), packages) } fn get_releases_by_owner( conn: &Connection, page: i64, limit: i64, author: &str, ) -> (String, Vec<Release>) { let offset = (page - 1) * limit; let query = "SELECT crates.name, releases.version, releases.description, releases.target_name, releases.release_time, releases.rustdoc_status, crates.github_stars, owners.name, owners.login FROM crates INNER JOIN releases ON releases.id = crates.latest_version_id INNER JOIN owner_rels ON owner_rels.cid = crates.id INNER JOIN owners ON owners.id = owner_rels.oid WHERE owners.login = $1 ORDER BY crates.github_stars DESC LIMIT $2 OFFSET $3"; let query = conn.query(&query, &[&author, &limit, &offset]).unwrap(); let mut author_name = None; let packages = query .into_iter() .map(|row| { if author_name.is_none() { author_name = Some(if !row.get::<usize, String>(7).is_empty() { row.get(7) } else { row.get(8) }); } Release { name: row.get(0), version: row.get(1), description: 
row.get(2), target_name: row.get(3), release_time: DateTime::from_utc(row.get::<_, NaiveDateTime>(4), Utc), rustdoc_status: row.get(5), stars: row.get(6), } }) .collect(); (author_name.unwrap_or_default(), packages) } /// Get the search results for a crate search query /// /// Retrieves crates which names have a levenshtein distance of less than or equal to 3, /// crates who fit into or otherwise are made up of the query or crates whose descriptions /// match the search query. /// /// * `query`: The query string, unfiltered /// * `page`: The page of results to show (1-indexed) /// * `limit`: The number of results to return /// /// Returns 0 and an empty Vec when no results are found or if a database error occurs /// fn get_search_results( conn: &Connection, mut query: &str, page: i64, limit: i64, ) -> (i64, Vec<Release>) { query = query.trim(); let offset = (page - 1) * limit; let statement = " SELECT crates.name AS name, releases.version AS version, releases.description AS description, releases.target_name AS target_name, releases.release_time AS release_time, releases.rustdoc_status AS rustdoc_status, crates.github_stars AS github_stars, COUNT(*) OVER() as total FROM crates INNER JOIN ( SELECT releases.id, releases.crate_id FROM ( SELECT releases.id, releases.crate_id, RANK() OVER (PARTITION BY crate_id ORDER BY release_time DESC) as rank FROM releases WHERE releases.rustdoc_status AND NOT releases.yanked ) AS releases WHERE releases.rank = 1 ) AS latest_release ON latest_release.crate_id = crates.id INNER JOIN releases ON latest_release.id = releases.id WHERE ((char_length($1)::float - levenshtein(crates.name, $1)::float) / char_length($1)::float) >= 0.65 OR crates.name ILIKE CONCAT('%', $1, '%') GROUP BY crates.id, releases.id ORDER BY levenshtein(crates.name, $1) ASC, crates.name ILIKE CONCAT('%', $1, '%'), releases.downloads DESC LIMIT $2 OFFSET $3"; let rows = if let Ok(rows) = conn.query(statement, &[&query, &limit, &offset]) { rows } else { return (0, 
Vec::new()); }; // Each row contains the total number of possible/valid results, just get it once let total_results = rows .iter() .next() .map(|row| row.get::<_, i64>("total")) .unwrap_or_default(); let packages: Vec<Release> = rows .into_iter() .map(|row| Release { name: row.get("name"), version: row.get("version"), description: row.get("description"), target_name: row.get("target_name"), release_time: DateTime::from_utc(row.get("release_time"), Utc), rustdoc_status: row.get("rustdoc_status"), stars: row.get::<_, i32>("github_stars"), }) .collect(); (total_results, packages) } pub fn home_page(req: &mut Request) -> IronResult<Response> { let conn = extension!(req, Pool).get()?; let packages = get_releases(&conn, 1, RELEASES_IN_HOME, Order::ReleaseTime); Page::new(packages) .set_true("show_search_form") .set_true("hide_package_navigation") .to_resp("releases") } pub fn releases_feed_handler(req: &mut Request) -> IronResult<Response> { let conn = extension!(req, Pool).get()?; let packages = get_releases(&conn, 1, RELEASES_IN_FEED, Order::ReleaseTime); let mut resp = ctry!(Page::new(packages).to_resp("releases_feed")); resp.headers.set(::iron::headers::ContentType( "application/atom+xml".parse().unwrap(), )); Ok(resp) } fn releases_handler( packages: Vec<Release>, page_number: i64, release_type: &str, tab: &str, title: &str, ) -> IronResult<Response> { if packages.is_empty() { return Err(IronError::new(Nope::CrateNotFound, status::NotFound)); } // Show next and previous page buttons // This is a temporary solution to avoid expensive COUNT(*) let (show_next_page, show_previous_page) = ( packages.len() == RELEASES_IN_RELEASES as usize, page_number != 1, ); Page::new(packages) .title("Releases") .set("description", title) .set("release_type", release_type) .set_true("show_releases_navigation") .set_true(tab) .set_bool("show_next_page_button", show_next_page) .set_int("next_page", page_number + 1) .set_bool("show_previous_page_button", show_previous_page) 
.set_int("previous_page", page_number - 1) .to_resp("releases") } // Following functions caused a code repeat due to design of our /releases/ URL routes pub fn recent_releases_handler(req: &mut Request) -> IronResult<Response> { let page_number: i64 = extension!(req, Router) .find("page") .unwrap_or("1") .parse() .unwrap_or(1); let conn = extension!(req, Pool).get()?; let packages = get_releases(&conn, page_number, RELEASES_IN_RELEASES, Order::ReleaseTime); releases_handler( packages, page_number, "recent", "releases_navigation_recent_tab", "Recently uploaded crates", ) } pub fn releases_by_stars_handler(req: &mut Request) -> IronResult<Response> { let page_number: i64 = extension!(req, Router) .find("page") .unwrap_or("1") .parse() .unwrap_or(1); let conn = extension!(req, Pool).get()?; let packages = get_releases(&conn, page_number, RELEASES_IN_RELEASES, Order::GithubStars); releases_handler( packages, page_number, "stars", "releases_navigation_stars_tab", "Crates with most stars", ) } pub fn releases_recent_failures_handler(req: &mut Request) -> IronResult<Response> { let page_number: i64 = extension!(req, Router) .find("page") .unwrap_or("1") .parse() .unwrap_or(1); let conn = extension!(req, Pool).get()?; let packages = get_releases( &conn, page_number, RELEASES_IN_RELEASES, Order::RecentFailures, ); releases_handler( packages, page_number, "recent-failures", "releases_navigation_recent_failures_tab", "Recent crates failed to build", ) } pub fn releases_failures_by_stars_handler(req: &mut Request) -> IronResult<Response> { let page_number: i64 = extension!(req, Router) .find("page") .unwrap_or("1") .parse() .unwrap_or(1); let conn = extension!(req, Pool).get()?; let packages = get_releases( &conn, page_number, RELEASES_IN_RELEASES, Order::FailuresByGithubStars, ); releases_handler( packages, page_number, "failures", "releases_navigation_failures_by_stars_tab", "Crates with most stars failed to build", ) } pub fn author_handler(req: &mut Request) -> 
IronResult<Response> { let router = extension!(req, Router); // page number of releases let page_number: i64 = router.find("page").unwrap_or("1").parse().unwrap_or(1); let conn = extension!(req, Pool).get()?; #[allow(clippy::or_fun_call)] let author = ctry!(router .find("author") .ok_or(IronError::new(Nope::CrateNotFound, status::NotFound))); let (author_name, packages) = if author.starts_with('@') { let mut author = author.split('@'); get_releases_by_owner( &conn, page_number, RELEASES_IN_RELEASES, cexpect!(author.nth(1)), ) } else { get_releases_by_author(&conn, page_number, RELEASES_IN_RELEASES, author) }; if packages.is_empty() { return Err(IronError::new(Nope::CrateNotFound, status::NotFound)); } // Show next and previous page buttons // This is a temporary solution to avoid expensive COUNT(*) let (show_next_page, show_previous_page) = ( packages.len() == RELEASES_IN_RELEASES as usize, page_number != 1, ); Page::new(packages) .title("Releases") .set("description", &format!("Crates from {}", author_name)) .set("author", &author_name) .set("release_type", author) .set_true("show_releases_navigation") .set_true("show_stars") .set_bool("show_next_page_button", show_next_page) .set_int("next_page", page_number + 1) .set_bool("show_previous_page_button", show_previous_page) .set_int("previous_page", page_number - 1) .to_resp("releases") } pub fn search_handler(req: &mut Request) -> IronResult<Response> { use params::{Params, Value}; let params = ctry!(req.get::<Params>()); let query = params.find(&["query"]); let conn = extension!(req, Pool).get()?; if let Some(&Value::String(ref query)) = query { // check if I am feeling lucky button pressed and redirect user to crate page // if there is a match // TODO: Redirecting to latest doc might be more useful if params.find(&["i-am-feeling-lucky"]).is_some() { use iron::modifiers::Redirect; use iron::Url; // redirect to a random crate if query is empty if query.is_empty() { let rows = ctry!(conn.query( "SELECT crates.name, 
releases.version, releases.target_name FROM crates INNER JOIN releases ON crates.latest_version_id = releases.id WHERE github_stars >= 100 AND rustdoc_status = true OFFSET FLOOR(RANDOM() * 280) LIMIT 1", &[] )); // ~~~~~~^ // FIXME: This is a fast query but using a constant // There are currently 280 crates with docs and 100+ // starts. This should be fine for a while. let name: String = rows.get(0).get(0); let version: String = rows.get(0).get(1); let target_name: String = rows.get(0).get(2); let url = ctry!(Url::parse(&format!( "{}/{}/{}/{}", redirect_base(req), name, version, target_name ))); let mut resp = Response::with((status::Found, Redirect(url))); use iron::headers::{Expires, HttpDate}; resp.headers.set(Expires(HttpDate(time::now()))); return Ok(resp); } // since we never pass a version into `match_version` here, we'll never get // `MatchVersion::Exact`, so the distinction between `Exact` and `Semver` doesn't // matter if let Some(matchver) = match_version(&conn, &query, None) { let (version, id) = matchver.version.into_parts(); let query = matchver.corrected_name.unwrap_or_else(|| query.to_string()); // FIXME: This is a super dirty way to check if crate have rustdocs generated. // match_version should handle this instead of this code block. // This block is introduced to fix #163 let rustdoc_status = { let rows = ctry!(conn.query( "SELECT rustdoc_status FROM releases WHERE releases.id = $1", &[&id] )); if rows.is_empty() { false } else { rows.get(0).get(0) } }; let url = if rustdoc_status { ctry!(Url::parse( &format!("{}/{}/{}", redirect_base(req), query, version)[..] )) } else { ctry!(Url::parse( &format!("{}/crate/{}/{}", redirect_base(req), query, version)[..] 
)) }; let mut resp = Response::with((status::Found, Redirect(url))); use iron::headers::{Expires, HttpDate}; resp.headers.set(Expires(HttpDate(time::now()))); return Ok(resp); } } let (_, results) = get_search_results(&conn, &query, 1, RELEASES_IN_RELEASES); let title = if results.is_empty() { format!("No results found for '{}'", query) } else { format!("Search results for '{}'", query) }; // FIXME: There is no pagination Page::new(results) .set("search_query", &query) .title(&title) .to_resp("releases") } else { Err(IronError::new(Nope::NoResults, status::NotFound)) } } pub fn activity_handler(req: &mut Request) -> IronResult<Response> { let conn = extension!(req, Pool).get()?; let release_activity_data: Value = ctry!(conn.query( "SELECT value FROM config WHERE name = 'release_activity'", &[] )) .get(0) .get(0); Page::new(release_activity_data) .title("Releases") .set("description", "Monthly release activity") .set_true("show_releases_navigation") .set_true("releases_navigation_activity_tab") .set_true("javascript_highchartjs") .to_resp("releases_activity") } pub fn build_queue_handler(req: &mut Request) -> IronResult<Response> { let queue = extension!(req, BuildQueue); let mut crates = ctry!(queue.queued_crates()); for krate in &mut crates { // The priority here is inverted: in the database if a crate has a higher priority it // will be built after everything else, which is counter-intuitive for people not // familiar with docs.rs's inner workings. 
krate.priority = -krate.priority; } let is_empty = crates.is_empty(); Page::new(crates) .title("Build queue") .set("description", "List of crates scheduled to build") .set_bool("queue_empty", is_empty) .set_true("show_releases_navigation") .set_true("releases_queue_tab") .to_resp("releases_queue") } #[cfg(test)] mod tests { use super::*; use crate::test::{assert_success, wrapper}; use chrono::TimeZone; use kuchiki::traits::TendrilSink; use serde_json::json; #[test] fn database_search() { wrapper(|env| { let db = env.db(); db.fake_release().name("foo").version("0.0.0").create()?; db.fake_release() .name("bar-foo") .version("0.0.0") .create()?; db.fake_release() .name("foo-bar") .version("0.0.1") .create()?; db.fake_release().name("fo0").version("0.0.0").create()?; db.fake_release() .name("fool") .version("0.0.0") .build_result_successful(false) .create()?; db.fake_release() .name("freakin") .version("0.0.0") .create()?; db.fake_release() .name("something unreleated") .version("0.0.0") .create()?; let (num_results, results) = get_search_results(&db.conn(), "foo", 1, 100); assert_eq!(num_results, 4); let mut results = results.into_iter(); let expected = ["foo", "fo0", "bar-foo", "foo-bar"]; for expected in expected.iter() { assert_eq!(expected, &results.next().unwrap().name); } assert_eq!(results.count(), 0); Ok(()) }) } #[test] fn exacts_dont_care() { wrapper(|env| { let db = env.db(); let releases = ["regex", "regex-", "regex-syntax"]; for release in releases.iter() { db.fake_release().name(release).version("0.0.0").create()?; } let near_matches = ["Regex", "rEgex", "reGex", "regEx", "regeX"]; for name in near_matches.iter() { let (num_results, mut results) = dbg!(get_search_results(&db.conn(), *name, 1, 100)); assert_eq!(num_results, 3); for name in releases.iter() { assert_eq!(results.remove(0).name, *name); } assert!(results.is_empty()); } Ok(()) }) } #[test] fn unsuccessful_not_shown() { wrapper(|env| { let db = env.db(); db.fake_release() .name("regex") 
.version("0.0.0") .build_result_successful(false) .create()?; let (num_results, results) = get_search_results(&db.conn(), "regex", 1, 100); assert_eq!(num_results, 0); let results = results.into_iter(); assert_eq!(results.count(), 0); Ok(()) }) } #[test] fn yanked_not_shown() { wrapper(|env| { let db = env.db(); db.fake_release() .name("regex") .version("0.0.0") .yanked(true) .create()?; let (num_results, results) = get_search_results(&db.conn(), "regex", 1, 100); assert_eq!(num_results, 0); let results = results.into_iter(); assert_eq!(results.count(), 0); Ok(()) }) } #[test] fn fuzzily_match() { wrapper(|env| { let db = env.db(); db.fake_release().name("regex").version("0.0.0").create()?; let (num_results, results) = get_search_results(&db.conn(), "redex", 1, 100); assert_eq!(num_results, 1); let mut results = results.into_iter(); assert_eq!(results.next().unwrap().name, "regex"); assert_eq!(results.count(), 0); Ok(()) }) } // Description searching more than doubles search time // #[test] // fn search_descriptions() { // wrapper(|env| { // let db = env.db(); // db.fake_release() // .name("something_completely_unrelated") // .description("Supercalifragilisticexpialidocious") // .create()?; // // let (num_results, results) = // get_search_results(&db.conn(), "supercalifragilisticexpialidocious", 1, 100); // assert_eq!(num_results, 1); // // let mut results = results.into_iter(); // assert_eq!( // results.next().unwrap().name, // "something_completely_unrelated" // ); // assert_eq!(results.count(), 0); // // Ok(()) // }) // } #[test] fn search_limits() { wrapper(|env| { let db = env.db(); db.fake_release().name("something_magical").create()?; db.fake_release().name("something_sinister").create()?; db.fake_release().name("something_fantastical").create()?; db.fake_release() .name("something_completely_unrelated") .create()?; let (num_results, results) = get_search_results(&db.conn(), "something", 1, 2); assert_eq!(num_results, 4); let mut results = 
results.into_iter(); assert_eq!(results.next().unwrap().name, "something_magical"); assert_eq!(results.next().unwrap().name, "something_sinister"); assert_eq!(results.count(), 0); Ok(()) }) } #[test] fn search_offsets() { wrapper(|env| { let db = env.db(); db.fake_release().name("something_magical").create()?; db.fake_release().name("something_sinister").create()?; db.fake_release().name("something_fantastical").create()?; db.fake_release() .name("something_completely_unrelated") .create()?; let (num_results, results) = get_search_results(&db.conn(), "something", 2, 2); assert_eq!(num_results, 4); let mut results = results.into_iter(); assert_eq!(results.next().unwrap().name, "something_fantastical"); assert_eq!( results.next().unwrap().name, "something_completely_unrelated", ); assert_eq!(results.count(), 0); Ok(()) }) } #[test] fn release_dates() { wrapper(|env| { let db = env.db(); db.fake_release() .name("somethang") .release_time(Utc.ymd(2021, 4, 16).and_hms(4, 33, 50)) .version("0.3.0") .description("this is the correct choice") .create()?; db.fake_release() .name("somethang") .release_time(Utc.ymd(2020, 4, 16).and_hms(4, 33, 50)) .description("second") .version("0.2.0") .create()?; db.fake_release() .name("somethang") .release_time(Utc.ymd(2019, 4, 16).and_hms(4, 33, 50)) .description("third") .version("0.1.0") .create()?; db.fake_release() .name("somethang") .release_time(Utc.ymd(2018, 4, 16).and_hms(4, 33, 50)) .description("fourth") .version("0.0.0") .create()?; let (num_results, results) = get_search_results(&db.conn(), "somethang", 1, 100); assert_eq!(num_results, 1); let mut results = results.into_iter(); assert_eq!( results.next().unwrap().description, Some("this is the correct choice".into()), ); assert_eq!(results.count(), 0); Ok(()) }) } // Description searching more than doubles search time // #[test] // fn fuzzy_over_description() { // wrapper(|env| { // let db = env.db(); // db.fake_release() // .name("name_better_than_description") // 
.description("this is the correct choice") // .create()?; // db.fake_release() // .name("im_completely_unrelated") // .description("name_better_than_description") // .create()?; // db.fake_release() // .name("i_have_zero_relation_whatsoever") // .create()?; // // let (num_results, results) = // get_search_results(&db.conn(), "name_better_than_description", 1, 100); // assert_eq!(num_results, 2); // // let mut results = results.into_iter(); // // let next = results.next().unwrap(); // assert_eq!(next.name, "name_better_than_description"); // assert_eq!(next.description, Some("this is the correct choice".into())); // // let next = results.next().unwrap(); // assert_eq!(next.name, "im_completely_unrelated"); // assert_eq!( // next.description, // Some("name_better_than_description".into()) // ); // // assert_eq!(results.count(), 0); // // Ok(()) // }) // } #[test] fn dont_return_unrelated() { wrapper(|env| { let db = env.db(); db.fake_release().name("match").create()?; db.fake_release().name("matcher").create()?; db.fake_release().name("matchest").create()?; db.fake_release() .name("i_am_useless_and_mean_nothing") .create()?; let (num_results, results) = get_search_results(&db.conn(), "match", 1, 100); assert_eq!(num_results, 3); let mut results = results.into_iter(); assert_eq!(results.next().unwrap().name, "match"); assert_eq!(results.next().unwrap().name, "matcher"); assert_eq!(results.next().unwrap().name, "matchest"); assert_eq!(results.count(), 0); Ok(()) }) } #[test] fn order_by_downloads() { wrapper(|env| { let db = env.db(); db.fake_release().name("matca").downloads(100).create()?; db.fake_release().name("matcb").downloads(10).create()?; db.fake_release().name("matcc").downloads(1).create()?; let (num_results, results) = get_search_results(&db.conn(), "match", 1, 100); assert_eq!(num_results, 3); let mut results = results.into_iter(); assert_eq!(results.next().unwrap().name, "matca"); assert_eq!(results.next().unwrap().name, "matcb"); 
assert_eq!(results.next().unwrap().name, "matcc"); assert_eq!(results.count(), 0); Ok(()) }) } #[test] fn serialize_releases() { let now = Utc::now(); let mut release = Release { name: "serde".to_string(), version: "0.0.0".to_string(), description: Some("serde makes things other things".to_string()), target_name: Some("x86_64-pc-windows-msvc".to_string()), rustdoc_status: true, release_time: now, stars: 100, }; let correct_json = json!({ "name": "serde", "version": "0.0.0", "description": "serde makes things other things", "target_name": "x86_64-pc-windows-msvc", "rustdoc_status": true, "release_time": duration_to_str(now), "release_time_rfc3339": now.format("%+").to_string(), "stars": 100 }); assert_eq!(correct_json, serde_json::to_value(&release).unwrap()); release.target_name = None; let correct_json = json!({ "name": "serde", "version": "0.0.0", "description": "serde makes things other things", "target_name": null, "rustdoc_status": true, "release_time": duration_to_str(now), "release_time_rfc3339": now.format("%+").to_string(), "stars": 100 }); assert_eq!(correct_json, serde_json::to_value(&release).unwrap()); release.description = None; let correct_json = json!({ "name": "serde", "version": "0.0.0", "description": null, "target_name": null, "rustdoc_status": true, "release_time": duration_to_str(now), "release_time_rfc3339": now.format("%+").to_string(), "stars": 100 }); assert_eq!(correct_json, serde_json::to_value(&release).unwrap()); } #[test] fn release_feed() { wrapper(|env| { let web = env.frontend(); assert_success("/releases/feed", web) }) } #[test] fn test_releases_queue() { wrapper(|env| { let queue = env.build_queue(); let web = env.frontend(); let empty = kuchiki::parse_html().one(web.get("/releases/queue").send()?.text()?); assert!(empty .select(".release > strong") .expect("missing heading") .any(|el| el.text_contents().contains("nothing"))); queue.add_crate("foo", "1.0.0", 0)?; queue.add_crate("bar", "0.1.0", -10)?; queue.add_crate("baz", 
"0.0.1", 10)?; let full = kuchiki::parse_html().one(web.get("/releases/queue").send()?.text()?); let items = full .select(".queue-list > li") .expect("missing list items") .collect::<Vec<_>>(); assert_eq!(items.len(), 3); let expected = [ ("bar", "0.1.0", Some(10)), ("foo", "1.0.0", None), ("baz", "0.0.1", Some(-10)), ]; for (li, expected) in items.iter().zip(&expected) { let a = li.as_node().select_first("a").expect("missing link"); assert!(a.text_contents().contains(expected.0)); assert!(a.text_contents().contains(expected.1)); if let Some(priority) = expected.2 { assert!(li .text_contents() .contains(&format!("priority: {}", priority))); } } Ok(()) }); } }
33.909336
109
0.524844
2651ee2fdfcc68dd7d8f9c0ae77d9f047a57bd0f
7,694
use crate::Endpoint; use serde::{Deserialize, Serialize}; use std::{borrow::Cow, collections::HashMap}; use typed_builder::TypedBuilder; /// Send a Single email /// /// ``` /// # use postmark::api::email::{SendEmailRequest, Body}; /// let req = SendEmailRequest::builder() /// .from("[email protected]") /// .to("[email protected]") /// .body(Body::Text("Hi, this is me!".to_string())) /// .build(); /// ``` #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default)] #[serde(rename_all = "PascalCase")] #[derive(TypedBuilder)] pub struct SendEmailRequest { /// The sender email address. Must have a registered and confirmed Sender Signature. /// To include a name, use the format "Full Name <[email protected]>" for the address. #[builder(setter(into))] pub from: String, /// Recipient email address. Multiple addresses are comma separated. Max 50. #[builder(setter(into))] pub to: String, /// The body of the message #[serde(flatten)] pub body: Body, /// Cc recipient email address. Multiple addresses are comma separated. Max 50. #[serde(skip_serializing_if = "Option::is_none")] #[builder(default, setter(into, strip_option))] pub cc: Option<String>, /// Bcc recipient email address. Multiple addresses are comma separated. Max 50. #[serde(skip_serializing_if = "Option::is_none")] #[builder(default, setter(into, strip_option))] pub bcc: Option<String>, /// Email subject #[serde(skip_serializing_if = "Option::is_none")] #[builder(default, setter(into, strip_option))] pub subject: Option<String>, /// Email tag that allows you to categorize outgoing emails and get detailed statistics. #[serde(skip_serializing_if = "Option::is_none")] #[builder(default, setter(into, strip_option))] pub tag: Option<String>, /// Reply To override email address. Defaults to the Reply To set in the sender signature. #[serde(skip_serializing_if = "Option::is_none")] #[builder(default, setter(into, strip_option))] pub reply_to: Option<String>, /// List of custom headers to include. 
#[serde(skip_serializing_if = "Option::is_none")] #[builder(default, setter(into, strip_option))] pub headers: Option<Vec<Header>>, /// Activate open tracking for this email. #[serde(skip_serializing_if = "Option::is_none")] #[builder(default, setter(into, strip_option))] pub track_opens: Option<bool>, /// Activate link tracking for links in the HTML or Text bodies of this email. #[serde(skip_serializing_if = "Option::is_none")] #[builder(default, setter(into, strip_option))] pub track_links: Option<TrackLink>, /// List of attachments #[serde(skip_serializing_if = "Option::is_none")] #[builder(default, setter(into, strip_option))] pub attachments: Option<Vec<Attachment>>, /// Custom metadata key/value pairs. #[serde(skip_serializing_if = "Option::is_none")] #[builder(default, setter(into, strip_option))] pub metadata: Option<HashMap<String, String>>, /// Set message stream ID that's used for sending. If not provided, message will default to the "outbound" transactional stream. #[serde(skip_serializing_if = "Option::is_none")] #[builder(default, setter(into, strip_option))] pub message_stream: Option<String>, } /// The body of a email message #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub enum Body { #[serde(rename = "TextBody")] Text(String), #[serde(rename = "HtmlBody")] Html(String), } impl Default for Body { fn default() -> Self { Body::Text("".into()) } } /// A custom headers to include in a email. #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "PascalCase")] pub struct Header { pub name: String, pub value: String, } /// And attachment to an email. #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "PascalCase")] pub struct Attachment { pub name: String, pub content: String, pub content_type: String, #[serde(skip_serializing_if = "Option::is_none")] pub content_id: Option<String>, } /// Activate link tracking for links in the HTML or Text bodies of this email. 
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub enum TrackLink { None, HtmlAndText, HtmlOnly, TextOnly, } impl Default for TrackLink { fn default() -> Self { Self::None } } /// Response for the [`SendEmailRequest`] Endpoint. /// /// On a success all fields will be filled, `error_code` will be 0 and /// message "OK". /// On a failure Option fields will be empty and details will be held /// in error_code and message. #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "PascalCase")] pub struct SendEmailResponse { pub to: Option<String>, pub submitted_at: Option<String>, #[serde(rename = "MessageID")] pub message_id: Option<String>, pub error_code: i64, pub message: String, } impl Endpoint for SendEmailRequest { type Request = SendEmailRequest; type Response = SendEmailResponse; fn endpoint(&self) -> Cow<'static, str> { "/email".into() } fn body(&self) -> &Self::Request { self } } #[cfg(test)] mod tests { use httptest::matchers::request; use httptest::{responders::*, Expectation, Server}; use serde_json::json; use super::{Body, SendEmailRequest}; use crate::reqwest::PostmarkClient; use crate::Query; #[tokio::test] pub async fn send_email_test() { let server = Server::run(); server.expect( Expectation::matching(request::method_path("POST", "/email")).respond_with( json_encoded(json!({ "To": "[email protected]", "SubmittedAt": "2014-02-17T07:25:01.4178645-05:00", "MessageID": "0a129aee-e1cd-480d-b08d-4f48548ff48d", "ErrorCode": 0, "Message": "OK" })), ), ); let client = PostmarkClient::builder() .base_url(server.url("/").to_string()) .build(); let req = SendEmailRequest::builder() .from("[email protected]") .to("[email protected]") .body(Body::Text("hello matt".into())) .subject("hello") .build(); req.execute(&client) .await .expect("Should get a response and be able to json decode it"); } #[tokio::test] pub async fn send_email_test_should_not_error_on_postmark_error() { let server = Server::run(); server.expect( 
Expectation::matching(request::method_path("POST", "/email")).respond_with( json_encoded(json!({ "ErrorCode": 406, "Message": "You tried to send to a recipient that has been marked as inactive. Found inactive addresses: [email protected]. Inactive recipients are ones that have generated a hard bounce, a spam complaint, or a manual suppression. " })), ), ); let client = PostmarkClient::builder() .base_url(server.url("/").to_string()) .build(); let req = SendEmailRequest::builder() .from("[email protected]") .to("[email protected]") .body(Body::Text("hello matt".into())) .subject("hello") .build(); req.execute(&client) .await .expect("Should get a response and be able to json decode it"); } }
32.327731
274
0.629711
dbae4517110440d5d4d7d80b1b87fc987fbe36e5
4,593
use anyhow::Result; use core::mem; use super::{ wrap::*, Context, }; use crate::{ command::Skip, io, types::{ ArrayLength, Bytes, Fallback, NBytes, Size, SkipFallback, Uint16, Uint32, Uint64, Uint8, }, }; struct SkipContext<F, OS> { ctx: Context<F, OS>, } impl<F, OS> AsMut<SkipContext<F, OS>> for Context<F, OS> { fn as_mut<'a>(&'a mut self) -> &'a mut SkipContext<F, OS> { unsafe { mem::transmute::<&'a mut Context<F, OS>, &'a mut SkipContext<F, OS>>(self) } } } impl<F, OS> AsMut<Context<F, OS>> for SkipContext<F, OS> { fn as_mut<'a>(&'a mut self) -> &'a mut Context<F, OS> { unsafe { mem::transmute::<&'a mut SkipContext<F, OS>, &'a mut Context<F, OS>>(self) } } } impl<F, OS: io::OStream> Wrap for SkipContext<F, OS> { fn wrap_u8(&mut self, u: u8) -> Result<&mut Self> { let slice = self.ctx.stream.try_advance(1)?; slice[0] = u; Ok(self) } fn wrapn(&mut self, bytes: &[u8]) -> Result<&mut Self> { self.ctx.stream.try_advance(bytes.len())?.copy_from_slice(bytes); Ok(self) } } fn wrap_skip_u8<'a, F, OS: io::OStream>( ctx: &'a mut SkipContext<F, OS>, u: Uint8, ) -> Result<&'a mut SkipContext<F, OS>> { ctx.wrap_u8(u.0) } fn wrap_skip_u16<'a, F, OS: io::OStream>( ctx: &'a mut SkipContext<F, OS>, u: Uint16, ) -> Result<&'a mut SkipContext<F, OS>> { ctx.wrap_u16(u.0) } fn wrap_skip_u32<'a, F, OS: io::OStream>( ctx: &'a mut SkipContext<F, OS>, u: Uint32, ) -> Result<&'a mut SkipContext<F, OS>> { ctx.wrap_u32(u.0) } fn wrap_skip_u64<'a, F, OS: io::OStream>( ctx: &'a mut SkipContext<F, OS>, u: Uint64, ) -> Result<&'a mut SkipContext<F, OS>> { ctx.wrap_u64(u.0) } fn wrap_skip_size<'a, F, OS: io::OStream>( ctx: &'a mut SkipContext<F, OS>, size: Size, ) -> Result<&'a mut SkipContext<F, OS>> { ctx.wrap_size(size) } fn wrap_skip_trits<'a, F, OS: io::OStream>( ctx: &'a mut SkipContext<F, OS>, bytes: &[u8], ) -> Result<&'a mut SkipContext<F, OS>> { ctx.wrapn(bytes) } impl<'a, F, OS: io::OStream> Skip<&'a Uint8> for Context<F, OS> { fn skip(&mut self, u: &'a Uint8) -> Result<&mut Self> { 
Ok(wrap_skip_u8(self.as_mut(), *u)?.as_mut()) } } impl<F, OS: io::OStream> Skip<Uint8> for Context<F, OS> { fn skip(&mut self, val: Uint8) -> Result<&mut Self> { self.skip(&val) } } impl<'a, F, OS: io::OStream> Skip<&'a Uint16> for Context<F, OS> { fn skip(&mut self, u: &'a Uint16) -> Result<&mut Self> { Ok(wrap_skip_u16(self.as_mut(), *u)?.as_mut()) } } impl<F, OS: io::OStream> Skip<Uint16> for Context<F, OS> { fn skip(&mut self, val: Uint16) -> Result<&mut Self> { self.skip(&val) } } impl<'a, F, OS: io::OStream> Skip<&'a Uint32> for Context<F, OS> { fn skip(&mut self, u: &'a Uint32) -> Result<&mut Self> { Ok(wrap_skip_u32(self.as_mut(), *u)?.as_mut()) } } impl<F, OS: io::OStream> Skip<Uint32> for Context<F, OS> { fn skip(&mut self, val: Uint32) -> Result<&mut Self> { self.skip(&val) } } impl<'a, F, OS: io::OStream> Skip<&'a Uint64> for Context<F, OS> { fn skip(&mut self, u: &'a Uint64) -> Result<&mut Self> { Ok(wrap_skip_u64(self.as_mut(), *u)?.as_mut()) } } impl<F, OS: io::OStream> Skip<Uint64> for Context<F, OS> { fn skip(&mut self, val: Uint64) -> Result<&mut Self> { self.skip(&val) } } impl<'a, F, OS: io::OStream> Skip<&'a Size> for Context<F, OS> { fn skip(&mut self, size: &'a Size) -> Result<&mut Self> { Ok(wrap_skip_size(self.as_mut(), *size)?.as_mut()) } } impl<F, OS: io::OStream> Skip<Size> for Context<F, OS> { fn skip(&mut self, val: Size) -> Result<&mut Self> { self.skip(&val) } } impl<'a, F, N: ArrayLength<u8>, OS: io::OStream> Skip<&'a NBytes<N>> for Context<F, OS> { fn skip(&mut self, nbytes: &'a NBytes<N>) -> Result<&mut Self> { Ok(wrap_skip_trits(self.as_mut(), nbytes.as_slice())?.as_mut()) } } impl<'a, F, OS: io::OStream> Skip<&'a Bytes> for Context<F, OS> { fn skip(&mut self, bytes: &'a Bytes) -> Result<&mut Self> { wrap_skip_size(self.as_mut(), Size((bytes.0).len()))?; Ok(wrap_skip_trits(self.as_mut(), &(bytes.0)[..])?.as_mut()) } } impl<'a, F, T: 'a + SkipFallback<F>, OS: io::OStream> Skip<&'a Fallback<T>> for Context<F, OS> { fn skip(&mut 
self, val: &'a Fallback<T>) -> Result<&mut Self> { (val.0).wrap_skip(self)?; Ok(self) } }
27.502994
96
0.558241
3ad38b106cbb4313b6f38504de9b28164c2dab0c
4,585
//! Collection of functions to set up a TCP connection using a smoltcp device #![no_std] #[macro_use] extern crate log; #[macro_use] extern crate alloc; extern crate smoltcp; extern crate network_manager; extern crate spin; extern crate hpet; use core::convert::TryInto; use spin::Once; use hpet::get_hpet; use smoltcp::{ wire::IpEndpoint, socket::{SocketSet, TcpSocket, SocketHandle}, time::Instant }; use network_manager::{NetworkInterfaceRef, NETWORK_INTERFACES}; /// The starting number for freely-available (non-reserved) standard TCP/UDP ports. pub const STARTING_FREE_PORT: u16 = 49152; /// A simple macro to get the current HPET clock ticks. #[macro_export] macro_rules! hpet_ticks { () => { get_hpet().as_ref().ok_or("coudln't get HPET timer")?.get_counter() }; } /// Function to calculate the currently elapsed time (in milliseconds) since the given `start_time` (hpet ticks). pub fn millis_since(start_time: u64) -> Result<u64, &'static str> { const FEMTOSECONDS_PER_MILLISECOND: u64 = 1_000_000_000_000; static HPET_PERIOD_FEMTOSECONDS: Once<u32> = Once::new(); let hpet_freq = match HPET_PERIOD_FEMTOSECONDS.try() { Some(period) => period, _ => { let freq = get_hpet().as_ref().ok_or("couldn't get HPET")?.counter_period_femtoseconds(); HPET_PERIOD_FEMTOSECONDS.call_once(|| freq) } }; let hpet_freq = *hpet_freq as u64; let end_time: u64 = get_hpet().as_ref().ok_or("coudln't get HPET timer")?.get_counter(); // Convert to ms let diff = (end_time - start_time) * hpet_freq / FEMTOSECONDS_PER_MILLISECOND; Ok(diff) } /// Returns the first network interface available in the system. pub fn get_default_iface() -> Result<NetworkInterfaceRef, &'static str> { NETWORK_INTERFACES.lock() .iter() .next() .cloned() .ok_or_else(|| "no network interfaces available") } /// A convenience function for connecting a socket. /// If the given socket is already open, it is forcibly closed immediately and reconnected. 
pub fn connect( iface: &NetworkInterfaceRef, sockets: &mut SocketSet, tcp_handle: SocketHandle, remote_endpoint: IpEndpoint, local_port: u16, startup_time: u64, ) -> Result<(), &'static str> { if sockets.get::<TcpSocket>(tcp_handle).is_open() { return Err("smoltcp_helper: when connecting socket, it was already open..."); } let timeout_millis = 3000; // 3 second timeout let start = hpet_ticks!(); debug!("smoltcp_helper: connecting from {}:{} to {} ...", iface.lock().ip_addrs().get(0).map(|ip| format!("{}", ip)).unwrap_or_else(|| format!("ERROR")), local_port, remote_endpoint, ); let _packet_io_occurred = poll_iface(&iface, sockets, startup_time)?; sockets.get::<TcpSocket>(tcp_handle).connect(remote_endpoint, local_port).map_err(|_e| { error!("smoltcp_helper: failed to connect socket, error: {:?}", _e); "smoltcp_helper: failed to connect socket" })?; loop { let _packet_io_occurred = poll_iface(&iface, sockets, startup_time)?; // if the socket actually connected, it should be able to send/recv let socket = sockets.get::<TcpSocket>(tcp_handle); if socket.may_send() && socket.may_recv() { break; } // check to make sure we haven't timed out if millis_since(start)? > timeout_millis { error!("smoltcp_helper: failed to connect to socket, timed out after {} ms", timeout_millis); return Err("smoltcp_helper: failed to connect to socket, timed out."); } } debug!("smoltcp_helper: connected! (took {} ms)", millis_since(start)?); Ok(()) } /// A convenience function to poll the given network interface (i.e., flush tx/rx). /// Returns true if any packets were sent or received through that interface on the given `sockets`. pub fn poll_iface(iface: &NetworkInterfaceRef, sockets: &mut SocketSet, startup_time: u64) -> Result<bool, &'static str> { let timestamp: i64 = millis_since(startup_time)? 
.try_into() .map_err(|_e| "millis_since() u64 timestamp was larger than i64")?; // debug!("calling iface.poll() with timestamp: {:?}", timestamp); let packets_were_sent_or_received = match iface.lock().poll(sockets, Instant::from_millis(timestamp)) { Ok(b) => b, Err(err) => { warn!("smoltcp_helper: poll error: {}", err); false } }; Ok(packets_were_sent_or_received) }
35.269231
122
0.659106
763b4e29e5c835f74ea52b62a7f0f3500be5c4cc
7,004
//! BAM index (BAI) and fields. //! //! A BAM index (BAI) is used with an associated coordinate-sorted BAM file that allows random //! access to records, e.g., [querying]. //! //! The index contains a list of reference sequences parallel to the one defined in the BAM file. //! Each indexed reference sequence has a calculated set of hierarchical bins at different //! granularities. The bins then define a list of physical file positions in the BAM to search for //! overlapping records. //! //! When reading entire BAM files sequentially, a BAM index is not necessary. //! //! [querying]: crate::Reader::query //! //! # Examples //! //! ## Reading a BAM index //! //! ```no_run //! # use std::io; //! use noodles_bam::bai; //! let index = bai::read("sample.bam.bai")?; //! # Ok::<(), io::Error>(()) //! ``` pub mod index; mod reader; mod writer; pub use self::{index::Index, reader::Reader, writer::Writer}; use std::{fs::File, io, path::Path}; use noodles_bgzf::VirtualPosition; use self::index::reference_sequence::{bin::Chunk, Bin}; static MAGIC_NUMBER: &[u8] = b"BAI\x01"; /// Reads the entire contents of a BAM index. /// /// This is a convenience function and is equivalent to opening the file at the given path, reading /// the header, and reading the index. /// /// # Examples /// /// ```no_run /// # use std::io; /// use noodles_bam::bai; /// let index = bai::read("sample.bam.bai")?; /// # Ok::<(), io::Error>(()) /// ``` pub fn read<P>(src: P) -> io::Result<Index> where P: AsRef<Path>, { let mut reader = File::open(src).map(Reader::new)?; reader.read_header()?; reader.read_index() } /// Writes a BAM index to a file. /// /// This is a convenience function and is equivalent to creating a file at the given path, writing /// the header, and writing the index. 
/// /// # Examples /// /// ```no_run /// # use std::io; /// use noodles_bam::bai; /// let index = bai::Index::default(); /// bai::write("sample.bam.bai", &index)?; /// # Ok::<(), io::Error>(()) /// ``` pub fn write<P>(dst: P, index: &Index) -> io::Result<()> where P: AsRef<Path>, { let mut writer = File::create(dst).map(Writer::new)?; writer.write_header()?; writer.write_index(index) } /// Merges a list of chunks into a list of non-overlapping chunks. /// /// This is the same as calling [`optimize_chunks`] with a `min_offset` of 0. /// /// # Examples /// /// ``` /// use noodles_bam::bai::{self, index::reference_sequence::bin::Chunk}; /// use noodles_bgzf as bgzf; /// /// let chunks = [ /// Chunk::new(bgzf::VirtualPosition::from(2), bgzf::VirtualPosition::from(3)), /// Chunk::new(bgzf::VirtualPosition::from(5), bgzf::VirtualPosition::from(8)), /// Chunk::new(bgzf::VirtualPosition::from(7), bgzf::VirtualPosition::from(13)), /// Chunk::new(bgzf::VirtualPosition::from(21), bgzf::VirtualPosition::from(34)), /// ]; /// /// let actual = bai::merge_chunks(&chunks); /// /// let expected = [ /// Chunk::new(bgzf::VirtualPosition::from(2), bgzf::VirtualPosition::from(3)), /// Chunk::new(bgzf::VirtualPosition::from(5), bgzf::VirtualPosition::from(13)), /// Chunk::new(bgzf::VirtualPosition::from(21), bgzf::VirtualPosition::from(34)), /// ]; /// /// assert_eq!(actual, expected); /// ``` pub fn merge_chunks(chunks: &[Chunk]) -> Vec<Chunk> { optimize_chunks(chunks, VirtualPosition::default()) } /// Optimizes a list of chunks into a list of non-overlapping chunks. /// /// Unlike [`merge_chunks`], `min_offset` (typically from the linear index) is given to remove /// chunks that cannot be in the query. 
/// /// # Examples /// /// ``` /// use noodles_bam::bai::{self, index::reference_sequence::bin::Chunk}; /// use noodles_bgzf as bgzf; /// /// let chunks = [ /// Chunk::new(bgzf::VirtualPosition::from(2), bgzf::VirtualPosition::from(3)), /// Chunk::new(bgzf::VirtualPosition::from(5), bgzf::VirtualPosition::from(8)), /// Chunk::new(bgzf::VirtualPosition::from(7), bgzf::VirtualPosition::from(13)), /// Chunk::new(bgzf::VirtualPosition::from(21), bgzf::VirtualPosition::from(34)), /// ]; /// let min_offset = bgzf::VirtualPosition::from(5); /// /// let actual = bai::optimize_chunks(&chunks, min_offset); /// /// let expected = [ /// Chunk::new(bgzf::VirtualPosition::from(5), bgzf::VirtualPosition::from(13)), /// Chunk::new(bgzf::VirtualPosition::from(21), bgzf::VirtualPosition::from(34)), /// ]; /// /// assert_eq!(actual, expected); /// ``` pub fn optimize_chunks(chunks: &[Chunk], min_offset: VirtualPosition) -> Vec<Chunk> { let mut chunks: Vec<_> = chunks .iter() .filter(|c| c.end() > min_offset) .copied() .collect(); if chunks.is_empty() { return chunks; } chunks.sort_unstable_by_key(|c| c.start()); // At worst, no chunks are merged, and the resulting list will be the same size as the input. let mut merged_chunks = Vec::with_capacity(chunks.len()); // `chunks` is guaranteed to be non-empty. 
let mut current_chunk = chunks[0]; for next_chunk in chunks.iter().skip(1) { if next_chunk.start() > current_chunk.end() { merged_chunks.push(current_chunk); current_chunk = *next_chunk; } else if current_chunk.end() < next_chunk.end() { current_chunk = Chunk::new(current_chunk.start(), next_chunk.end()); } } merged_chunks.push(current_chunk); merged_chunks } #[cfg(test)] mod tests { use super::*; fn build_chunks() -> Vec<Chunk> { vec![ Chunk::new(VirtualPosition::from(2), VirtualPosition::from(5)), Chunk::new(VirtualPosition::from(3), VirtualPosition::from(4)), Chunk::new(VirtualPosition::from(5), VirtualPosition::from(7)), Chunk::new(VirtualPosition::from(9), VirtualPosition::from(12)), Chunk::new(VirtualPosition::from(10), VirtualPosition::from(15)), Chunk::new(VirtualPosition::from(16), VirtualPosition::from(21)), ] } #[test] fn test_merge_chunks() { let chunks = build_chunks(); let actual = merge_chunks(&chunks); let expected = [ Chunk::new(VirtualPosition::from(2), VirtualPosition::from(7)), Chunk::new(VirtualPosition::from(9), VirtualPosition::from(15)), Chunk::new(VirtualPosition::from(16), VirtualPosition::from(21)), ]; assert_eq!(actual, expected); } #[test] fn test_merge_chunks_with_empty_list() { let chunks = Vec::new(); let merged_chunks = merge_chunks(&chunks); assert!(merged_chunks.is_empty()); } #[test] fn test_optimize_chunks() { let chunks = build_chunks(); let actual = optimize_chunks(&chunks, VirtualPosition::from(10)); let expected = [ Chunk::new(VirtualPosition::from(9), VirtualPosition::from(15)), Chunk::new(VirtualPosition::from(16), VirtualPosition::from(21)), ]; assert_eq!(actual, expected); } }
30.99115
99
0.6245
4a4d948b97b7e8c473d74ef2c00d95691c99b118
643
// Copyright 2022 The Engula Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. mod error; pub use self::error::{Error, Result};
35.722222
75
0.744946
87a63a34cb642ef5c978cdb563fb3738a99be6a3
2,693
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // extern crate mdbook; #[macro_use] extern crate clap; use std::env; use std::path::{Path, PathBuf}; use clap::{App, ArgMatches, SubCommand, AppSettings}; use mdbook::MDBook; use mdbook::errors::Result; fn main() { let d_message = "-d, --dest-dir=[dest-dir] 'The output directory for your book{n}(Defaults to ./book when omitted)'"; let dir_message = "[dir] 'A directory for your book{n}(Defaults to Current Directory when omitted)'"; let matches = App::new("rustbook") .about("Build a book with mdBook") .author("Steve Klabnik <[email protected]>") .version(&*format!("v{}", crate_version!())) .setting(AppSettings::SubcommandRequired) .subcommand(SubCommand::with_name("build") .about("Build the book from the markdown files") .arg_from_usage(d_message) .arg_from_usage(dir_message)) .get_matches(); // Check which subcomamnd the user ran... let res = match matches.subcommand() { ("build", Some(sub_matches)) => build(sub_matches), (_, _) => unreachable!(), }; if let Err(e) = res { eprintln!("Error: {}", e); for cause in e.iter().skip(1) { eprintln!("\tCaused By: {}", cause); } ::std::process::exit(101); } } // Build command implementation pub fn build(args: &ArgMatches) -> Result<()> { let book_dir = get_book_dir(args); let mut book = MDBook::load(&book_dir)?; // Set this to allow us to catch bugs in advance. 
book.config.build.create_missing = false; if let Some(dest_dir) = args.value_of("dest-dir") { book.config.build.build_dir = PathBuf::from(dest_dir); } book.build()?; Ok(()) } fn get_book_dir(args: &ArgMatches) -> PathBuf { if let Some(dir) = args.value_of("dir") { // Check if path is relative from current dir, or absolute... let p = Path::new(dir); if p.is_relative() { env::current_dir().unwrap().join(dir) } else { p.to_path_buf() } } else { env::current_dir().unwrap() } }
31.313953
76
0.59599
d9d96ab7b078aa35156fc64c7120b5072bff73ea
5,090
use crate::buckets::create_buckets_limits; use bitcoin::Txid; use std::collections::{HashMap, HashSet}; use std::iter::FromIterator; pub struct MempoolWeightBuckets { /// contain the total weight for this bucket buckets_weights: Vec<u64>, /// contain the fee rate limits for every bucket ith buckets_limits: Vec<f64>, /// in which bucket the Txid is in tx_bucket: HashMap<Txid, TxBucket>, } pub struct TxBucket { index: usize, weight: u64, } pub struct MempoolBuckets { /// contain the number of elements for bucket ith buckets: Vec<u64>, /// contain the fee rate limits for every bucket ith buckets_limits: Vec<f64>, /// in which bucket the Txid is in tx_bucket: HashMap<Txid, usize>, } impl MempoolBuckets { pub fn new(increment_percent: u32, upper_limit: f64) -> Self { let (buckets_limits, buckets) = create_buckets(increment_percent, upper_limit); MempoolBuckets { buckets, buckets_limits, tx_bucket: HashMap::new(), } } pub fn clear(&mut self) { self.tx_bucket.clear(); for el in self.buckets.iter_mut() { *el = 0; } } pub fn add(&mut self, txid: Txid, rate: f64) { if rate > 1.0 && self.tx_bucket.get(&txid).is_none() { // TODO MempoolBuckets use array of indexes to avoid many comparisons? 
let index = self .buckets_limits .iter() .position(|e| e > &rate) .unwrap_or(self.buckets_limits.len() - 1); self.buckets[index] += 1; self.tx_bucket.insert(txid, index); } } pub fn remove(&mut self, txid: &Txid) { if let Some(index) = self.tx_bucket.remove(txid) { self.buckets[index] -= 1; } } pub fn number_of_buckets(&self) -> usize { self.buckets.len() } pub fn buckets_str(&self) -> String { self.buckets .iter() .map(|e| e.to_string()) .collect::<Vec<_>>() .join(",") } pub fn len(&self) -> usize { self.tx_bucket.len() } pub fn is_empty(&self) -> bool { self.tx_bucket.is_empty() } pub fn txids_set(&self) -> HashSet<&Txid> { HashSet::from_iter(self.tx_bucket.keys()) } } impl MempoolWeightBuckets { pub fn new(increment_percent: u32, upper_limit: f64) -> Self { let (buckets_limits, buckets_weights) = create_buckets(increment_percent, upper_limit); MempoolWeightBuckets { buckets_weights, buckets_limits, tx_bucket: HashMap::new(), } } pub fn clear(&mut self) { self.tx_bucket.clear(); for el in self.buckets_weights.iter_mut() { *el = 0; } } pub fn add(&mut self, txid: Txid, rate: f64, weight: u64) { if rate > 1.0 && self.tx_bucket.get(&txid).is_none() { // TODO MempoolBuckets use array of indexes to avoid many comparisons? 
let index = self .buckets_limits .iter() .position(|e| e > &rate) .unwrap_or(self.buckets_limits.len() - 1); self.buckets_weights[index] += weight; self.tx_bucket.insert(txid, TxBucket { index, weight }); } } pub fn remove(&mut self, txid: &Txid) { if let Some(bucket) = self.tx_bucket.remove(txid) { self.buckets_weights[bucket.index] -= bucket.weight; } } pub fn number_of_buckets(&self) -> usize { self.buckets_weights.len() } /// returns vbytes pub fn buckets_str(&self) -> String { self.buckets_weights .iter() .map(|e| (e / 4).to_string()) .collect::<Vec<_>>() .join(",") } pub fn len(&self) -> usize { self.tx_bucket.len() } pub fn is_empty(&self) -> bool { self.tx_bucket.is_empty() } pub fn txids_set(&self) -> HashSet<&Txid> { HashSet::from_iter(self.tx_bucket.keys()) } } fn create_buckets(increment_percent: u32, upper_limit: f64) -> (Vec<f64>, Vec<u64>) { let buckets_limits = create_buckets_limits(increment_percent, upper_limit); let buckets = vec![0u64; buckets_limits.len()]; (buckets_limits, buckets) } #[cfg(test)] mod tests { use crate::buckets::mempool::MempoolBuckets; use bitcoin::Txid; #[test] fn test_mempool() { let mut mempool = MempoolBuckets::new(20, 10.0); let txid = Txid::default(); mempool.add(txid, 5.0); assert_eq!(mempool.len(), 1); assert_eq!(mempool.number_of_buckets(), 13); assert_eq!(mempool.buckets_str(), "0,0,0,0,0,0,0,0,1,0,0,0,0"); mempool.remove(&txid); assert_eq!(mempool.len(), 0); assert_eq!(mempool.buckets_str(), "0,0,0,0,0,0,0,0,0,0,0,0,0"); mempool.remove(&txid); assert_eq!(mempool.len(), 0); assert_eq!(mempool.buckets_str(), "0,0,0,0,0,0,0,0,0,0,0,0,0"); } }
28.277778
95
0.564637
5dec1885a1cada45719e8a308cf1331c836a8860
942
//! Persistent Memory allocation APIs mod alg; mod pool; pub mod heap; pub use alg::buddy::*; pub use pool::*; /// Determines how much of the `MemPool` is used for the trait object. /// /// This is useful for obtaining the size of the referent of the persistent /// pointers. pub trait PmemUsage where Self: Sized, { /// Size of the object on Persistent Memory /// Assuming that self is not on PM, or considered else were, the size of allocated persistent memory /// is the sum of all persistent objects pointed by this object. fn size_of() -> usize { 0 } /// Size of the object on Persistent Memory including `Self` /// Assuming that self is also on PM (e.g. the root object), the size of allocated persistent memory /// includes the size of all objects pointed by this object and the size `Self`. fn size_of_pmem() -> usize { Self::size_of() + std::mem::size_of::<Self>() } }
29.4375
105
0.669851
69df69c402a220f29ba8f903762b5770d73fb9f5
14,613
// Copyright (c) The Diem Core Contributors // Copyright (c) The Move Contributors // SPDX-License-Identifier: Apache-2.0 use invalid_mutations::bounds::{ ApplyCodeUnitBoundsContext, ApplyOutOfBoundsContext, CodeUnitBoundsMutation, OutOfBoundsMutation, }; use move_binary_format::{ check_bounds::BoundsChecker, file_format::*, file_format_common, proptest_types::CompiledModuleStrategyGen, }; use move_core_types::{ account_address::AccountAddress, identifier::Identifier, vm_status::StatusCode, }; use proptest::{collection::vec, prelude::*}; #[test] fn empty_module_no_errors() { BoundsChecker::verify_module(&basic_test_module()).unwrap(); } #[test] fn empty_script_no_errors() { BoundsChecker::verify_script(&basic_test_script()).unwrap(); } #[test] fn invalid_default_module() { BoundsChecker::verify_module(&CompiledModule { version: file_format_common::VERSION_MAX, ..Default::default() }) .unwrap_err(); } #[test] fn invalid_self_module_handle_index() { let mut m = basic_test_module(); m.self_module_handle_idx = ModuleHandleIndex(12); assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn invalid_type_param_in_fn_return_() { use SignatureToken::*; let mut m = basic_test_module(); m.function_handles[0].return_ = SignatureIndex(1); m.signatures.push(Signature(vec![TypeParameter(0)])); assert_eq!(m.signatures.len(), 2); assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn invalid_type_param_in_fn_parameters() { use SignatureToken::*; let mut m = basic_test_module(); m.function_handles[0].parameters = SignatureIndex(1); m.signatures.push(Signature(vec![TypeParameter(0)])); assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn invalid_type_param_in_script_parameters() { use SignatureToken::*; let mut s = basic_test_script(); s.parameters = SignatureIndex(1); 
s.signatures.push(Signature(vec![TypeParameter(0)])); assert_eq!( BoundsChecker::verify_script(&s).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn invalid_struct_in_fn_return_() { use SignatureToken::*; let mut m = basic_test_module(); m.function_handles[0].return_ = SignatureIndex(1); m.signatures .push(Signature(vec![Struct(StructHandleIndex::new(1))])); assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn invalid_type_param_in_field() { use SignatureToken::*; let mut m = basic_test_module(); match &mut m.struct_defs[0].field_information { StructFieldInformation::Declared(ref mut fields) => { fields[0].signature.0 = TypeParameter(0); assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } _ => panic!("attempt to change a field that does not exist"), } } #[test] fn invalid_struct_in_field() { use SignatureToken::*; let mut m = basic_test_module(); match &mut m.struct_defs[0].field_information { StructFieldInformation::Declared(ref mut fields) => { fields[0].signature.0 = Struct(StructHandleIndex::new(3)); assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } _ => panic!("attempt to change a field that does not exist"), } } #[test] fn invalid_struct_with_actuals_in_field() { use SignatureToken::*; let mut m = basic_test_module(); match &mut m.struct_defs[0].field_information { StructFieldInformation::Declared(ref mut fields) => { fields[0].signature.0 = StructInstantiation(StructHandleIndex::new(0), vec![TypeParameter(0)]); assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::NUMBER_OF_TYPE_ARGUMENTS_MISMATCH ); } _ => panic!("attempt to change a field that does not exist"), } } #[test] fn invalid_locals_id_in_call() { use Bytecode::*; let mut m = basic_test_module(); m.function_instantiations.push(FunctionInstantiation { 
handle: FunctionHandleIndex::new(0), type_parameters: SignatureIndex::new(1), }); let func_inst_idx = FunctionInstantiationIndex(m.function_instantiations.len() as u16 - 1); m.function_defs[0].code.as_mut().unwrap().code = vec![CallGeneric(func_inst_idx)]; assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn script_invalid_locals_id_in_call() { use Bytecode::*; let mut s = basic_test_script(); s.function_instantiations.push(FunctionInstantiation { handle: FunctionHandleIndex::new(0), type_parameters: SignatureIndex::new(1), }); let func_inst_idx = FunctionInstantiationIndex(s.function_instantiations.len() as u16 - 1); s.code.code = vec![CallGeneric(func_inst_idx)]; assert_eq!( BoundsChecker::verify_script(&s).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn invalid_type_param_in_call() { use Bytecode::*; use SignatureToken::*; let mut m = basic_test_module(); m.signatures.push(Signature(vec![TypeParameter(0)])); m.function_instantiations.push(FunctionInstantiation { handle: FunctionHandleIndex::new(0), type_parameters: SignatureIndex::new(1), }); let func_inst_idx = FunctionInstantiationIndex(m.function_instantiations.len() as u16 - 1); m.function_defs[0].code.as_mut().unwrap().code = vec![CallGeneric(func_inst_idx)]; assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn script_invalid_type_param_in_call() { use Bytecode::*; use SignatureToken::*; let mut s = basic_test_script(); s.signatures.push(Signature(vec![TypeParameter(0)])); s.function_instantiations.push(FunctionInstantiation { handle: FunctionHandleIndex::new(0), type_parameters: SignatureIndex::new(1), }); let func_inst_idx = FunctionInstantiationIndex(s.function_instantiations.len() as u16 - 1); s.code.code = vec![CallGeneric(func_inst_idx)]; assert_eq!( BoundsChecker::verify_script(&s).unwrap_err().major_status(), 
StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn invalid_struct_as_type_actual_in_exists() { use Bytecode::*; use SignatureToken::*; let mut m = basic_test_module(); m.signatures .push(Signature(vec![Struct(StructHandleIndex::new(3))])); m.function_instantiations.push(FunctionInstantiation { handle: FunctionHandleIndex::new(0), type_parameters: SignatureIndex::new(1), }); let func_inst_idx = FunctionInstantiationIndex(m.function_instantiations.len() as u16 - 1); m.function_defs[0].code.as_mut().unwrap().code = vec![CallGeneric(func_inst_idx)]; assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn script_invalid_struct_as_type_argument_in_exists() { use Bytecode::*; use SignatureToken::*; let mut s = basic_test_script(); s.signatures .push(Signature(vec![Struct(StructHandleIndex::new(3))])); s.function_instantiations.push(FunctionInstantiation { handle: FunctionHandleIndex::new(0), type_parameters: SignatureIndex::new(1), }); let func_inst_idx = FunctionInstantiationIndex(s.function_instantiations.len() as u16 - 1); s.code.code = vec![CallGeneric(func_inst_idx)]; assert_eq!( BoundsChecker::verify_script(&s).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn invalid_friend_module_address() { let mut m = basic_test_module(); m.friend_decls.push(ModuleHandle { address: AddressIdentifierIndex::new(m.address_identifiers.len() as TableIndex), name: IdentifierIndex::new(0), }); assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn invalid_friend_module_name() { let mut m = basic_test_module(); m.friend_decls.push(ModuleHandle { address: AddressIdentifierIndex::new(0), name: IdentifierIndex::new(m.identifiers.len() as TableIndex), }); assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn script_missing_signature() { // The basic test script includes 
parameters pointing to an empty signature. let mut s = basic_test_script(); // Remove the empty signature from the script. s.signatures.clear(); // Bounds-checking the script should now result in an out-of-bounds error. assert_eq!( BoundsChecker::verify_script(&s).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } #[test] fn invalid_signature_for_vector_operation() { use Bytecode::*; let skeleton = basic_test_module(); let sig_index = SignatureIndex(skeleton.signatures.len() as u16); for bytecode in vec![ VecPack(sig_index, 0), VecLen(sig_index), VecImmBorrow(sig_index), VecMutBorrow(sig_index), VecPushBack(sig_index), VecPopBack(sig_index), VecUnpack(sig_index, 0), VecSwap(sig_index), ] { let mut m = skeleton.clone(); m.function_defs[0].code.as_mut().unwrap().code = vec![bytecode]; assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } } #[test] fn invalid_struct_for_vector_operation() { use Bytecode::*; use SignatureToken::*; let mut skeleton = basic_test_module(); skeleton .signatures .push(Signature(vec![Struct(StructHandleIndex::new(3))])); let sig_index = SignatureIndex((skeleton.signatures.len() - 1) as u16); for bytecode in vec![ VecPack(sig_index, 0), VecLen(sig_index), VecImmBorrow(sig_index), VecMutBorrow(sig_index), VecPushBack(sig_index), VecPopBack(sig_index), VecUnpack(sig_index, 0), VecSwap(sig_index), ] { let mut m = skeleton.clone(); m.function_defs[0].code.as_mut().unwrap().code = vec![bytecode]; assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } } #[test] fn invalid_type_param_for_vector_operation() { use Bytecode::*; use SignatureToken::*; let mut skeleton = basic_test_module(); skeleton.signatures.push(Signature(vec![TypeParameter(0)])); let sig_index = SignatureIndex((skeleton.signatures.len() - 1) as u16); for bytecode in vec![ VecPack(sig_index, 0), VecLen(sig_index), VecImmBorrow(sig_index), 
VecMutBorrow(sig_index), VecPushBack(sig_index), VecPopBack(sig_index), VecUnpack(sig_index, 0), VecSwap(sig_index), ] { let mut m = skeleton.clone(); m.function_defs[0].code.as_mut().unwrap().code = vec![bytecode]; assert_eq!( BoundsChecker::verify_module(&m).unwrap_err().major_status(), StatusCode::INDEX_OUT_OF_BOUNDS ); } } proptest! { #[test] fn valid_bounds(_module in CompiledModule::valid_strategy(20)) { // valid_strategy will panic if there are any bounds check issues. } } /// Ensure that valid modules that don't have any members (e.g. function args, struct fields) pass /// bounds checks. /// /// There are some potentially tricky edge cases around ranges that are captured here. #[test] fn valid_bounds_no_members() { let mut gen = CompiledModuleStrategyGen::new(20); gen.zeros_all(); proptest!(|(_module in gen.generate())| { // gen.generate() will panic if there are any bounds check issues. }); } proptest! { #[test] fn invalid_out_of_bounds( module in CompiledModule::valid_strategy(20), oob_mutations in vec(OutOfBoundsMutation::strategy(), 0..40), ) { let (module, expected_violations) = { let oob_context = ApplyOutOfBoundsContext::new(module, oob_mutations); oob_context.apply() }; let actual_violations = BoundsChecker::verify_module(&module); prop_assert_eq!(expected_violations.is_empty(), actual_violations.is_ok()); } #[test] fn code_unit_out_of_bounds( mut module in CompiledModule::valid_strategy(20), mutations in vec(CodeUnitBoundsMutation::strategy(), 0..40), ) { let expected_violations = { let context = ApplyCodeUnitBoundsContext::new(&mut module, mutations); context.apply() }; let actual_violations = BoundsChecker::verify_module(&module); prop_assert_eq!(expected_violations.is_empty(), actual_violations.is_ok()); } #[test] fn no_module_handles( identifiers in vec(any::<Identifier>(), 0..20), address_identifiers in vec(any::<AccountAddress>(), 0..20), ) { // If there are no module handles, the only other things that can be stored are intrinsic // data. 
let module = CompiledModule { identifiers, address_identifiers, ..Default::default() }; prop_assert_eq!( BoundsChecker::verify_module(&module).map_err(|e| e.major_status()), Err(StatusCode::NO_MODULE_HANDLES) ); } } proptest! { // Generating arbitrary compiled modules is really slow, possibly because of // https://github.com/AltSysrq/proptest/issues/143. #![proptest_config(ProptestConfig::with_cases(16))] /// Make sure that garbage inputs don't crash the bounds checker. #[test] fn garbage_inputs(module in any_with::<CompiledModule>(16)) { let _ = BoundsChecker::verify_module(&module); } }
31.425806
98
0.660097
0ac12dd5d1581e6515795d01481efe9e7a6c66a9
708
use crate::types::Il2CppException; use std::ops::{Deref, DerefMut}; pub struct Exception(*mut Il2CppException); impl<T: std::error::Error> From<T> for Exception { fn from(error: T) -> Self { let exception: *mut Il2CppException = Il2CppException::new(error.description()).unwrap(); Self(exception) } } impl From<&mut Il2CppException> for Exception { fn from(ex: &mut Il2CppException) -> Self { Self(ex) } } impl Deref for Exception { type Target = Il2CppException; fn deref(&self) -> &Self::Target { unsafe { &*self.0 } } } impl DerefMut for Exception { fn deref_mut(&mut self) -> &mut Self::Target { unsafe { &mut *self.0 } } }
23.6
97
0.622881
e2a6f7a23b10fa3090dfea6bc85c5c285999f58c
23,853
// WARNING: This file was autogenerated by jni-bindgen. Any changes to this file may be lost!!! #[cfg(any(feature = "all", feature = "org-xml-sax-helpers-DefaultHandler"))] __jni_bindgen! { /// public class [DefaultHandler](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html) /// /// Required feature: org-xml-sax-helpers-DefaultHandler public class DefaultHandler ("org/xml/sax/helpers/DefaultHandler") extends crate::java::lang::Object, implements crate::org::xml::sax::EntityResolver, crate::org::xml::sax::DTDHandler, crate::org::xml::sax::ContentHandler, crate::org::xml::sax::ErrorHandler { /// [DefaultHandler](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#DefaultHandler()) pub fn new<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::org::xml::sax::helpers::DefaultHandler>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "<init>", .descriptor == "()V" unsafe { let __jni_args = []; let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "<init>\0", "()V\0"); __jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [resolveEntity](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#resolveEntity(java.lang.String,%20java.lang.String)) /// /// Required features: "java-lang-String", "org-xml-sax-InputSource" #[cfg(any(feature = "all", all(feature = "java-lang-String", feature = "org-xml-sax-InputSource")))] pub fn resolveEntity<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>) -> 
__jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::org::xml::sax::InputSource>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "resolveEntity", .descriptor == "(Ljava/lang/String;Ljava/lang/String;)Lorg/xml/sax/InputSource;" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into()), __jni_bindgen::AsJValue::as_jvalue(&arg1.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "resolveEntity\0", "(Ljava/lang/String;Ljava/lang/String;)Lorg/xml/sax/InputSource;\0"); __jni_env.call_object_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [notationDecl](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#notationDecl(java.lang.String,%20java.lang.String,%20java.lang.String)) /// /// Required features: "java-lang-String" #[cfg(any(feature = "all", all(feature = "java-lang-String")))] pub fn notationDecl<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg2: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "notationDecl", .descriptor == "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into()), __jni_bindgen::AsJValue::as_jvalue(&arg1.into()), __jni_bindgen::AsJValue::as_jvalue(&arg2.into())]; let __jni_env = 
__jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "notationDecl\0", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [unparsedEntityDecl](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#unparsedEntityDecl(java.lang.String,%20java.lang.String,%20java.lang.String,%20java.lang.String)) /// /// Required features: "java-lang-String" #[cfg(any(feature = "all", all(feature = "java-lang-String")))] pub fn unparsedEntityDecl<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg2: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg3: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "unparsedEntityDecl", .descriptor == "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into()), __jni_bindgen::AsJValue::as_jvalue(&arg1.into()), __jni_bindgen::AsJValue::as_jvalue(&arg2.into()), __jni_bindgen::AsJValue::as_jvalue(&arg3.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "unparsedEntityDecl\0", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, 
__jni_args.as_ptr()) } } /// [setDocumentLocator](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#setDocumentLocator(org.xml.sax.Locator)) /// /// Required features: "org-xml-sax-Locator" #[cfg(any(feature = "all", all(feature = "org-xml-sax-Locator")))] pub fn setDocumentLocator<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::org::xml::sax::Locator>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "setDocumentLocator", .descriptor == "(Lorg/xml/sax/Locator;)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "setDocumentLocator\0", "(Lorg/xml/sax/Locator;)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [startDocument](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#startDocument()) pub fn startDocument<'env>(&'env self) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "startDocument", .descriptor == "()V" unsafe { let __jni_args = []; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "startDocument\0", "()V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [endDocument](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#endDocument()) pub fn endDocument<'env>(&'env self) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, 
crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "endDocument", .descriptor == "()V" unsafe { let __jni_args = []; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "endDocument\0", "()V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [startPrefixMapping](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#startPrefixMapping(java.lang.String,%20java.lang.String)) /// /// Required features: "java-lang-String" #[cfg(any(feature = "all", all(feature = "java-lang-String")))] pub fn startPrefixMapping<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "startPrefixMapping", .descriptor == "(Ljava/lang/String;Ljava/lang/String;)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into()), __jni_bindgen::AsJValue::as_jvalue(&arg1.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "startPrefixMapping\0", "(Ljava/lang/String;Ljava/lang/String;)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [endPrefixMapping](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#endPrefixMapping(java.lang.String)) /// /// Required features: "java-lang-String" #[cfg(any(feature = "all", all(feature = "java-lang-String")))] pub fn 
endPrefixMapping<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "endPrefixMapping", .descriptor == "(Ljava/lang/String;)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "endPrefixMapping\0", "(Ljava/lang/String;)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [startElement](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#startElement(java.lang.String,%20java.lang.String,%20java.lang.String,%20org.xml.sax.Attributes)) /// /// Required features: "java-lang-String", "org-xml-sax-Attributes" #[cfg(any(feature = "all", all(feature = "java-lang-String", feature = "org-xml-sax-Attributes")))] pub fn startElement<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg2: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg3: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::org::xml::sax::Attributes>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "startElement", .descriptor == "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lorg/xml/sax/Attributes;)V" unsafe { let __jni_args = 
[__jni_bindgen::AsJValue::as_jvalue(&arg0.into()), __jni_bindgen::AsJValue::as_jvalue(&arg1.into()), __jni_bindgen::AsJValue::as_jvalue(&arg2.into()), __jni_bindgen::AsJValue::as_jvalue(&arg3.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "startElement\0", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lorg/xml/sax/Attributes;)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [endElement](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#endElement(java.lang.String,%20java.lang.String,%20java.lang.String)) /// /// Required features: "java-lang-String" #[cfg(any(feature = "all", all(feature = "java-lang-String")))] pub fn endElement<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg2: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "endElement", .descriptor == "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into()), __jni_bindgen::AsJValue::as_jvalue(&arg1.into()), __jni_bindgen::AsJValue::as_jvalue(&arg2.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "endElement\0", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// 
[characters](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#characters(char%5B%5D,%20int,%20int)) pub fn characters<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env __jni_bindgen::CharArray>>, arg1: i32, arg2: i32) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "characters", .descriptor == "([CII)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into()), __jni_bindgen::AsJValue::as_jvalue(&arg1), __jni_bindgen::AsJValue::as_jvalue(&arg2)]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "characters\0", "([CII)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [ignorableWhitespace](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#ignorableWhitespace(char%5B%5D,%20int,%20int)) pub fn ignorableWhitespace<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env __jni_bindgen::CharArray>>, arg1: i32, arg2: i32) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "ignorableWhitespace", .descriptor == "([CII)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into()), __jni_bindgen::AsJValue::as_jvalue(&arg1), __jni_bindgen::AsJValue::as_jvalue(&arg2)]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "ignorableWhitespace\0", "([CII)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// 
[processingInstruction](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#processingInstruction(java.lang.String,%20java.lang.String)) /// /// Required features: "java-lang-String" #[cfg(any(feature = "all", all(feature = "java-lang-String")))] pub fn processingInstruction<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "processingInstruction", .descriptor == "(Ljava/lang/String;Ljava/lang/String;)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into()), __jni_bindgen::AsJValue::as_jvalue(&arg1.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "processingInstruction\0", "(Ljava/lang/String;Ljava/lang/String;)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [skippedEntity](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#skippedEntity(java.lang.String)) /// /// Required features: "java-lang-String" #[cfg(any(feature = "all", all(feature = "java-lang-String")))] pub fn skippedEntity<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "skippedEntity", .descriptor == "(Ljava/lang/String;)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let 
__jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "skippedEntity\0", "(Ljava/lang/String;)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [warning](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#warning(org.xml.sax.SAXParseException)) /// /// Required features: "org-xml-sax-SAXParseException" #[cfg(any(feature = "all", all(feature = "org-xml-sax-SAXParseException")))] pub fn warning<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::org::xml::sax::SAXParseException>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "warning", .descriptor == "(Lorg/xml/sax/SAXParseException;)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "warning\0", "(Lorg/xml/sax/SAXParseException;)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [error](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#error(org.xml.sax.SAXParseException)) /// /// Required features: "org-xml-sax-SAXParseException" #[cfg(any(feature = "all", all(feature = "org-xml-sax-SAXParseException")))] pub fn error<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::org::xml::sax::SAXParseException>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "error", .descriptor == 
"(Lorg/xml/sax/SAXParseException;)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "error\0", "(Lorg/xml/sax/SAXParseException;)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [fatalError](https://developer.android.com/reference/org/xml/sax/helpers/DefaultHandler.html#fatalError(org.xml.sax.SAXParseException)) /// /// Required features: "org-xml-sax-SAXParseException" #[cfg(any(feature = "all", all(feature = "org-xml-sax-SAXParseException")))] pub fn fatalError<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::org::xml::sax::SAXParseException>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "org/xml/sax/helpers/DefaultHandler", java.flags == PUBLIC, .name == "fatalError", .descriptor == "(Lorg/xml/sax/SAXParseException;)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("org/xml/sax/helpers/DefaultHandler\0", "fatalError\0", "(Lorg/xml/sax/SAXParseException;)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } } }
96.181452
614
0.661804
6a05e79e54cb56f30b58bb563770fb868be0502a
12,966
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. use clap::{clap_app, value_t}; use env_logger; use log::*; use std::collections::{BTreeMap, BTreeSet}; use std::thread; use std::time::Duration; use thrift; use thrift::protocol::{ TBinaryInputProtocolFactory, TBinaryOutputProtocolFactory, TCompactInputProtocolFactory, TCompactOutputProtocolFactory, TInputProtocolFactory, TOutputProtocolFactory, }; use thrift::server::{TMultiplexedProcessor, TServer}; use thrift::transport::{ TBufferedReadTransportFactory, TBufferedWriteTransportFactory, TFramedReadTransportFactory, TFramedWriteTransportFactory, TReadTransportFactory, TWriteTransportFactory, }; use thrift::OrderedFloat; use thrift_test::*; fn main() { env_logger::init(); debug!("initialized logger - running cross-test server"); match run() { Ok(()) => info!("cross-test server succeeded"), Err(e) => { info!("cross-test server failed with error {:?}", e); std::process::exit(1); } } } fn run() -> thrift::Result<()> { // unsupported options: // --domain-socket // --pipe // --ssl let matches = clap_app!(rust_test_client => (version: "1.0") (author: "Apache Thrift Developers <[email protected]>") (about: "Rust Thrift test server") (@arg port: --port +takes_value "port on which the test 
server listens") (@arg transport: --transport +takes_value "transport implementation to use (\"buffered\", \"framed\")") (@arg protocol: --protocol +takes_value "protocol implementation to use (\"binary\", \"compact\")") (@arg server_type: --server_type +takes_value "type of server instantiated (\"simple\", \"thread-pool\")") (@arg workers: -n --workers +takes_value "number of thread-pool workers (\"4\")") ) .get_matches(); let port = value_t!(matches, "port", u16).unwrap_or(9090); let transport = matches.value_of("transport").unwrap_or("buffered"); let protocol = matches.value_of("protocol").unwrap_or("binary"); let server_type = matches.value_of("server_type").unwrap_or("thread-pool"); let workers = value_t!(matches, "workers", usize).unwrap_or(4); let listen_address = format!("127.0.0.1:{}", port); info!("binding to {}", listen_address); let (i_transport_factory, o_transport_factory): ( Box<dyn TReadTransportFactory>, Box<dyn TWriteTransportFactory>, ) = match &*transport { "buffered" => ( Box::new(TBufferedReadTransportFactory::new()), Box::new(TBufferedWriteTransportFactory::new()), ), "framed" => ( Box::new(TFramedReadTransportFactory::new()), Box::new(TFramedWriteTransportFactory::new()), ), unknown => { return Err(format!("unsupported transport type {}", unknown).into()); } }; let (i_protocol_factory, o_protocol_factory): ( Box<dyn TInputProtocolFactory>, Box<dyn TOutputProtocolFactory>, ) = match &*protocol { "binary" | "multi" | "multi:binary" => ( Box::new(TBinaryInputProtocolFactory::new()), Box::new(TBinaryOutputProtocolFactory::new()), ), "compact" | "multic" | "multi:compact" => ( Box::new(TCompactInputProtocolFactory::new()), Box::new(TCompactOutputProtocolFactory::new()), ), unknown => { return Err(format!("unsupported transport type {}", unknown).into()); } }; let test_processor = ThriftTestSyncProcessor::new(ThriftTestSyncHandlerImpl {}); match &*server_type { "simple" | "thread-pool" => { if protocol == "multi" || protocol == "multic" { let 
second_service_processor = SecondServiceSyncProcessor::new(SecondServiceSyncHandlerImpl {}); let mut multiplexed_processor = TMultiplexedProcessor::new(); multiplexed_processor.register("ThriftTest", Box::new(test_processor), true)?; multiplexed_processor.register( "SecondService", Box::new(second_service_processor), false, )?; let mut server = TServer::new( i_transport_factory, i_protocol_factory, o_transport_factory, o_protocol_factory, multiplexed_processor, workers, ); server.listen(&listen_address) } else { let mut server = TServer::new( i_transport_factory, i_protocol_factory, o_transport_factory, o_protocol_factory, test_processor, workers, ); server.listen(&listen_address) } } unknown => Err(format!("unsupported server type {}", unknown).into()), } } struct ThriftTestSyncHandlerImpl; impl ThriftTestSyncHandler for ThriftTestSyncHandlerImpl { fn handle_test_void(&self) -> thrift::Result<()> { info!("testVoid()"); Ok(()) } fn handle_test_string(&self, thing: String) -> thrift::Result<String> { info!("testString({})", &thing); Ok(thing) } fn handle_test_bool(&self, thing: bool) -> thrift::Result<bool> { info!("testBool({})", thing); Ok(thing) } fn handle_test_byte(&self, thing: i8) -> thrift::Result<i8> { info!("testByte({})", thing); Ok(thing) } fn handle_test_i32(&self, thing: i32) -> thrift::Result<i32> { info!("testi32({})", thing); Ok(thing) } fn handle_test_i64(&self, thing: i64) -> thrift::Result<i64> { info!("testi64({})", thing); Ok(thing) } fn handle_test_double(&self, thing: OrderedFloat<f64>) -> thrift::Result<OrderedFloat<f64>> { info!("testDouble({})", thing); Ok(thing) } fn handle_test_binary(&self, thing: Vec<u8>) -> thrift::Result<Vec<u8>> { info!("testBinary({:?})", thing); Ok(thing) } fn handle_test_struct(&self, thing: Xtruct) -> thrift::Result<Xtruct> { info!("testStruct({:?})", thing); Ok(thing) } fn handle_test_nest(&self, thing: Xtruct2) -> thrift::Result<Xtruct2> { info!("testNest({:?})", thing); Ok(thing) } fn handle_test_map(&self, 
thing: BTreeMap<i32, i32>) -> thrift::Result<BTreeMap<i32, i32>> { info!("testMap({:?})", thing); Ok(thing) } fn handle_test_string_map( &self, thing: BTreeMap<String, String>, ) -> thrift::Result<BTreeMap<String, String>> { info!("testStringMap({:?})", thing); Ok(thing) } fn handle_test_set(&self, thing: BTreeSet<i32>) -> thrift::Result<BTreeSet<i32>> { info!("testSet({:?})", thing); Ok(thing) } fn handle_test_list(&self, thing: Vec<i32>) -> thrift::Result<Vec<i32>> { info!("testList({:?})", thing); Ok(thing) } fn handle_test_enum(&self, thing: Numberz) -> thrift::Result<Numberz> { info!("testEnum({:?})", thing); Ok(thing) } fn handle_test_typedef(&self, thing: UserId) -> thrift::Result<UserId> { info!("testTypedef({})", thing); Ok(thing) } /// @return map<i32,map<i32,i32>> - returns a dictionary with these values: /// {-4 => {-4 => -4, -3 => -3, -2 => -2, -1 => -1, }, 4 => {1 => 1, 2 => /// 2, 3 => 3, 4 => 4, }, } fn handle_test_map_map(&self, hello: i32) -> thrift::Result<BTreeMap<i32, BTreeMap<i32, i32>>> { info!("testMapMap({})", hello); let mut inner_map_0: BTreeMap<i32, i32> = BTreeMap::new(); for i in -4..(0 as i32) { inner_map_0.insert(i, i); } let mut inner_map_1: BTreeMap<i32, i32> = BTreeMap::new(); for i in 1..5 { inner_map_1.insert(i, i); } let mut ret_map: BTreeMap<i32, BTreeMap<i32, i32>> = BTreeMap::new(); ret_map.insert(-4, inner_map_0); ret_map.insert(4, inner_map_1); Ok(ret_map) } /// Creates a the returned map with these values and prints it out: /// { 1 => { 2 => argument, /// 3 => argument, /// }, /// 2 => { 6 => <empty Insanity struct>, }, /// } /// return map<UserId, map<Numberz,Insanity>> - a map with the above values fn handle_test_insanity( &self, argument: Insanity, ) -> thrift::Result<BTreeMap<UserId, BTreeMap<Numberz, Insanity>>> { info!("testInsanity({:?})", argument); let mut map_0: BTreeMap<Numberz, Insanity> = BTreeMap::new(); map_0.insert(Numberz::TWO, argument.clone()); map_0.insert(Numberz::THREE, argument); let mut map_1: 
BTreeMap<Numberz, Insanity> = BTreeMap::new(); let insanity = Insanity { user_map: None, xtructs: None, }; map_1.insert(Numberz::SIX, insanity); let mut ret: BTreeMap<UserId, BTreeMap<Numberz, Insanity>> = BTreeMap::new(); ret.insert(1, map_0); ret.insert(2, map_1); Ok(ret) } /// returns an Xtruct with: /// string_thing = "Hello2", byte_thing = arg0, i32_thing = arg1 and /// i64_thing = arg2 fn handle_test_multi( &self, arg0: i8, arg1: i32, arg2: i64, _: BTreeMap<i16, String>, _: Numberz, _: UserId, ) -> thrift::Result<Xtruct> { let x_ret = Xtruct { string_thing: Some("Hello2".to_owned()), byte_thing: Some(arg0), i32_thing: Some(arg1), i64_thing: Some(arg2), }; Ok(x_ret) } /// if arg == "Xception" throw Xception with errorCode = 1001 and message = /// arg /// else if arg == "TException" throw TException /// else do not throw anything fn handle_test_exception(&self, arg: String) -> thrift::Result<()> { info!("testException({})", arg); match &*arg { "Xception" => Err((Xception { error_code: Some(1001), message: Some(arg), }) .into()), "TException" => Err("this is a random error".into()), _ => Ok(()), } } /// if arg0 == "Xception": /// throw Xception with errorCode = 1001 and message = "This is an /// Xception" /// else if arg0 == "Xception2": /// throw Xception2 with errorCode = 2002 and struct_thing.string_thing = /// "This is an Xception2" // else: // do not throw anything and return Xtruct with string_thing = arg1 fn handle_test_multi_exception(&self, arg0: String, arg1: String) -> thrift::Result<Xtruct> { match &*arg0 { "Xception" => Err((Xception { error_code: Some(1001), message: Some("This is an Xception".to_owned()), }) .into()), "Xception2" => Err((Xception2 { error_code: Some(2002), struct_thing: Some(Xtruct { string_thing: Some("This is an Xception2".to_owned()), byte_thing: None, i32_thing: None, i64_thing: None, }), }) .into()), _ => Ok(Xtruct { string_thing: Some(arg1), byte_thing: None, i32_thing: None, i64_thing: None, }), } } fn 
handle_test_oneway(&self, seconds_to_sleep: i32) -> thrift::Result<()> { thread::sleep(Duration::from_secs(seconds_to_sleep as u64)); Ok(()) } } struct SecondServiceSyncHandlerImpl; impl SecondServiceSyncHandler for SecondServiceSyncHandlerImpl { fn handle_secondtest_string(&self, thing: String) -> thrift::Result<String> { info!("(second)testString({})", &thing); let ret = format!("testString(\"{}\")", &thing); Ok(ret) } }
33.942408
114
0.566327
8f934a1845d3e64ab855cee5f9721cba273f8b72
3,149
// svd2rust-generated accessors for the USER_KEK_KEY_CODE8 register.
// `R` wraps the generic read proxy so register-specific field readers can
// hang off it via Deref.
#[doc = "Register `USER_KEK_KEY_CODE8` reader"]
pub struct R(crate::R<USER_KEK_KEY_CODE8_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<USER_KEK_KEY_CODE8_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        // Forward to the generic reader so `R` exposes its methods (e.g. `bits`).
        &self.0
    }
}
impl core::convert::From<crate::R<USER_KEK_KEY_CODE8_SPEC>> for R {
    fn from(reader: crate::R<USER_KEK_KEY_CODE8_SPEC>) -> Self {
        R(reader)
    }
}
// `W` wraps the generic write proxy; DerefMut lets field writers mutate the
// staged bit value in place.
#[doc = "Register `USER_KEK_KEY_CODE8` writer"]
pub struct W(crate::W<USER_KEK_KEY_CODE8_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<USER_KEK_KEY_CODE8_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl core::convert::From<crate::W<USER_KEK_KEY_CODE8_SPEC>> for W {
    fn from(writer: crate::W<USER_KEK_KEY_CODE8_SPEC>) -> Self {
        W(writer)
    }
}
// Reader proxy for the single 32-bit FIELD field (bits 0:31).
#[doc = "Field `FIELD` reader - ."]
pub struct FIELD_R(crate::FieldReader<u32, u32>);
impl FIELD_R {
    pub(crate) fn new(bits: u32) -> Self {
        FIELD_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for FIELD_R {
    type Target = crate::FieldReader<u32, u32>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Writer proxy for FIELD; borrows the register writer for the duration of
// the field write.
#[doc = "Field `FIELD` writer - ."]
pub struct FIELD_W<'a> {
    w: &'a mut W,
}
impl<'a> FIELD_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        // Clear the field's bits then OR in the new value. The field spans
        // the whole register, so the mask is all ones (generated pattern).
        self.w.bits = (self.w.bits & !0xffff_ffff) | (value as u32 & 0xffff_ffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:31 - ."]
    #[inline(always)]
    pub fn field(&self) -> FIELD_R {
        FIELD_R::new((self.bits & 0xffff_ffff) as u32)
    }
}
impl W {
    #[doc = "Bits 0:31 - ."]
    #[inline(always)]
    pub fn field(&mut self) -> FIELD_W {
        FIELD_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
// Zero-sized marker type tying this register's reader/writer/reset behavior
// into the crate's generic register API.
#[doc = ".\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [user_kek_key_code8](index.html) module"]
pub struct USER_KEK_KEY_CODE8_SPEC;
impl crate::RegisterSpec for USER_KEK_KEY_CODE8_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [user_kek_key_code8::R](R) reader structure"]
impl crate::Readable for USER_KEK_KEY_CODE8_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [user_kek_key_code8::W](W) writer structure"]
impl crate::Writable for USER_KEK_KEY_CODE8_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets USER_KEK_KEY_CODE8 to value 0"]
impl crate::Resettable for USER_KEK_KEY_CODE8_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        // Hardware reset value for this register.
        0
    }
}
31.49
400
0.62242
bff5ba8960b9149f5ee7e8c4105dac4273d2a200
156,215
#[cfg(feature = "with-bigint")] use super::{BigInt, BigUint}; use error::ParseError; use std::iter::{Product, Sum}; use super::{ Bounded, CheckedAdd, CheckedDiv, CheckedMul, CheckedSub, FromPrimitive, Integer, Num, One, ParseRatioError, Ratio, Signed, ToPrimitive, Zero, }; use generic::{read_generic_integer, GenericInteger}; use std::cmp::{Eq, Ordering, PartialEq, PartialOrd}; use std::hash::{Hash, Hasher}; use std::num::FpCategory; use std::ops::{ Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Rem, RemAssign, Sub, SubAssign, }; use std::f64; use std::fmt; use std::mem; pub mod display; #[cfg(feature = "with-juniper-support")] pub mod juniper_support; #[cfg(feature = "with-postgres-support")] pub mod postgres_support; /// Sign representation /// /// Fractions keep the sign represented by this enum, /// so that we can use unsigned ints as base data types. #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] #[cfg_attr(feature = "with-serde-support", derive(Serialize, Deserialize))] pub enum Sign { Plus, Minus, } impl Sign { pub fn is_positive(self) -> bool { matches!(self, Sign::Plus) } pub fn is_negative(self) -> bool { matches!(self, Sign::Minus) } } impl Mul for Sign { type Output = Self; fn mul(self, oth: Sign) -> Self::Output { if self == oth { Sign::Plus } else { Sign::Minus } } } impl Neg for Sign { type Output = Self; fn neg(self) -> Sign { match self { Sign::Plus => Sign::Minus, Sign::Minus => Sign::Plus, } } } impl PartialOrd for Sign { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { match (self, other) { (Sign::Minus, Sign::Minus) => Some(Ordering::Equal), (Sign::Plus, Sign::Minus) => Some(Ordering::Greater), (Sign::Minus, Sign::Plus) => Some(Ordering::Less), (Sign::Plus, Sign::Plus) => Some(Ordering::Equal), } } } impl Ord for Sign { fn cmp(&self, other: &Self) -> Ordering { match (self, other) { (Sign::Minus, Sign::Minus) => Ordering::Equal, (Sign::Plus, Sign::Minus) => Ordering::Greater, (Sign::Minus, Sign::Plus) => Ordering::Less, 
(Sign::Plus, Sign::Plus) => Ordering::Equal, } } } impl fmt::Display for Sign { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let format = display::Format::new(f); display::format_sign(*self, f, &format) } } impl From<Sign> for char { fn from(sign: Sign) -> char { match sign { Sign::Plus => '+', Sign::Minus => '-', } } } /// Generic implementation of the fraction type /// /// # Examples /// /// ``` /// use fraction::GenericFraction; /// /// type F = GenericFraction<u8>; /// /// let first = F::new (1u8, 2u8); /// let second = F::new (2u8, 8u8); /// /// assert_eq! (first + second, F::new (3u8, 4u8)); /// ``` /// /// /// Since GenericFraction keeps its sign explicitly and independently of the numerics, /// it is not recommended to use signed types, although it's completely valid with the cost of target type capacity. /// /// ``` /// use fraction::GenericFraction; /// /// type F = GenericFraction<i8>; /// /// let first = F::new (1, 2); /// let second = F::new (2, 8); /// /// assert_eq! (first + second, F::new (3, 4)); /// ``` #[derive(Clone, Debug)] #[cfg_attr(feature = "with-serde-support", derive(Serialize, Deserialize))] pub enum GenericFraction<T> where T: Clone + Integer, { Rational(Sign, Ratio<T>), Infinity(Sign), NaN, } /// Copy semantics to be applied for the target type, but only if T also has it. 
impl<T> Copy for GenericFraction<T> where T: Copy + Integer {} impl<T> Default for GenericFraction<T> where T: Clone + Integer, { fn default() -> Self { Self::zero() } } impl<T> GenericFraction<T> where T: Clone + Integer, { /// Constructs a new fraction with the specified numerator and denominator /// Handles gracefully signed integers even if the storage type is unsigned and vise versa /// The arguments can be of any integer types imlementing the necessary traits /// /// # Examples /// /// ``` /// use fraction::{GenericFraction, Sign}; /// type F = GenericFraction<u16>; /// /// let f12 = F::new_generic(Sign::Plus, 1i8, 2u8).unwrap(); /// let f34 = F::new_generic(Sign::Plus, 3i16, 4u32).unwrap(); /// let f56 = F::new_generic(Sign::Plus, 5i64, 6u128).unwrap(); /// let f78 = F::new_generic(Sign::Plus, 7usize, 8isize).unwrap(); /// /// assert_eq! ((*f12.numer().unwrap(), *f12.denom().unwrap()), (1u16, 2u16)); /// assert_eq! ((*f34.numer().unwrap(), *f34.denom().unwrap()), (3u16, 4u16)); /// assert_eq! ((*f56.numer().unwrap(), *f56.denom().unwrap()), (5u16, 6u16)); /// assert_eq! ((*f78.numer().unwrap(), *f78.denom().unwrap()), (7u16, 8u16)); /// ```` pub fn new_generic<N, D>(sign: Sign, num: N, den: D) -> Option<GenericFraction<T>> where N: GenericInteger + PartialOrd, D: GenericInteger + PartialOrd, T: GenericInteger, { let (ns, num): (Sign, T) = read_generic_integer(num)?; let (ds, den): (Sign, T) = read_generic_integer(den)?; let sign = ns * ds * sign; Some(Self::_new(sign, num, den)) } fn _new<N, D>(sign: Sign, num: N, den: D) -> GenericFraction<T> where N: Into<T>, D: Into<T>, { let num: T = num.into(); let den: T = den.into(); if den.is_zero() { if num.is_zero() { GenericFraction::NaN } else { GenericFraction::Infinity(sign) } } else { GenericFraction::Rational(sign, Ratio::new(num, den)) } } /// Constructs a new fraction with the specified numerator and denominator /// /// The arguments must me either of `T` type, or implement `Into<T>` trait. 
/// /// # Examples /// /// ``` /// use fraction::GenericFraction; /// type F = GenericFraction<u16>; /// /// let _f = F::new(1u8, 2u16); /// ``` pub fn new<N, D>(num: N, den: D) -> GenericFraction<T> where N: Into<T>, D: Into<T>, { Self::_new(Sign::Plus, num, den) } /// Constructs a new negative fraction with the specified numerator and denominator /// /// The arguments must be either of `T` type, or implement `Into<T>` trait. /// /// # Examples /// /// ``` /// use fraction::GenericFraction; /// type F = GenericFraction<u16>; /// /// let _f = F::new_neg (1u8, 2u16); /// ``` pub fn new_neg<N, D>(num: N, den: D) -> GenericFraction<T> where N: Into<T>, D: Into<T>, { Self::_new(Sign::Minus, num, den) } /// Constructs a new fraction without types casting, checking for denom == 0 and reducing numbers. /// /// You must be careful with this function because all the other functionality parts rely on the /// numbers to be reduced. That said, in the normal case 2/4 has to be reduced to 1/2, but it will not /// happen with new_raw. /// /// # Examples /// /// ``` /// use fraction::GenericFraction; /// type F = GenericFraction<u8>; /// /// let _f = F::new_raw (1u8, 2u8); /// ``` pub fn new_raw(num: T, den: T) -> GenericFraction<T> { GenericFraction::Rational(Sign::Plus, Ratio::new_raw(num, den)) } /// The same as [fn new_raw](enum.GenericFraction.html#method.new_raw), but produces negative fractions. /// /// DEPRECATED! Use [fn new_raw_signed](enum.GenericFraction.html#method.new_raw_signed) instead /// /// # Examples /// /// ``` /// use fraction::GenericFraction; /// type F = GenericFraction<u8>; /// /// let _f = F::new_raw_neg (1u8, 2u8); /// ``` #[deprecated(note = "Use `new_raw_signed` instead")] pub fn new_raw_neg(num: T, den: T) -> GenericFraction<T> { GenericFraction::Rational(Sign::Minus, Ratio::new_raw(num, den)) } /// The same as [fn new_raw](enum.GenericFraction.html#method.new_raw), but allows explicitly set sign. 
/// /// # Examples /// /// ``` /// use fraction::{GenericFraction, Sign}; /// type F = GenericFraction<u8>; /// /// let _f = F::new_raw_signed(Sign::Minus, 1u8, 2u8); /// ``` pub fn new_raw_signed(sign: Sign, num: T, den: T) -> GenericFraction<T> { GenericFraction::Rational(sign, Ratio::new_raw(num, den)) } /// Returns a reference to the numerator value /// /// # Examples /// /// ``` /// use fraction::GenericFraction; /// type F = GenericFraction<u8>; /// /// let fra = F::new (5u8, 6u8); /// assert_eq! (5, *fra.numer ().unwrap ()); /// ``` pub fn numer(&self) -> Option<&T> { match *self { GenericFraction::Rational(_, ref r) => Some(r.numer()), _ => None, } } /// Returns a reference to the denominator value /// /// # Examples /// /// ``` /// use fraction::GenericFraction; /// type F = GenericFraction<u8>; /// /// let fra = F::new (5u8, 6u8); /// assert_eq! (6, *fra.denom ().unwrap ()); /// ``` pub fn denom(&self) -> Option<&T> { match *self { GenericFraction::Rational(_, ref r) => Some(r.denom()), _ => None, } } /// Returns a reference to the sign value /// /// # Examples /// /// ``` /// use fraction::{ GenericFraction, Sign }; /// type F = GenericFraction<u8>; /// /// /// let fra = F::new (5u8, 6u8); /// assert_eq! (Sign::Plus, fra.sign ().unwrap ()); /// /// let fra = F::new_neg (5u8, 6u8); /// assert_eq! (Sign::Minus, fra.sign ().unwrap ()); /// /// /// let fra = F::infinity (); /// assert_eq! (Sign::Plus, fra.sign ().unwrap ()); /// /// let fra = F::neg_infinity (); /// assert_eq! (Sign::Minus, fra.sign ().unwrap ()); /// /// /// let fra = F::nan (); /// assert_eq! 
(None, fra.sign ()); /// ``` pub fn sign(&self) -> Option<Sign> { match self { GenericFraction::Rational(s, _) => Some(*s), GenericFraction::Infinity(s) => Some(*s), _ => None, } } /// Generates a GenericFraction<T> from GenericFraction<F> /// where T: From<F> /// /// ``` /// use fraction::{ Fraction, GenericFraction }; /// type F8 = GenericFraction<u8>; /// /// let fra8 = F8::new (5u8, 6u8); /// assert_eq! (Fraction::new (5u64, 6u64), Fraction::from_fraction(fra8)); /// ``` pub fn from_fraction<F>(from: GenericFraction<F>) -> GenericFraction<T> where T: From<F>, F: Clone + Integer, { match from { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(sign) => GenericFraction::Infinity(sign), GenericFraction::Rational(sign, ratio) => { let (num, den): (F, F) = ratio.into(); GenericFraction::Rational(sign, Ratio::new_raw(T::from(num), T::from(den))) } } } /// Generates a GenericFraction<I> from GenericFraction<T> /// where T: Into<I> /// /// ``` /// use fraction::{ Fraction, GenericFraction }; /// type F8 = GenericFraction<u8>; /// /// let fra8 = F8::new (5u8, 6u8); /// assert_eq! (Fraction::new (5u64, 6u64), fra8.into_fraction()); /// ``` pub fn into_fraction<I>(self) -> GenericFraction<I> where T: Into<I>, I: Clone + Integer, { match self { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(sign) => GenericFraction::Infinity(sign), GenericFraction::Rational(sign, ratio) => { let (num, den): (T, T) = ratio.into(); GenericFraction::Rational(sign, Ratio::new_raw(num.into(), den.into())) } } } /// Returns a decimal representation of the fraction /// /// DEPRECATED! Use `format!("{:.1$}", fraction, precision)` instead /// /// If you have a fraction "1/2", in decimal it should be "0.5". /// /// Returns None in case we couldn't write the result into a string, /// e.g. not enough RAM. /// /// # Examples /// /// ``` /// use fraction::GenericFraction; /// type F = GenericFraction<u8>; /// /// assert_eq! 
("0.5", &F::new (1u8, 2u8).format_as_decimal (1).unwrap ()); /// assert_eq! ("0.8", &F::new (8u8, 10u8).format_as_decimal (2).unwrap ()); /// assert_eq! (&F::new (1u8, 3u8).format_as_decimal(32).unwrap(), "0.33333333333333333333333333333333"); /// ``` #[deprecated(note = "Use `format!(\"{:.1$}\", fraction, precision)`")] pub fn format_as_decimal(&self, precision: usize) -> Option<String> where T: Clone + GenericInteger, { Some(format!("{:.1$}", &self, precision)) } /// Parse a decimal string into a fraction and return the result. /// Returns ParseError::OverflowError if there's not enough space in T to represent the decimal (use BigFraction in such a case) /// Returns ParseError::ParseIntError if the string contains incorrect junk data (e.g. non-numeric characters). /// May return ParseIntError if there is not enough volume in T to read whole part of the number into it. /// /// # Examples /// /// ``` /// use fraction::Fraction; /// /// let f = Fraction::from_decimal_str ("1.5"); /// assert_eq! 
(f, Ok (Fraction::new(3u8, 2u8))); /// ``` pub fn from_decimal_str(src: &str) -> Result<Self, ParseError> where T: CheckedAdd + CheckedMul, { let (sign, start) = if src.starts_with('-') { (Sign::Minus, 1) } else if src.starts_with('+') { (Sign::Plus, 1) } else { (Sign::Plus, 0) }; let dot = src.find('.'); let who = match dot { Some(dot) => &src[start..dot], None => &src[start..], }; let mut num = match T::from_str_radix(who, 10) { Err(_) => return Err(ParseError::ParseIntError), Ok(value) => value, }; let (fra, len) = if let Some(dot) = dot { (T::from_str_radix(&src[dot + 1..], 10), src.len() - dot - 1) } else { (Ok(T::zero()), 0) }; let fra = match fra { Err(_) => return Err(ParseError::ParseIntError), Ok(value) => value, }; let mut den = T::one(); if len > 0 { let mut t10 = T::one(); for _ in 0..9 { t10 = if let Some(t10) = t10.checked_add(&den) { t10 } else { return Err(ParseError::OverflowError); }; } for _ in 0..len { num = if let Some(num) = num.checked_mul(&t10) { num } else { return Err(ParseError::OverflowError); }; den = if let Some(den) = den.checked_mul(&t10) { den } else { return Err(ParseError::OverflowError); }; } } let num = if let Some(num) = num.checked_add(&fra) { num } else { return Err(ParseError::OverflowError); }; Ok(GenericFraction::Rational(sign, Ratio::new(num, den))) } } impl<T: Bounded + Clone + Integer> Bounded for GenericFraction<T> { fn min_value() -> Self { let one = T::one(); let max = T::max_value(); GenericFraction::Rational(Sign::Minus, Ratio::new(max, one)) } fn max_value() -> Self { let one = T::one(); let max = T::max_value(); GenericFraction::Rational(Sign::Plus, Ratio::new(max, one)) } } impl<T: Clone + Integer> PartialEq for GenericFraction<T> { fn eq(&self, other: &Self) -> bool { match (self, other) { (GenericFraction::NaN, GenericFraction::NaN) => true, (GenericFraction::Infinity(sign), GenericFraction::Infinity(osign)) => sign == osign, ( GenericFraction::Rational(ref ls, ref l), GenericFraction::Rational(ref rs, ref 
r), ) => { if ls == rs { l.eq(r) } else { l.is_zero() && r.is_zero() } } _ => false, } } } impl<T: Clone + Integer + Hash> Hash for GenericFraction<T> { fn hash<H: Hasher>(&self, state: &mut H) { match self { GenericFraction::NaN => state.write_u8(0u8), GenericFraction::Infinity(sign) => { if let Sign::Plus = sign { state.write_u8(1u8) } else { state.write_u8(2u8) } } GenericFraction::Rational(sign, ratio) => { if *sign == Sign::Plus || ratio.is_zero() { state.write_u8(3u8); } else { state.write_u8(4u8); } ratio.hash(state); } } } } impl<T: Clone + Integer> Eq for GenericFraction<T> {} impl<T: Clone + Integer> PartialOrd for GenericFraction<T> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { match *self { GenericFraction::NaN => None, GenericFraction::Infinity(sign) => match *other { GenericFraction::NaN => None, GenericFraction::Infinity(osign) => sign.partial_cmp(&osign), GenericFraction::Rational(_, _) => { if sign == Sign::Plus { Some(Ordering::Greater) } else { Some(Ordering::Less) } } }, GenericFraction::Rational(ref ls, ref l) => match *other { GenericFraction::NaN => None, GenericFraction::Infinity(rs) => { if rs == Sign::Plus { Some(Ordering::Less) } else { Some(Ordering::Greater) } } GenericFraction::Rational(ref rs, ref r) => { if ls == rs { match *ls { Sign::Plus => l.partial_cmp(r), Sign::Minus => r.partial_cmp(l), } } else if l.is_zero() && r.is_zero() { Some(Ordering::Equal) } else if *ls == Sign::Minus { Some(Ordering::Less) } else { Some(Ordering::Greater) } } }, } } } impl<T: Clone + Integer> Neg for GenericFraction<T> { type Output = GenericFraction<T>; fn neg(self) -> Self { match self { GenericFraction::NaN => self, GenericFraction::Infinity(sign) => GenericFraction::Infinity(-sign), GenericFraction::Rational(s, r) => { if r.is_zero() { GenericFraction::Rational(Sign::Plus, r) } else { GenericFraction::Rational(s.neg(), r) } } } } } impl<'a, T: Clone + Integer> Neg for &'a GenericFraction<T> { type Output = GenericFraction<T>; fn 
neg(self) -> Self::Output { match *self { GenericFraction::NaN => GenericFraction::nan(), GenericFraction::Infinity(sign) => GenericFraction::Infinity(-sign), GenericFraction::Rational(s, ref r) => { if r.is_zero() { GenericFraction::Rational(Sign::Plus, r.clone()) } else { GenericFraction::Rational(-s, r.clone()) } } } } } impl<T: Clone + Integer> Add for GenericFraction<T> { type Output = Self; fn add(self, other: Self) -> Self { match self { GenericFraction::NaN => self, GenericFraction::Infinity(sign) => match other { GenericFraction::NaN => other, GenericFraction::Rational(_, _) => self, GenericFraction::Infinity(osign) => { if sign != osign { GenericFraction::NaN } else { self } } }, GenericFraction::Rational(ls, l) => match other { GenericFraction::NaN => other, GenericFraction::Infinity(_) => other, GenericFraction::Rational(rs, r) => { if ls == Sign::Plus && rs == Sign::Plus { GenericFraction::Rational(Sign::Plus, l.add(r)) } else if ls == Sign::Plus { if l < r { GenericFraction::Rational(Sign::Minus, r.sub(l)) } else { GenericFraction::Rational(Sign::Plus, l.sub(r)) } } else if rs == Sign::Plus { if r < l { GenericFraction::Rational(Sign::Minus, l.sub(r)) } else { GenericFraction::Rational(Sign::Plus, r.sub(l)) } } else { GenericFraction::Rational(Sign::Minus, l.add(r)) } } }, } } } impl<'a, T> Add for &'a GenericFraction<T> where T: Clone + Integer, { type Output = GenericFraction<T>; fn add(self, other: Self) -> GenericFraction<T> { match *self { GenericFraction::NaN => self.clone(), GenericFraction::Infinity(sign) => match *other { GenericFraction::NaN => other.clone(), GenericFraction::Rational(_, _) => self.clone(), GenericFraction::Infinity(osign) => { if sign != osign { GenericFraction::NaN } else { self.clone() } } }, GenericFraction::Rational(ls, ref l) => match *other { GenericFraction::NaN => other.clone(), GenericFraction::Infinity(_) => other.clone(), GenericFraction::Rational(rs, ref r) => { if ls == Sign::Plus && rs == Sign::Plus { 
GenericFraction::Rational(Sign::Plus, l.add(r)) } else if ls == Sign::Plus { if l < r { GenericFraction::Rational(Sign::Minus, r.sub(l)) } else { GenericFraction::Rational(Sign::Plus, l.sub(r)) } } else if rs == Sign::Plus { if r < l { GenericFraction::Rational(Sign::Minus, l.sub(r)) } else { GenericFraction::Rational(Sign::Plus, r.sub(l)) } } else { GenericFraction::Rational(Sign::Minus, l.add(r)) } } }, } } } impl<T> CheckedAdd for GenericFraction<T> where T: Clone + Integer + CheckedAdd + CheckedSub + CheckedMul, { fn checked_add(&self, other: &Self) -> Option<GenericFraction<T>> { match *self { GenericFraction::NaN => Some(self.clone()), GenericFraction::Infinity(sign) => match *other { GenericFraction::NaN => Some(other.clone()), GenericFraction::Rational(_, _) => Some(self.clone()), GenericFraction::Infinity(osign) => { if sign != osign { Some(GenericFraction::NaN) } else { Some(self.clone()) } } }, GenericFraction::Rational(ls, ref l) => match *other { GenericFraction::NaN => Some(other.clone()), GenericFraction::Infinity(_) => Some(other.clone()), GenericFraction::Rational(rs, ref r) => { if ls == Sign::Plus && rs == Sign::Plus { l.checked_add(r) .map(|value| GenericFraction::Rational(Sign::Plus, value)) } else if ls == Sign::Plus { if l < r { r.checked_sub(l) .map(|value| GenericFraction::Rational(Sign::Minus, value)) } else { l.checked_sub(r) .map(|value| GenericFraction::Rational(Sign::Plus, value)) } } else if rs == Sign::Plus { if r < l { l.checked_sub(r) .map(|value| GenericFraction::Rational(Sign::Minus, value)) } else { r.checked_sub(l) .map(|value| GenericFraction::Rational(Sign::Plus, value)) } } else { l.checked_add(r) .map(|value| GenericFraction::Rational(Sign::Minus, value)) } } }, } } } impl<T: Clone + Integer> AddAssign for GenericFraction<T> { fn add_assign(&mut self, other: Self) { *self = match *self { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(ls) => match other { GenericFraction::NaN => GenericFraction::NaN, 
GenericFraction::Rational(_, _) => GenericFraction::Infinity(ls), GenericFraction::Infinity(rs) => { if ls != rs { GenericFraction::NaN } else { GenericFraction::Infinity(ls) } } }, GenericFraction::Rational(ls, ref mut l) => match other { GenericFraction::NaN => other, GenericFraction::Infinity(_) => other, GenericFraction::Rational(rs, r) => { let l_ = mem::replace(l, Ratio::new_raw(T::zero(), T::zero())); if ls == Sign::Plus && rs == Sign::Plus { GenericFraction::Rational(Sign::Plus, l_.add(r)) } else if ls == Sign::Plus { if l_ < r { GenericFraction::Rational(Sign::Minus, r.sub(l_)) } else { GenericFraction::Rational(Sign::Plus, l_.sub(r)) } } else if rs == Sign::Plus { if r < l_ { GenericFraction::Rational(Sign::Minus, l_.sub(r)) } else { GenericFraction::Rational(Sign::Plus, r.sub(l_)) } } else { GenericFraction::Rational(Sign::Minus, l_.add(r)) } } }, }; } } impl<'a, T> AddAssign<&'a GenericFraction<T>> for GenericFraction<T> where T: Clone + Integer, { fn add_assign(&mut self, other: &'a Self) { *self = match *self { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(ls) => match *other { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Rational(_, _) => GenericFraction::Infinity(ls), GenericFraction::Infinity(rs) => { if ls != rs { GenericFraction::NaN } else { GenericFraction::Infinity(ls) } } }, GenericFraction::Rational(ls, ref mut l) => match *other { GenericFraction::NaN => other.clone(), GenericFraction::Infinity(_) => other.clone(), GenericFraction::Rational(rs, ref r) => { let l_ = mem::replace(l, Ratio::new_raw(T::zero(), T::zero())); if ls == Sign::Plus && rs == Sign::Plus { GenericFraction::Rational(Sign::Plus, l_.add(r)) } else if ls == Sign::Plus { if l_ < *r { GenericFraction::Rational(Sign::Minus, r.sub(l_)) } else { GenericFraction::Rational(Sign::Plus, l_.sub(r)) } } else if rs == Sign::Plus { if *r < l_ { GenericFraction::Rational(Sign::Minus, l_.sub(r)) } else { GenericFraction::Rational(Sign::Plus, 
r.sub(l_)) } } else { GenericFraction::Rational(Sign::Minus, l_.add(r)) } } }, }; } } impl<T: Clone + Integer> Sub for GenericFraction<T> { type Output = Self; fn sub(self, other: Self) -> Self { match self { GenericFraction::NaN => self, GenericFraction::Infinity(sign) => match other { GenericFraction::NaN => other, GenericFraction::Infinity(osign) => { if sign == osign { GenericFraction::NaN } else { self } } GenericFraction::Rational(_, _) => self, }, GenericFraction::Rational(ls, l) => match other { GenericFraction::NaN => other, GenericFraction::Infinity(sign) => GenericFraction::Infinity(-sign), GenericFraction::Rational(rs, r) => { if ls == Sign::Plus && rs == Sign::Plus { if l < r { GenericFraction::Rational(Sign::Minus, r.sub(l)) } else { GenericFraction::Rational(Sign::Plus, l.sub(r)) } } else if ls == Sign::Plus { GenericFraction::Rational(Sign::Plus, l.add(r)) } else if rs == Sign::Plus { GenericFraction::Rational(Sign::Minus, l.add(r)) } else if l < r { GenericFraction::Rational(Sign::Plus, r.sub(l)) } else { GenericFraction::Rational(Sign::Minus, l.sub(r)) } } }, } } } impl<'a, T> Sub for &'a GenericFraction<T> where T: Clone + Integer, { type Output = GenericFraction<T>; fn sub(self, other: Self) -> GenericFraction<T> { match *self { GenericFraction::NaN => self.clone(), GenericFraction::Infinity(sign) => match *other { GenericFraction::NaN => other.clone(), GenericFraction::Infinity(osign) => { if sign == osign { GenericFraction::NaN } else { self.clone() } } GenericFraction::Rational(_, _) => self.clone(), }, GenericFraction::Rational(ls, ref l) => match *other { GenericFraction::NaN => other.clone(), GenericFraction::Infinity(sign) => GenericFraction::Infinity(-sign), GenericFraction::Rational(rs, ref r) => { if ls == Sign::Plus && rs == Sign::Plus { if l < r { GenericFraction::Rational(Sign::Minus, r.sub(l)) } else { GenericFraction::Rational(Sign::Plus, l.sub(r)) } } else if ls == Sign::Plus { GenericFraction::Rational(Sign::Plus, l.add(r)) } 
else if rs == Sign::Plus { GenericFraction::Rational(Sign::Minus, l.add(r)) } else if l < r { GenericFraction::Rational(Sign::Plus, r.sub(l)) } else { GenericFraction::Rational(Sign::Minus, l.sub(r)) } } }, } } } impl<T> CheckedSub for GenericFraction<T> where T: Clone + Integer + CheckedAdd + CheckedSub + CheckedMul, { fn checked_sub(&self, other: &Self) -> Option<GenericFraction<T>> { match *self { GenericFraction::NaN => Some(self.clone()), GenericFraction::Infinity(sign) => match *other { GenericFraction::NaN => Some(other.clone()), GenericFraction::Infinity(osign) => { if sign == osign { Some(GenericFraction::NaN) } else { Some(self.clone()) } } GenericFraction::Rational(_, _) => Some(self.clone()), }, GenericFraction::Rational(ls, ref l) => match *other { GenericFraction::NaN => Some(other.clone()), GenericFraction::Infinity(sign) => Some(GenericFraction::Infinity(-sign)), GenericFraction::Rational(rs, ref r) => { if ls == Sign::Plus && rs == Sign::Plus { if l < r { r.checked_sub(l) .map(|value| GenericFraction::Rational(Sign::Minus, value)) } else { l.checked_sub(r) .map(|value| GenericFraction::Rational(Sign::Plus, value)) } } else if ls == Sign::Plus { l.checked_add(r) .map(|value| GenericFraction::Rational(Sign::Plus, value)) } else if rs == Sign::Plus { l.checked_add(r) .map(|value| GenericFraction::Rational(Sign::Minus, value)) } else if l < r { r.checked_sub(l) .map(|value| GenericFraction::Rational(Sign::Plus, value)) } else { l.checked_sub(r) .map(|value| GenericFraction::Rational(Sign::Minus, value)) } } }, } } } impl<T: Clone + Integer> SubAssign for GenericFraction<T> { fn sub_assign(&mut self, other: Self) { *self = match *self { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(ls) => match other { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(rs) => { if ls == rs { GenericFraction::NaN } else { GenericFraction::Infinity(ls) } } GenericFraction::Rational(_, _) => GenericFraction::Infinity(ls), }, 
GenericFraction::Rational(ls, ref mut l) => match other { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(s) => GenericFraction::Infinity(-s), GenericFraction::Rational(rs, r) => { let l_ = mem::replace(l, Ratio::new_raw(T::zero(), T::zero())); if ls == Sign::Plus && rs == Sign::Plus { if l_ < r { GenericFraction::Rational(Sign::Minus, r.sub(l_)) } else { GenericFraction::Rational(Sign::Plus, l_.sub(r)) } } else if ls == Sign::Plus { GenericFraction::Rational(Sign::Plus, l_.add(r)) } else if rs == Sign::Plus { GenericFraction::Rational(Sign::Minus, l_.add(r)) } else if l_ < r { GenericFraction::Rational(Sign::Plus, r.sub(l_)) } else { GenericFraction::Rational(Sign::Minus, l_.sub(r)) } } }, }; } } impl<'a, T> SubAssign<&'a GenericFraction<T>> for GenericFraction<T> where T: Clone + Integer, { fn sub_assign(&mut self, other: &'a Self) { *self = match *self { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(ls) => match *other { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(rs) => { if ls == rs { GenericFraction::NaN } else { GenericFraction::Infinity(ls) } } GenericFraction::Rational(_, _) => GenericFraction::Infinity(ls), }, GenericFraction::Rational(ls, ref mut l) => match *other { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(s) => GenericFraction::Infinity(-s), GenericFraction::Rational(rs, ref r) => { let l_ = mem::replace(l, Ratio::new_raw(T::zero(), T::zero())); if ls == Sign::Plus && rs == Sign::Plus { if l_ < *r { GenericFraction::Rational(Sign::Minus, r.sub(l_)) } else { GenericFraction::Rational(Sign::Plus, l_.sub(r)) } } else if ls == Sign::Plus { GenericFraction::Rational(Sign::Plus, l_.add(r)) } else if rs == Sign::Plus { GenericFraction::Rational(Sign::Minus, l_.add(r)) } else if l_ < *r { GenericFraction::Rational(Sign::Plus, r.sub(l_)) } else { GenericFraction::Rational(Sign::Minus, l_.sub(r)) } } }, }; } } impl<T: Clone + Integer> Mul for 
GenericFraction<T> { type Output = Self; fn mul(self, other: Self) -> Self { match self { GenericFraction::NaN => self, GenericFraction::Infinity(sign) => match other { GenericFraction::NaN => other, GenericFraction::Infinity(osign) => GenericFraction::Infinity(if sign == osign { Sign::Plus } else { Sign::Minus }), GenericFraction::Rational(osign, l) => { if l.is_zero() { GenericFraction::NaN } else { GenericFraction::Infinity(if sign == osign { Sign::Plus } else { Sign::Minus }) } } }, GenericFraction::Rational(sign, l) => match other { GenericFraction::NaN => other, GenericFraction::Infinity(osign) => { if l.is_zero() { GenericFraction::NaN } else { GenericFraction::Infinity(if sign == osign { Sign::Plus } else { Sign::Minus }) } } GenericFraction::Rational(osign, r) => { let s = if l.is_zero() || r.is_zero() || sign == osign { Sign::Plus } else { Sign::Minus }; GenericFraction::Rational(s, l.mul(r)) } }, } } } impl<'a, T> Mul for &'a GenericFraction<T> where T: Clone + Integer, { type Output = GenericFraction<T>; fn mul(self, other: Self) -> GenericFraction<T> { match *self { GenericFraction::NaN => self.clone(), GenericFraction::Infinity(sign) => match *other { GenericFraction::NaN => other.clone(), GenericFraction::Infinity(osign) => GenericFraction::Infinity(if sign == osign { Sign::Plus } else { Sign::Minus }), GenericFraction::Rational(osign, ref l) => { if l.is_zero() { GenericFraction::NaN } else { GenericFraction::Infinity(if sign == osign { Sign::Plus } else { Sign::Minus }) } } }, GenericFraction::Rational(sign, ref l) => match *other { GenericFraction::NaN => other.clone(), GenericFraction::Infinity(osign) => { if l.is_zero() { GenericFraction::NaN } else { GenericFraction::Infinity(if sign == osign { Sign::Plus } else { Sign::Minus }) } } GenericFraction::Rational(osign, ref r) => { let s = if l.is_zero() || r.is_zero() || sign == osign { Sign::Plus } else { Sign::Minus }; GenericFraction::Rational(s, l.mul(r)) } }, } } } impl<T> CheckedMul for 
GenericFraction<T> where T: Clone + Integer + CheckedMul, { fn checked_mul(&self, other: &Self) -> Option<GenericFraction<T>> { match *self { GenericFraction::NaN => Some(self.clone()), GenericFraction::Infinity(sign) => match *other { GenericFraction::NaN => Some(other.clone()), GenericFraction::Infinity(osign) => { Some(GenericFraction::Infinity(if sign == osign { Sign::Plus } else { Sign::Minus })) } GenericFraction::Rational(osign, ref l) => { if l.is_zero() { Some(GenericFraction::NaN) } else { Some(GenericFraction::Infinity(if sign == osign { Sign::Plus } else { Sign::Minus })) } } }, GenericFraction::Rational(sign, ref l) => match *other { GenericFraction::NaN => Some(other.clone()), GenericFraction::Infinity(osign) => { if l.is_zero() { Some(GenericFraction::NaN) } else { Some(GenericFraction::Infinity(if sign == osign { Sign::Plus } else { Sign::Minus })) } } GenericFraction::Rational(osign, ref r) => l.checked_mul(r).map(|value| { GenericFraction::Rational( if l.is_zero() || r.is_zero() || sign == osign { Sign::Plus } else { Sign::Minus }, value, ) }), }, } } } impl<T: Clone + Integer> MulAssign for GenericFraction<T> { fn mul_assign(&mut self, other: Self) { *self = match *self { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(ls) => match other { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(rs) => { GenericFraction::Infinity(if ls == rs { Sign::Plus } else { Sign::Minus }) } GenericFraction::Rational(rs, r) => { if r.is_zero() { GenericFraction::NaN } else { GenericFraction::Infinity(if ls == rs { Sign::Plus } else { Sign::Minus }) } } }, GenericFraction::Rational(ls, ref mut l) => match other { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(rs) => { if l.is_zero() { GenericFraction::NaN } else { GenericFraction::Infinity(if ls == rs { Sign::Plus } else { Sign::Minus }) } } GenericFraction::Rational(rs, r) => { let l_ = mem::replace(l, Ratio::new_raw(T::zero(), T::zero())); let 
s = if l_.is_zero() || r.is_zero() || ls == rs {
    Sign::Plus
} else {
    Sign::Minus
};
GenericFraction::Rational(s, l_.mul(r))
                }
            },
        };
    }
}

/// In-place multiplication by reference; mirrors the owned `MulAssign` above.
impl<'a, T> MulAssign<&'a GenericFraction<T>> for GenericFraction<T>
where
    T: Clone + Integer,
{
    fn mul_assign(&mut self, other: &'a Self) {
        *self = match *self {
            GenericFraction::NaN => GenericFraction::NaN,
            GenericFraction::Infinity(ls) => match *other {
                GenericFraction::NaN => GenericFraction::NaN,
                GenericFraction::Infinity(rs) => {
                    GenericFraction::Infinity(if ls == rs { Sign::Plus } else { Sign::Minus })
                }
                GenericFraction::Rational(rs, ref r) => {
                    // inf * 0 is undefined -> NaN.
                    if r.is_zero() {
                        GenericFraction::NaN
                    } else {
                        GenericFraction::Infinity(if ls == rs { Sign::Plus } else { Sign::Minus })
                    }
                }
            },
            GenericFraction::Rational(ls, ref mut l) => match *other {
                GenericFraction::NaN => GenericFraction::NaN,
                GenericFraction::Infinity(rs) => {
                    // 0 * inf is undefined -> NaN.
                    if l.is_zero() {
                        GenericFraction::NaN
                    } else {
                        GenericFraction::Infinity(if ls == rs { Sign::Plus } else { Sign::Minus })
                    }
                }
                GenericFraction::Rational(rs, ref r) => {
                    // Move the ratio out without cloning, leaving a placeholder.
                    let l_ = mem::replace(l, Ratio::new_raw(T::zero(), T::zero()));
                    // A zero result is always normalized to a Plus sign.
                    let s = if l_.is_zero() || r.is_zero() || ls == rs {
                        Sign::Plus
                    } else {
                        Sign::Minus
                    };
                    GenericFraction::Rational(s, l_.mul(r))
                }
            },
        };
    }
}

/// Division. `0/0` and `inf/inf` are NaN; dividing a non-zero finite value by
/// zero yields a signed infinity; dividing a finite value by infinity yields
/// +0 (the dividend's sign is discarded).
impl<T: Clone + Integer> Div for GenericFraction<T> {
    type Output = Self;

    fn div(self, other: Self) -> Self {
        match self {
            GenericFraction::NaN => self,
            GenericFraction::Infinity(sign) => match other {
                GenericFraction::NaN => other,
                // inf / inf is undefined.
                GenericFraction::Infinity(_) => GenericFraction::NaN,
                GenericFraction::Rational(osign, _) => {
                    GenericFraction::Infinity(if sign == osign { Sign::Plus } else { Sign::Minus })
                }
            },
            GenericFraction::Rational(sign, l) => match other {
                GenericFraction::NaN => other,
                // finite / inf -> +0.
                GenericFraction::Infinity(_) => {
                    GenericFraction::Rational(Sign::Plus, Ratio::zero())
                }
                GenericFraction::Rational(osign, r) => {
                    // 0 / 0 is undefined.
                    if l.is_zero() && r.is_zero() {
                        GenericFraction::NaN
                    } else if r.is_zero() {
                        // finite / 0 -> infinity carrying the dividend's sign.
                        GenericFraction::Infinity(sign)
                    } else if l.is_zero() {
GenericFraction::Rational(Sign::Plus, l) } else { GenericFraction::Rational( if sign == osign { Sign::Plus } else { Sign::Minus }, l.div(r), ) } } }, } } } impl<'a, T> Div for &'a GenericFraction<T> where T: Clone + Integer, { type Output = GenericFraction<T>; fn div(self, other: Self) -> GenericFraction<T> { match *self { GenericFraction::NaN => self.clone(), GenericFraction::Infinity(sign) => match *other { GenericFraction::NaN => other.clone(), GenericFraction::Infinity(_) => GenericFraction::NaN, GenericFraction::Rational(osign, _) => { GenericFraction::Infinity(if sign == osign { Sign::Plus } else { Sign::Minus }) } }, GenericFraction::Rational(sign, ref l) => match *other { GenericFraction::NaN => other.clone(), GenericFraction::Infinity(_) => { GenericFraction::Rational(Sign::Plus, Ratio::zero()) } GenericFraction::Rational(osign, ref r) => { if l.is_zero() && r.is_zero() { GenericFraction::NaN } else if r.is_zero() { GenericFraction::Infinity(sign) } else if l.is_zero() { GenericFraction::Rational(Sign::Plus, l.clone()) } else { GenericFraction::Rational( if sign == osign { Sign::Plus } else { Sign::Minus }, l.div(r), ) } } }, } } } impl<T> CheckedDiv for GenericFraction<T> where T: Clone + Integer + CheckedDiv + CheckedMul, { fn checked_div(&self, other: &Self) -> Option<GenericFraction<T>> { match *self { GenericFraction::NaN => Some(self.clone()), GenericFraction::Infinity(sign) => match *other { GenericFraction::NaN => Some(other.clone()), GenericFraction::Infinity(_) => Some(GenericFraction::NaN), GenericFraction::Rational(osign, _) => { Some(GenericFraction::Infinity(if sign == osign { Sign::Plus } else { Sign::Minus })) } }, GenericFraction::Rational(sign, ref l) => match *other { GenericFraction::NaN => Some(other.clone()), GenericFraction::Infinity(_) => { Some(GenericFraction::Rational(Sign::Plus, Ratio::zero())) } GenericFraction::Rational(osign, ref r) => { if l.is_zero() && r.is_zero() { Some(GenericFraction::NaN) } else if r.is_zero() { 
Some(GenericFraction::Infinity(sign)) } else if l.is_zero() { Some(GenericFraction::Rational(Sign::Plus, l.clone())) } else { l.checked_div(r).map(|value| { GenericFraction::Rational( if sign == osign { Sign::Plus } else { Sign::Minus }, value, ) }) } } }, } } } impl<T: Clone + Integer> DivAssign for GenericFraction<T> { fn div_assign(&mut self, other: Self) { *self = match *self { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(ls) => match other { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(_) => GenericFraction::NaN, GenericFraction::Rational(rs, _) => { GenericFraction::Infinity(if ls == rs { Sign::Plus } else { Sign::Minus }) } }, GenericFraction::Rational(ls, ref mut l) => match other { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(_) => { GenericFraction::Rational(Sign::Plus, Ratio::zero()) } GenericFraction::Rational(rs, r) => { let l_ = mem::replace(l, Ratio::new_raw(T::zero(), T::zero())); if l_.is_zero() && r.is_zero() { GenericFraction::NaN } else if r.is_zero() { GenericFraction::Infinity(ls) } else if l_.is_zero() { GenericFraction::Rational(Sign::Plus, l_) } else { GenericFraction::Rational( if ls == rs { Sign::Plus } else { Sign::Minus }, l_.div(r), ) } } }, }; } } impl<'a, T> DivAssign<&'a GenericFraction<T>> for GenericFraction<T> where T: Clone + Integer, { fn div_assign(&mut self, other: &'a Self) { *self = match *self { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(ls) => match *other { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(_) => GenericFraction::NaN, GenericFraction::Rational(rs, _) => { GenericFraction::Infinity(if ls == rs { Sign::Plus } else { Sign::Minus }) } }, GenericFraction::Rational(ls, ref mut l) => match *other { GenericFraction::NaN => GenericFraction::NaN, GenericFraction::Infinity(_) => { GenericFraction::Rational(Sign::Plus, Ratio::zero()) } GenericFraction::Rational(rs, ref r) => { let l_ = 
mem::replace(l, Ratio::new_raw(T::zero(), T::zero())); if l_.is_zero() && r.is_zero() { GenericFraction::NaN } else if r.is_zero() { GenericFraction::Infinity(ls) } else if l_.is_zero() { GenericFraction::Rational(Sign::Plus, l_) } else { GenericFraction::Rational( if ls == rs { Sign::Plus } else { Sign::Minus }, l_.div(r), ) } } }, }; } } impl<T: Clone + Integer> Rem for GenericFraction<T> { type Output = Self; fn rem(self, other: Self) -> Self { match self { GenericFraction::NaN => self, GenericFraction::Infinity(_) => match other { GenericFraction::NaN => other, GenericFraction::Infinity(_) => GenericFraction::NaN, GenericFraction::Rational(_, _) => GenericFraction::NaN, }, GenericFraction::Rational(sign, l) => match other { GenericFraction::NaN => other, GenericFraction::Infinity(_) => GenericFraction::Rational(sign, l), GenericFraction::Rational(_, r) => { if r.is_zero() { GenericFraction::NaN } else if l == r { GenericFraction::Rational(Sign::Plus, Ratio::zero()) } else { GenericFraction::Rational(sign, l % r) } } }, } } } impl<'a, T> Rem for &'a GenericFraction<T> where T: Clone + Integer, { type Output = GenericFraction<T>; fn rem(self, other: Self) -> GenericFraction<T> { match *self { GenericFraction::NaN => self.clone(), GenericFraction::Infinity(_) => match *other { GenericFraction::NaN => other.clone(), GenericFraction::Infinity(_) => GenericFraction::NaN, GenericFraction::Rational(_, _) => GenericFraction::NaN, }, GenericFraction::Rational(sign, ref l) => match *other { GenericFraction::NaN => other.clone(), GenericFraction::Infinity(_) => GenericFraction::Rational(sign, l.clone()), GenericFraction::Rational(_, ref r) => { if r.is_zero() { GenericFraction::NaN } else if l == r { GenericFraction::Rational(Sign::Plus, Ratio::zero()) } else { GenericFraction::Rational(sign, l.rem(r)) } } }, } } } impl<T: Clone + Integer> RemAssign for GenericFraction<T> { fn rem_assign(&mut self, other: Self) { *self = match *self { GenericFraction::NaN => 
            // (continuation of `RemAssign::rem_assign` from above)
            GenericFraction::NaN,
            GenericFraction::Infinity(_) => match other {
                GenericFraction::NaN => GenericFraction::NaN,
                GenericFraction::Infinity(_) => GenericFraction::NaN,
                GenericFraction::Rational(_, _) => GenericFraction::NaN,
            },
            GenericFraction::Rational(ls, ref mut l) => match other {
                GenericFraction::NaN => GenericFraction::NaN,
                GenericFraction::Infinity(_) => GenericFraction::Rational(
                    ls,
                    // x % inf == x: move the ratio straight back out of `self`.
                    mem::replace(l, Ratio::new_raw(T::zero(), T::zero())),
                ),
                GenericFraction::Rational(_, r) => {
                    let l_ = mem::replace(l, Ratio::new_raw(T::zero(), T::zero()));
                    if r.is_zero() {
                        GenericFraction::NaN
                    } else if l_ == r {
                        GenericFraction::Rational(Sign::Plus, Ratio::zero())
                    } else {
                        GenericFraction::Rational(ls, l_.rem(r))
                    }
                }
            },
        };
    }
}

/// Reference-operand variant of `RemAssign`; same rules as above.
impl<'a, T> RemAssign<&'a GenericFraction<T>> for GenericFraction<T>
where
    T: Clone + Integer,
{
    fn rem_assign(&mut self, other: &'a Self) {
        *self = match *self {
            GenericFraction::NaN => GenericFraction::NaN,
            GenericFraction::Infinity(_) => match *other {
                GenericFraction::NaN => GenericFraction::NaN,
                GenericFraction::Infinity(_) => GenericFraction::NaN,
                GenericFraction::Rational(_, _) => GenericFraction::NaN,
            },
            GenericFraction::Rational(ls, ref mut l) => match *other {
                GenericFraction::NaN => GenericFraction::NaN,
                GenericFraction::Infinity(_) => GenericFraction::Rational(
                    ls,
                    mem::replace(l, Ratio::new_raw(T::zero(), T::zero())),
                ),
                GenericFraction::Rational(_, ref r) => {
                    let l_ = mem::replace(l, Ratio::new_raw(T::zero(), T::zero()));
                    if r.is_zero() {
                        GenericFraction::NaN
                    } else if l_ == *r {
                        GenericFraction::Rational(Sign::Plus, Ratio::zero())
                    } else {
                        GenericFraction::Rational(ls, l_.rem(r))
                    }
                }
            },
        };
    }
}

/// Sums an iterator of fractions, folding with `Add` from +0.
impl<T: Clone + Integer> Sum for GenericFraction<T> {
    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
        iter.fold(GenericFraction::<T>::zero(), Add::add)
    }
}

/// Sums an iterator of borrowed fractions, folding with `Add` from +0.
impl<'a, T: 'a + Clone + Integer> Sum<&'a GenericFraction<T>> for GenericFraction<T> {
    fn sum<I: Iterator<Item = &'a Self>>(iter: I) -> Self {
        iter.fold(GenericFraction::<T>::zero(), |ref
        // (continuation of `Sum<&GenericFraction<T>>` from above)
        s, x| Add::add(s, x))
    }
}

/// Multiplies an iterator of fractions together, folding with `Mul` from 1.
impl<T: Clone + Integer> Product for GenericFraction<T> {
    fn product<I: Iterator<Item = Self>>(iter: I) -> Self {
        iter.fold(GenericFraction::<T>::one(), Mul::mul)
    }
}

/// Multiplies an iterator of borrowed fractions together, folding from 1.
impl<'a, T: 'a + Clone + Integer> Product<&'a GenericFraction<T>> for GenericFraction<T> {
    fn product<I: Iterator<Item = &'a Self>>(iter: I) -> Self {
        iter.fold(GenericFraction::<T>::one(), |ref s, x| Mul::mul(s, x))
    }
}

impl<T: Clone + Integer> Zero for GenericFraction<T> {
    fn zero() -> Self {
        GenericFraction::Rational(Sign::Plus, Ratio::zero())
    }

    // Only a Rational with a zero ratio counts as zero; NaN and the
    // infinities do not.
    fn is_zero(&self) -> bool {
        match *self {
            GenericFraction::Rational(_, ref r) => r.is_zero(),
            _ => false,
        }
    }
}

impl<T: Clone + Integer> One for GenericFraction<T> {
    fn one() -> Self {
        GenericFraction::Rational(Sign::Plus, Ratio::one())
    }
}

impl<T: Clone + Integer> Num for GenericFraction<T> {
    type FromStrRadixErr = ParseRatioError;

    // Consumes an optional leading '+'/'-' here and delegates the rest of
    // the literal to `Ratio::from_str_radix`.
    fn from_str_radix(str: &str, radix: u32) -> Result<Self, Self::FromStrRadixErr> {
        if let Some(rem) = str.strip_prefix('-') {
            Ratio::from_str_radix(rem, radix)
                .map(|ratio| GenericFraction::Rational(Sign::Minus, ratio))
        } else if let Some(rem) = str.strip_prefix('+') {
            Ratio::from_str_radix(rem, radix)
                .map(|ratio| GenericFraction::Rational(Sign::Plus, ratio))
        } else {
            Ratio::from_str_radix(str, radix)
                .map(|ratio| GenericFraction::Rational(Sign::Plus, ratio))
        }
    }
}

impl<T: Clone + Integer> Signed for GenericFraction<T> {
    fn abs(&self) -> Self {
        GenericFraction::abs(self)
    }

    // Positive difference with per-variant special-value handling; the
    // Rational/Rational arm is completed below this chunk boundary.
    fn abs_sub(&self, other: &Self) -> Self {
        match *self {
            GenericFraction::NaN => GenericFraction::NaN,
            GenericFraction::Infinity(sign) => match *other {
                GenericFraction::NaN => GenericFraction::NaN,
                GenericFraction::Infinity(osign) => {
                    if sign == Sign::Minus || osign == Sign::Plus {
                        GenericFraction::zero()
                    } else {
                        // +inf - (-inf)
                        GenericFraction::Infinity(Sign::Plus)
                    }
                }
                GenericFraction::Rational(_, _) => {
                    if sign == Sign::Plus {
                        GenericFraction::Infinity(sign)
                    } else {
                        GenericFraction::zero()
                    }
                }
            },
            GenericFraction::Rational(sign, ref l) => match
            // (continuation of `Signed::abs_sub` from above)
            *other {
                GenericFraction::NaN => GenericFraction::NaN,
                GenericFraction::Infinity(osign) => {
                    if osign == Sign::Plus {
                        GenericFraction::zero()
                    } else if sign == Sign::Minus {
                        GenericFraction::Infinity(Sign::Minus)
                    } else {
                        GenericFraction::Infinity(Sign::Plus)
                    }
                }
                GenericFraction::Rational(_, ref r) => {
                    // NOTE(review): this compares the unsigned ratios only —
                    // the operand signs `sign`/`_` are ignored, so the result
                    // is the absolute difference of magnitudes; confirm intended.
                    if l < r {
                        GenericFraction::Rational(Sign::Plus, r - l)
                    } else {
                        GenericFraction::Rational(Sign::Plus, l - r)
                    }
                }
            },
        }
    }

    fn signum(&self) -> Self {
        GenericFraction::signum(self)
    }

    fn is_positive(&self) -> bool {
        GenericFraction::is_sign_positive(self)
    }

    fn is_negative(&self) -> bool {
        GenericFraction::is_sign_negative(self)
    }
}

/// Conversions to primitive numbers. Integer conversions succeed only
/// for whole values (denominator == 1); `to_f64` also maps NaN and the
/// infinities to their `f64` counterparts.
impl<T: Clone + Integer + PartialEq + ToPrimitive> ToPrimitive for GenericFraction<T> {
    fn to_i64(&self) -> Option<i64> {
        match *self {
            GenericFraction::NaN => None,
            GenericFraction::Infinity(_) => None,
            // Whole numbers only; the sign is applied after converting
            // the (unsigned) numerator.
            GenericFraction::Rational(sign, ref r) if *r.denom() == T::one() => {
                if let Some(n) = r.numer().to_i64() {
                    if sign == Sign::Minus {
                        Some(-n)
                    } else {
                        Some(n)
                    }
                } else {
                    None
                }
            }
            _ => None,
        }
    }

    fn to_u64(&self) -> Option<u64> {
        match *self {
            GenericFraction::NaN => None,
            GenericFraction::Infinity(_) => None,
            GenericFraction::Rational(sign, ref r) if *r.denom() == T::one() => {
                // Negative values cannot be represented as u64.
                if sign == Sign::Minus {
                    None
                } else {
                    r.numer().to_u64()
                }
            }
            _ => None,
        }
    }

    fn to_f64(&self) -> Option<f64> {
        match *self {
            GenericFraction::NaN => Some(f64::NAN),
            GenericFraction::Infinity(sign) => Some(if sign == Sign::Minus {
                f64::NEG_INFINITY
            } else {
                f64::INFINITY
            }),
            // numer/denom are converted separately, divided, and then signed.
            GenericFraction::Rational(sign, ref r) => r
                .numer()
                .to_f64()
                .and_then(|n| r.denom().to_f64().map(|d| n / d))
                .map(|x| if sign == Sign::Minus { -x } else { x }),
        }
    }
}

impl<T: Clone + Integer> GenericFraction<T> {
    /// Returns NaN value
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::nan (), F::new (0, 0));
    /// ```
    pub fn nan() -> Self {
        GenericFraction::NaN
    }

    /// Returns positive Infinity value
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::infinity (), F::new (1, 0));
    /// ```
    pub fn infinity() -> Self {
        GenericFraction::Infinity(Sign::Plus)
    }

    /// Returns negative Infinity value
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::neg_infinity (), F::new_neg (1, 0));
    /// ```
    pub fn neg_infinity() -> Self {
        GenericFraction::Infinity(Sign::Minus)
    }

    /// Returns zero with negative sign
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::neg_zero (), F::new_neg (0, 1));
    /// ```
    pub fn neg_zero() -> Self {
        GenericFraction::Rational(Sign::Minus, Ratio::zero())
    }

    /// Returns minimal value greater than zero
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F8 = GenericFraction<u8>;
    /// type F16 = GenericFraction<u16>;
    ///
    /// assert_eq! (F8::min_positive_value (), F8::new (1u8, 255u8));
    /// assert_eq! (F16::min_positive_value (), F16::new (1u16, 65535u16));
    /// ```
    pub fn min_positive_value() -> Self
    where
        T: Bounded,
    {
        GenericFraction::Rational(Sign::Plus, Ratio::new(T::one(), T::max_value()))
    }

    /// Returns true if the value is NaN
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert! (F::nan ().is_nan ());
    /// assert! (F::new (0, 0).is_nan ());
    /// ```
    pub fn is_nan(&self) -> bool {
        matches!(*self, GenericFraction::NaN)
    }

    /// Returns true if the value is Infinity (does not matter positive or negative)
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert! (F::infinity ().is_infinite ());
    /// assert! (F::new (1u8, 0).is_infinite ());
    /// assert! (F::new_neg (1u8, 0).is_infinite ());
    /// ```
    pub fn is_infinite(&self) -> bool {
        matches!(*self, GenericFraction::Infinity(_))
    }

    /// Returns true if the value is not Infinity (does not matter positive or negative)
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert! (! F::infinity ().is_finite ());
    /// assert! (! F::new (1u8, 0).is_finite ());
    /// assert! (! F::new_neg (1u8, 0).is_finite ());
    /// ```
    pub fn is_finite(&self) -> bool {
        !matches!(*self, GenericFraction::Infinity(_))
    }

    /// Returns true if the number is neither zero, Infinity or NaN
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert! (! F::nan ().is_normal ());
    /// assert! (! F::infinity ().is_normal ());
    /// assert! (! F::neg_infinity ().is_normal ());
    /// assert! (! F::new (0, 1u8).is_normal ());
    /// assert! (! F::neg_zero ().is_normal ());
    /// ```
    pub fn is_normal(&self) -> bool {
        match *self {
            GenericFraction::Rational(_, ref v) => !v.is_zero(),
            _ => false,
        }
    }

    /// Returns the floating point category of the number
    ///
    /// # Examples
    ///
    /// ```
    /// use std::num::FpCategory;
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::nan ().classify (), FpCategory::Nan);
    /// assert_eq! (F::infinity ().classify (), FpCategory::Infinite);
    /// assert_eq! (F::new (0, 1u8).classify (), FpCategory::Zero);
    /// assert_eq! (F::new (1u8, 1u8).classify (), FpCategory::Normal);
    /// ```
    pub fn classify(&self) -> FpCategory {
        match *self {
            GenericFraction::NaN => FpCategory::Nan,
            GenericFraction::Infinity(_) => FpCategory::Infinite,
            GenericFraction::Rational(_, ref v) if v.is_zero() => FpCategory::Zero,
            _ => FpCategory::Normal,
        }
    }

    /// Returns the largest integer less than or equal to the value
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::new (7u8, 5u8).floor (), F::new (5u8, 5u8));
    /// ```
    pub fn floor(&self) -> Self {
        match *self {
            GenericFraction::Rational(s, ref r) => GenericFraction::Rational(s, r.floor()),
            // NaN and the infinities pass through unchanged.
            _ => self.clone(),
        }
    }

    /// Returns the smallest integer greater than or equal to the value
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::new (7u8, 5u8).ceil (), F::new (10u8, 5u8));
    /// ```
    pub fn ceil(&self) -> Self {
        match *self {
            GenericFraction::Rational(s, ref r) => GenericFraction::Rational(s, r.ceil()),
            _ => self.clone(),
        }
    }

    /// Returns the nearest integer to the value (.5 goes up)
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::new (7u8, 5u8).round (), F::new (5u8, 5u8));
    /// assert_eq! (F::new (8u8, 5u8).round (), F::new (10u8, 5u8));
    /// assert_eq! (F::new (3u8, 2u8).round (), F::new (4u8, 2u8));
    /// assert_eq! (F::new (1u8, 2u8).round (), F::new (2u8, 2u8));
    /// ```
    pub fn round(&self) -> Self {
        match *self {
            GenericFraction::Rational(s, ref r) => GenericFraction::Rational(s, r.round()),
            _ => self.clone(),
        }
    }

    /// Returns the integer part of the value
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::new (7u8, 5u8).trunc (), F::new (5u8, 5u8));
    /// assert_eq! (F::new (8u8, 5u8).trunc (), F::new (5u8, 5u8));
    /// ```
    pub fn trunc(&self) -> Self {
        match *self {
            GenericFraction::Rational(s, ref r) => GenericFraction::Rational(s, r.trunc()),
            _ => self.clone(),
        }
    }

    /// Returns the fractional part of a number
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::new (7u8, 5u8).fract (), F::new (2u8, 5u8));
    /// assert_eq! (F::new (8u8, 5u8).fract (), F::new (3u8, 5u8));
    /// ```
    pub fn fract(&self) -> Self {
        match *self {
            GenericFraction::Rational(s, ref r) => GenericFraction::Rational(s, r.fract()),
            // NaN and the infinities have no fractional part: NaN, like f64.
            _ => GenericFraction::NaN,
        }
    }

    /// Returns the absolute value of self
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::nan ().abs (), F::nan ());
    /// assert_eq! (F::infinity ().abs (), F::infinity ());
    /// assert_eq! (F::neg_infinity ().abs (), F::infinity ());
    /// assert_eq! (F::new (1u8, 2u8).abs (), F::new (1u8, 2u8));
    /// assert_eq! (F::new_neg (1u8, 2u8).abs (), F::new (1u8, 2u8));
    /// ```
    pub fn abs(&self) -> Self {
        match *self {
            GenericFraction::NaN => GenericFraction::NaN,
            GenericFraction::Infinity(_) => GenericFraction::Infinity(Sign::Plus),
            GenericFraction::Rational(_, ref r) => GenericFraction::Rational(Sign::Plus, r.clone()),
        }
    }

    /// Returns a number that represents the sign of self
    ///
    /// * 1.0 if the number is positive, +0.0 or INFINITY
    /// * -1.0 if the number is negative, -0.0 or NEG_INFINITY
    /// * NAN if the number is NAN
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::new (1u8, 2u8).signum (), F::new (1u8, 1u8));
    /// assert_eq! (F::new (0u8, 1u8).signum (), F::new (1u8, 1u8));
    /// assert_eq! (F::infinity ().signum (), F::new (1u8, 1u8));
    /// assert_eq! (F::new_neg (1u8, 2u8).signum (), F::new_neg (1u8, 1u8));
    /// assert_eq! (F::neg_zero ().signum (), F::new_neg (1u8, 1u8));
    /// assert_eq! (F::neg_infinity ().signum (), F::new_neg (1u8, 1u8));
    /// assert_eq! (F::nan ().signum (), F::nan ());
    /// ```
    pub fn signum(&self) -> Self {
        match *self {
            GenericFraction::NaN => GenericFraction::NaN,
            GenericFraction::Infinity(s) => {
                if s == Sign::Plus {
                    GenericFraction::Rational(Sign::Plus, Ratio::new(T::one(), T::one()))
                } else {
                    GenericFraction::Rational(Sign::Minus, Ratio::new(T::one(), T::one()))
                }
            }
            GenericFraction::Rational(s, _) => {
                if s == Sign::Plus {
                    GenericFraction::Rational(Sign::Plus, Ratio::new(T::one(), T::one()))
                } else {
                    GenericFraction::Rational(Sign::Minus, Ratio::new(T::one(), T::one()))
                }
            }
        }
    }

    /// Returns true if the sign is positive
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert! (F::new (1u8, 2u8).is_sign_positive ());
    /// assert! (F::infinity ().is_sign_positive ());
    /// assert! (! F::nan ().is_sign_positive ());
    /// ```
    pub fn is_sign_positive(&self) -> bool {
        match *self {
            GenericFraction::NaN => false,
            GenericFraction::Infinity(sign) => sign == Sign::Plus,
            GenericFraction::Rational(sign, _) => sign == Sign::Plus,
        }
    }

    /// Returns true if the sign is negative
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert! (F::new_neg (1u8, 2u8).is_sign_negative ());
    /// assert! (F::neg_zero ().is_sign_negative ());
    /// assert! (F::neg_infinity ().is_sign_negative ());
    /// assert! (! F::nan ().is_sign_negative ());
    /// ```
    pub fn is_sign_negative(&self) -> bool {
        match *self {
            GenericFraction::NaN => false,
            GenericFraction::Infinity(sign) => sign == Sign::Minus,
            GenericFraction::Rational(sign, _) => sign == Sign::Minus,
        }
    }

    /// self.clone () * a + b
    ///
    /// Added for interface compatibility with float types
    pub fn mul_add(&self, a: Self, b: Self) -> Self {
        self.clone() * a + b
    }

    /// Takes the reciprocal (inverse) of the value (1/x)
    ///
    /// # Examples
    ///
    /// ```
    /// use fraction::GenericFraction;
    /// type F = GenericFraction<u8>;
    ///
    /// assert_eq! (F::new (1u8, 2u8).recip (), F::new (2u8, 1u8));
    /// assert_eq! (F::new (0u8, 1u8).recip (), F::infinity ());
    /// assert_eq! (F::infinity ().recip (), F::new (0u8, 1u8));
    /// assert_eq! (F::nan ().recip (), F::nan ());
    /// ```
    pub fn recip(&self) -> Self {
        match *self {
            GenericFraction::NaN => GenericFraction::NaN,
            GenericFraction::Infinity(_) => {
                GenericFraction::Rational(Sign::Plus, Ratio::new(T::zero(), T::one()))
            }
            // 1/0 is infinity, keeping the sign of the zero.
            GenericFraction::Rational(s, ref r) if r.is_zero() => GenericFraction::Infinity(s),
            GenericFraction::Rational(s, ref r) => GenericFraction::Rational(s, r.recip()),
        }
    }

    /* ... Some stuff here that has not been implemented for Ratio<T> ... */
}

/// Human-readable formatting, delegated to the crate's `display` module
/// (which honors the standard formatter flags).
impl<T: Clone + GenericInteger> fmt::Display for GenericFraction<T> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        let format = display::Format::new(formatter);
        display::format_fraction(self, formatter, &format)
    }
}

// Generates `From<$F>` impls for integer types: the value is read through
// `read_generic_integer`, which splits it into (sign, magnitude); a value
// that does not fit into T becomes NaN.
macro_rules! fraction_from_generic_int {
    ( $($F:ty),* ) => {
        $(
        impl<T> From<$F> for GenericFraction<T>
        where
            T: Clone + Integer + GenericInteger + CheckedAdd + CheckedMul + CheckedSub,
            $F: GenericInteger + CheckedAdd + CheckedDiv + CheckedMul + CheckedSub + PartialOrd
        {
            fn from (val: $F) -> GenericFraction<T> {
                if let Some((sign, value)) = read_generic_integer::<T, $F>(val) {
                    GenericFraction::Rational(sign, Ratio::new (value, T::one()))
                } else {
                    GenericFraction::nan()
                }
            }
        }
        )*
    };
}

#[cfg(feature = "with-bigint")]
fraction_from_generic_int!(BigUint, BigInt);

fraction_from_generic_int!(u8, i8, u16, i16, u32, i32, u64, i64, u128, i128, usize, isize);

macro_rules!
// (macro name continues the `macro_rules!` ending the previous line)
// Generates `From<$from>` impls for the float types: special values map to
// NaN/Infinity; finite values are scaled by powers of ten until whole, then
// stored as numerator/denominator.
generic_fraction_from_float {
    ( $($from:ty),*) => {
        $(
        impl<T: Clone + FromPrimitive + Integer + CheckedAdd + CheckedMul + CheckedSub> From<$from> for GenericFraction<T> {
            fn from(val: $from) -> GenericFraction<T> {
                if val.is_nan () { return Self::NaN };
                if val.is_infinite () { return Self::Infinity (if val.is_sign_negative () { Sign::Minus } else { Sign::Plus }) };

                let sign = if val < 0.0 { Sign::Minus } else { Sign::Plus };

                // Using https://math.stackexchange.com/a/1049723/17452 , but rely on Ratio::new() to compute the gcd.
                // Find the max precision of this number
                // Note: the power computations happen in i32 until the end.
                let mut p: i32 = 0;
                let mut new_val = val;
                let ten: $from = 10.0;

                // Used when the scaled value can no longer be represented as
                // a float (overflow to infinity) or does not fit into T.
                let fallback_to_string_conversion = || Self::from_decimal_str(&format!("{:+}", val)).unwrap_or(Self::NaN);

                loop {
                    if (new_val.floor() - new_val).abs() < <$from>::EPSILON {
                        // Yay, we've found the precision of this number
                        break;
                    }

                    // Multiply by the precision
                    // Note: we multiply by powers of ten to avoid this kind of round error with f32s:
                    // https://play.rust-lang.org/?version=stable&mode=debug&edition=2018&gist=b760579f103b7192c20413ebbe167b90
                    p += 1;
                    new_val = val * ten.powi(p);

                    if new_val.is_infinite() {
                        return fallback_to_string_conversion();
                    }
                }

                // We store the sign of the ratio externally, so let's oppose the numerator if need be.
                // The denominator is always positive.
                // (continuation of the float-conversion macro body from above)
                if new_val < 0.0 { new_val = -new_val; }

                // Convert the (now whole) scaled value and the 10^p scale
                // factor into T; bail out to string parsing if either fails.
                let numer: T = match T::from_f64(new_val.into()) {
                    Some(v) => v,
                    None => {
                        return fallback_to_string_conversion();
                    }
                };

                let denom: T = match T::from_f64(ten.powi(p).into()) {
                    Some(v) => v,
                    None => {
                        return fallback_to_string_conversion();
                    }
                };

                Self::Rational(sign, Ratio::new(numer, denom))
            }
        }
        )*
    };
}

generic_fraction_from_float!(f32, f64);

/// Builds a fraction from a (numerator, denominator) pair of any
/// `Display`-able types: each side is rendered with a forced sign via
/// `{:+}`, the sign prefix is stripped, and the magnitude is parsed in
/// base 10. Unparseable input yields NaN.
impl<T, N, D> From<(N, D)> for GenericFraction<T>
where
    T: Clone + Integer,
    N: fmt::Display,
    D: fmt::Display,
{
    fn from(pair: (N, D)) -> GenericFraction<T> {
        let (num, den) = pair;

        let num = format!("{:+}", num);
        // `{:+}` guarantees a leading sign for numbers; anything else
        // (no sign prefix) is rejected as NaN.
        let n_sign = if num.starts_with('-') {
            Sign::Minus
        } else if num.starts_with('+') {
            Sign::Plus
        } else {
            return GenericFraction::NaN;
        };
        let n: Result<T, T::FromStrRadixErr> = T::from_str_radix(&num[1..], 10);
        if n.is_err() {
            return GenericFraction::NaN;
        }

        let den = format!("{:+}", den);
        let d_sign = if den.starts_with('-') {
            Sign::Minus
        } else if den.starts_with('+') {
            Sign::Plus
        } else {
            return GenericFraction::NaN;
        };
        let d: Result<T, T::FromStrRadixErr> = T::from_str_radix(&den[1..], 10);
        if d.is_err() {
            return GenericFraction::NaN;
        }

        GenericFraction::Rational(
            if n_sign == d_sign { Sign::Plus } else { Sign::Minus },
            Ratio::new(n.ok().unwrap(), d.ok().unwrap()),
        )
    }
}

#[cfg(test)]
mod tests {
    #[cfg(feature = "with-bigint")]
    use prelude::BigFraction;

    #[cfg(feature = "with-bigint")]
    use super::{BigInt, BigUint};

    use super::{
        super::Fraction, Bounded, FpCategory, GenericFraction, Num, One, ParseError, Sign,
        Signed, ToPrimitive, Zero,
    };

    use super::{CheckedAdd, CheckedDiv, CheckedMul, CheckedSub};

    use std::collections::HashMap;
    use std::hash::{Hash, Hasher};

    type Frac = GenericFraction<u8>;

    // Hashes a value with the std DefaultHasher; used to compare hash
    // equality of equivalent fractions below.
    fn hash_it(target: &impl Hash) -> u64 {
        use std::collections::hash_map::DefaultHasher;

        let mut h = DefaultHasher::new();
        target.hash(&mut h);
        h.finish()
    }

    generate_ops_tests!
( NaN => {Frac::nan()}; NegInf => {Frac::neg_infinity()}; PosInf => {Frac::infinity()}; Zero => {Frac::new(0, 1)}; Half => {Frac::new(1, 2)}; One => {Frac::new(1, 1)}; Two => {Frac::new(2, 1)}; Three => {Frac::new(3, 1)}; Four => {Frac::new(4, 1)}; ); #[test] fn op_ord() { let pin = Frac::infinity(); let nin = Frac::neg_infinity(); let nil = Frac::zero(); let a = Frac::new(3, 4); let b = Frac::new(5, 7); assert!(a > b); assert!(a > nil); assert!(b > nil); assert!(nin < nil); assert!(nil < pin); } #[test] fn from_i8() { let f = Fraction::from(-2i8); assert_eq!(Sign::Minus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(0i8); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(2i8); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_u8() { let f = Fraction::from(0u8); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(2u8); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(u8::max_value()); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(u8::max_value() as u64, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_i16() { let f = Fraction::from(-2i16); assert_eq!(Sign::Minus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(0i16); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(2i16); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_u16() { let f = Fraction::from(0u16); assert_eq!(Sign::Plus, 
f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(2u16); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_i32() { let f = Fraction::from(-2i32); assert_eq!(Sign::Minus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(0i32); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(2i32); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_u32() { let f = Fraction::from(0u32); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(2u32); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_i64() { let f = Fraction::from(-2i64); assert_eq!(Sign::Minus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(0i64); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(2i64); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_u64() { let f = Fraction::from(0u64); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(2u64); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_i128() { let f = Fraction::from(0i128); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(2i128); 
assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(-2i128); assert_eq!(Sign::Minus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(22460602606i128); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(22460602606, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_u128() { let f = Fraction::from(0u128); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(2u128); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_isize() { let f = Fraction::from(-2isize); assert_eq!(Sign::Minus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(0isize); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(2isize); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_usize() { let f = Fraction::from(0usize); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(2usize); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(2, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_f32() { let f = Fraction::from(0f32); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(0.01f32); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(1, *f.numer().unwrap()); assert_eq!(100, *f.denom().unwrap()); let f = Fraction::from(-0.01f32); assert_eq!(Sign::Minus, f.sign().unwrap()); assert_eq!(1, *f.numer().unwrap()); assert_eq!(100, 
*f.denom().unwrap()); let f = Fraction::from(16584253f32); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(16584253u64, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] fn from_f64() { let f = Fraction::from(0f64); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(0, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); let f = Fraction::from(0.01f64); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(1, *f.numer().unwrap()); assert_eq!(100, *f.denom().unwrap()); let f = Fraction::from(-0.01f64); assert_eq!(Sign::Minus, f.sign().unwrap()); assert_eq!(1, *f.numer().unwrap()); assert_eq!(100, *f.denom().unwrap()); let f = Fraction::from(1658425342060f64); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(1658425342060u64, *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } #[test] #[cfg(feature = "with-bigint")] fn from_insanity() { let number = "2062065394209534095362056240654064520645230645230645230645230645206452064520645203642530940959212130935957"; let fraction = format!("{}/1", number); let f = BigFraction::from_str_radix(&fraction, 10); assert!(f.is_ok()); let f = f.ok().unwrap(); assert_eq!( BigUint::from_str_radix(&number, 10).ok().unwrap(), *f.numer().unwrap() ); assert_eq!(BigUint::from(1u8), *f.denom().unwrap()); } #[test] #[cfg(feature = "with-bigint")] fn from_bigint() { let number = BigInt::from(42); let frac = BigFraction::from(number); assert_eq!(frac, BigFraction::from((42, 1))); let number = BigInt::from(-44); let frac = BigFraction::from(number); assert_eq!(frac, -BigFraction::from((44, 1))); } #[test] #[cfg(feature = "with-bigint")] fn from_biguint() { let number = BigUint::from(42u32); let frac = BigFraction::from(number); assert_eq!(frac, BigFraction::from((42, 1))); } #[test] fn from_extremum() { type F8 = GenericFraction<u8>; let f = F8::from(u8::max_value()); assert_eq!(Sign::Plus, f.sign().unwrap()); assert_eq!(u8::max_value(), *f.numer().unwrap()); assert_eq!(1, *f.denom().unwrap()); } 
#[test] fn hashy() { { let mut map: HashMap<Fraction, ()> = HashMap::new(); map.insert(Fraction::from(0.75), ()); assert!(map.contains_key(&Fraction::new(3u8, 4u8))); // 0.75 == 3/4 assert!(map.contains_key(&Fraction::new(6u16, 8u16))); // 0.75 == 6/8 assert!(map.contains_key(&Fraction::new(12u32, 16u32))); // 0.75 == 12/16 assert!(map.contains_key(&Fraction::new(24u64, 32u64))); // 0.75 == 24/32 assert!(map.contains_key(&Fraction::new(48u8, 64u16))); // 0.75 == 48/64 assert!(map.contains_key(&Fraction::from((3i8, 4i8)))); assert!(map.contains_key(&Fraction::from((6i16, 8i16)))); assert!(map.contains_key(&Fraction::from((12i32, 16i32)))); assert!(map.contains_key(&Fraction::from((24i64, 32i64)))); assert!(map.contains_key(&Fraction::from((48i8, 64i16)))); assert!(!map.contains_key(&Fraction::from(0.5))); // 0.75 != 1/2 } { assert_eq!(hash_it(&Fraction::nan()), hash_it(&Fraction::nan())); assert_ne!(hash_it(&Fraction::nan()), hash_it(&Fraction::zero())); assert_ne!( hash_it(&Fraction::infinity()), hash_it(&Fraction::neg_infinity()) ); assert_ne!(hash_it(&Fraction::infinity()), hash_it(&Fraction::nan())); assert_eq!( hash_it(&Fraction::infinity()), hash_it(&Fraction::infinity()) ); assert_eq!( hash_it(&Fraction::neg_infinity()), hash_it(&Fraction::neg_infinity()) ); assert_eq!( hash_it(&Fraction::neg_infinity()), hash_it(&Fraction::neg_infinity()) ); assert_eq!( hash_it(&Fraction::new(1u8, 2u8)), hash_it(&Fraction::new(2u8, 4u8)) ); assert_eq!( hash_it(&Fraction::new(1u8, 0u8)), hash_it(&Fraction::new(2u8, 0u8)) ); assert_eq!( hash_it(&Fraction::new(0u8, 1u8)), hash_it(&Fraction::new(0u8, 2u8)) ); assert_eq!(hash_it(&Fraction::zero()), hash_it(&Fraction::zero())); assert_eq!(hash_it(&Frac::zero()), hash_it(&Frac::neg_zero())); } } #[test] fn comparison() { assert_eq!(Frac::zero(), Frac::zero()); assert_eq!(Frac::zero(), Frac::neg_zero()); assert_eq!(Frac::from(0), Frac::zero()); assert_eq!(Frac::from(0), Frac::neg_zero()); assert_eq!(Frac::from(0.5), Frac::new(1u8, 
2u8)); assert_eq!(Frac::from(-0.5), Frac::new_neg(1u8, 2u8)); assert_ne!(Frac::from(-0.5), Frac::new(1u8, 2u8)); assert!(!(Frac::zero() < Frac::neg_zero())); assert!(!(Frac::neg_zero() < Frac::zero())); assert!(!(Frac::zero() > Frac::neg_zero())); assert!(!(Frac::neg_zero() > Frac::zero())); assert!(Frac::neg_zero() < Frac::new(1u8, 2u8)); assert!(!(Frac::neg_zero() > Frac::new(1u8, 2u8))); assert!(Frac::zero() < Frac::new(1u8, 2u8)); assert!(!(Frac::zero() > Frac::new(1u8, 2u8))); assert!(Frac::new_neg(1u8, 2u8) < Frac::neg_zero()); assert!(Frac::new_neg(1u8, 2u8) < Frac::zero()); assert!(!(Frac::new_neg(1u8, 2u8) > Frac::neg_zero())); assert!(!(Frac::new_neg(1u8, 2u8) > Frac::zero())); assert_eq!(Frac::new(1u8, 2u8), Frac::new(1u8, 2u8)); assert_eq!(Frac::new_neg(1u8, 2u8), Frac::new_neg(1u8, 2u8)); assert!(Frac::new_neg(1u8, 2u8) < Frac::new(1u8, 2u8)); assert!(!(Frac::new(1u8, 2u8) < Frac::new_neg(1u8, 2u8))); assert!(!(Frac::new_neg(1u8, 2u8) < Frac::new_neg(1u8, 2u8))); assert!(Frac::new_neg(1u8, 2u8) < Frac::new_neg(1u8, 4u8)); assert!(Frac::new_neg(1u8, 2u8) < Frac::neg_zero()); assert!(Frac::new_neg(1u8, 2u8) < Frac::zero()); assert!(!(Frac::neg_zero() < Frac::new_neg(1u8, 2u8))); assert!(!(Frac::zero() < Frac::new_neg(1u8, 2u8))); assert!(Frac::neg_zero() < Frac::new(1u8, 2u8)); assert!(Frac::neg_zero() > Frac::new_neg(1u8, 2u8)); assert!(Frac::zero() > Frac::new_neg(1u8, 2u8)); assert!(Frac::new(1u8, 2u8) > Frac::neg_zero()); assert!(!(Frac::new(1u8, 2u8) < Frac::neg_zero())); assert!(Frac::zero() < Frac::new(1u8, 2u8)); } #[test] fn from_decimal_str() { assert_eq!(Ok(Frac::zero()), Frac::from_decimal_str("0")); assert_eq!(Ok(Frac::zero()), Frac::from_decimal_str("-0")); assert_eq!(Ok(Frac::zero()), Frac::from_decimal_str("+0")); assert_eq!(Ok(Frac::zero()), Frac::from_decimal_str("0.0")); assert_eq!(Ok(Frac::zero()), Frac::from_decimal_str("-0.0")); assert_eq!(Ok(Frac::zero()), Frac::from_decimal_str("+0.0")); assert_eq!(Ok(Fraction::zero()), 
Fraction::from_decimal_str("0.000000")); assert_eq!( Ok(Fraction::zero()), Fraction::from_decimal_str("-0.000000") ); assert_eq!( Ok(Fraction::zero()), Fraction::from_decimal_str("+0.000000") ); #[cfg(feature = "with-bigint")] { assert_eq!( Ok(BigFraction::zero()), BigFraction::from_decimal_str( "00000000000000000000000000.0000000000000000000000000000000000000000000" ) ); assert_eq!( Ok(BigFraction::zero()), BigFraction::from_decimal_str( "-0000000000000000000000000.0000000000000000000000000000000000000000000" ) ); assert_eq!( Ok(BigFraction::zero()), BigFraction::from_decimal_str( "+0000000000000000000000000.0000000000000000000000000000000000000000000" ) ); } assert_eq!(Ok(Frac::one()), Frac::from_decimal_str("1")); assert_eq!(Ok(Frac::new_neg(1, 1)), Frac::from_decimal_str("-1")); assert_eq!(Ok(Frac::one()), Frac::from_decimal_str("+1")); assert_eq!(Ok(Frac::one()), Frac::from_decimal_str("1.0")); assert_eq!(Ok(Frac::new_neg(1, 1)), Frac::from_decimal_str("-1.0")); assert_eq!(Ok(Frac::one()), Frac::from_decimal_str("+1.00")); assert_eq!(Ok(Frac::new(1, 2)), Frac::from_decimal_str("0.5")); assert_eq!( Ok(Fraction::new(3333u64, 5000u64)), Fraction::from_decimal_str("0.6666") ); assert_eq!( Err(ParseError::ParseIntError), Frac::from_decimal_str("test") ); assert_eq!( Err(ParseError::ParseIntError), Frac::from_decimal_str("1test") ); assert_eq!( Err(ParseError::ParseIntError), Frac::from_decimal_str("1.26t8") ); // this is due to the std library which issues ParseIntError on the whole part overflow assert_eq!( Err(ParseError::ParseIntError), Frac::from_decimal_str("120202040") ); assert_eq!( Err(ParseError::ParseIntError), Frac::from_decimal_str("1.20602604") ); assert_eq!( Err(ParseError::OverflowError), Frac::from_decimal_str("255.255") ); } #[test] fn new_generic() { { type F = GenericFraction<u8>; let f12 = F::new_generic(Sign::Plus, 1i8, 2u8).unwrap(); let f34 = F::new_generic(Sign::Minus, 3i16, 4u32).unwrap(); let f56 = F::new_generic(Sign::Plus, -5i64, 
6u128).unwrap(); let f78 = F::new_generic(Sign::Minus, 7usize, -8isize).unwrap(); #[cfg(feature = "with-bigint")] { let fbig = F::new_generic(Sign::Minus, -BigInt::from(254), BigUint::from(255u32)).unwrap(); assert_eq!( ( fbig.sign().unwrap(), *fbig.numer().unwrap(), *fbig.denom().unwrap() ), (Sign::Plus, 254u8, 255u8) ); } assert_eq!( ( f12.sign().unwrap(), *f12.numer().unwrap(), *f12.denom().unwrap() ), (Sign::Plus, 1u8, 2u8) ); assert_eq!( ( f34.sign().unwrap(), *f34.numer().unwrap(), *f34.denom().unwrap() ), (Sign::Minus, 3u8, 4u8) ); assert_eq!( ( f56.sign().unwrap(), *f56.numer().unwrap(), *f56.denom().unwrap() ), (Sign::Minus, 5u8, 6u8) ); assert_eq!( ( f78.sign().unwrap(), *f78.numer().unwrap(), *f78.denom().unwrap() ), (Sign::Plus, 7u8, 8u8) ); assert_eq!(None, F::new_generic(Sign::Plus, 256, 1)); // overflow assert_eq!(None, F::new_generic(Sign::Plus, 1, 257)); // overflow } { type F = GenericFraction<i8>; let f12 = F::new_generic(Sign::Plus, 1i8, 2u8).unwrap(); let f34 = F::new_generic(Sign::Minus, 3i16, 4u32).unwrap(); let f56 = F::new_generic(Sign::Plus, -5i64, 6u128).unwrap(); let f78 = F::new_generic(Sign::Minus, 7usize, -8isize).unwrap(); assert_eq!( ( f12.sign().unwrap(), *f12.numer().unwrap(), *f12.denom().unwrap() ), (Sign::Plus, 1i8, 2i8) ); assert_eq!( ( f34.sign().unwrap(), *f34.numer().unwrap(), *f34.denom().unwrap() ), (Sign::Minus, 3i8, 4i8) ); assert_eq!( ( f56.sign().unwrap(), *f56.numer().unwrap(), *f56.denom().unwrap() ), (Sign::Minus, 5i8, 6i8) ); assert_eq!( ( f78.sign().unwrap(), *f78.numer().unwrap(), *f78.denom().unwrap() ), (Sign::Plus, 7i8, 8i8) ); assert_eq!(None, F::new_generic(Sign::Plus, 128, 1)); // overflow assert_eq!(None, F::new_generic(Sign::Plus, 256, 1)); // overflow #[cfg(feature = "with-bigint")] { let fbig = F::new_generic(Sign::Minus, -BigInt::from(126), BigUint::from(127u8)).unwrap(); assert_eq!( ( fbig.sign().unwrap(), *fbig.numer().unwrap(), *fbig.denom().unwrap() ), (Sign::Plus, 126i8, 127i8) ); } } 
#[cfg(feature = "with-bigint")] { type F = GenericFraction<BigUint>; let f12 = F::new_generic(Sign::Plus, 1i8, 2u8).unwrap(); let f34 = F::new_generic(Sign::Minus, 3i16, 4u32).unwrap(); let f56 = F::new_generic(Sign::Plus, -5i64, 6u128).unwrap(); let f78 = F::new_generic(Sign::Minus, 7usize, -8isize).unwrap(); let fbig = F::new_generic(Sign::Minus, -BigInt::from(254), BigUint::from(255u32)).unwrap(); assert_eq!( ( f12.sign().unwrap(), f12.numer().unwrap(), f12.denom().unwrap() ), (Sign::Plus, &BigUint::from(1u8), &BigUint::from(2u8)) ); assert_eq!( ( f34.sign().unwrap(), f34.numer().unwrap(), f34.denom().unwrap() ), (Sign::Minus, &BigUint::from(3u8), &BigUint::from(4u8)) ); assert_eq!( ( f56.sign().unwrap(), f56.numer().unwrap(), f56.denom().unwrap() ), (Sign::Minus, &BigUint::from(5u8), &BigUint::from(6u8)) ); assert_eq!( ( f78.sign().unwrap(), f78.numer().unwrap(), f78.denom().unwrap() ), (Sign::Plus, &BigUint::from(7u8), &BigUint::from(8u8)) ); assert_eq!( ( fbig.sign().unwrap(), fbig.numer().unwrap(), fbig.denom().unwrap() ), (Sign::Plus, &BigUint::from(254u8), &BigUint::from(255u8)) ); } #[cfg(feature = "with-bigint")] { type F = GenericFraction<BigInt>; let f12 = F::new_generic(Sign::Plus, 1i8, 2u8).unwrap(); let f34 = F::new_generic(Sign::Minus, 3i16, 4u32).unwrap(); let f56 = F::new_generic(Sign::Plus, -5i64, 6u128).unwrap(); let f78 = F::new_generic(Sign::Minus, 7usize, -8isize).unwrap(); let fbig = F::new_generic(Sign::Minus, -BigInt::from(254), BigUint::from(255u32)).unwrap(); assert_eq!( ( f12.sign().unwrap(), f12.numer().unwrap(), f12.denom().unwrap() ), (Sign::Plus, &BigInt::from(1u8), &BigInt::from(2u8)) ); assert_eq!( ( f34.sign().unwrap(), f34.numer().unwrap(), f34.denom().unwrap() ), (Sign::Minus, &BigInt::from(3u8), &BigInt::from(4u8)) ); assert_eq!( ( f56.sign().unwrap(), f56.numer().unwrap(), f56.denom().unwrap() ), (Sign::Minus, &BigInt::from(5u8), &BigInt::from(6u8)) ); assert_eq!( ( f78.sign().unwrap(), f78.numer().unwrap(), 
f78.denom().unwrap() ), (Sign::Plus, &BigInt::from(7u8), &BigInt::from(8u8)) ); assert_eq!( ( fbig.sign().unwrap(), fbig.numer().unwrap(), fbig.denom().unwrap() ), (Sign::Plus, &BigInt::from(254u8), &BigInt::from(255u8)) ); } { type F = GenericFraction<u128>; let f1 = F::new_generic(Sign::Plus, 123456788u64, 123456789i32).unwrap(); let f2 = F::new_generic(Sign::Minus, 1234567890122u64, -1234567890123i64).unwrap(); assert_eq!( ( f1.sign().unwrap(), *f1.numer().unwrap(), *f1.denom().unwrap() ), (Sign::Plus, 123456788u128, 123456789u128) ); assert_eq!( ( f2.sign().unwrap(), *f2.numer().unwrap(), *f2.denom().unwrap() ), (Sign::Plus, 1234567890122u128, 1234567890123u128) ); } } #[test] fn sign() { let p = Sign::Plus; let m = Sign::Minus; assert_ne!(p, m); assert_eq!(p, Sign::Plus); assert_eq!(m, Sign::Minus); assert_eq!(m, p * m); assert_eq!(p, p * p); assert_eq!(p, m * m); assert_eq!(m, m * p); assert!(p.is_positive()); assert!(!p.is_negative()); assert!(!m.is_positive()); assert!(m.is_negative()); } #[test] fn fraction() { assert_eq!(Frac::nan(), Frac::new(0, 0)); assert_eq!(Frac::infinity(), Frac::new(1, 0)); assert!(Frac::nan().numer().is_none()); assert!(Frac::nan().denom().is_none()); assert_eq!( Fraction::new(5u64, 8u64), Fraction::from_fraction(Frac::new(5u8, 8u8)) ); assert_eq!(Fraction::nan(), Fraction::from_fraction(Frac::nan())); assert_eq!( Fraction::infinity(), Fraction::from_fraction(Frac::infinity()) ); assert_eq!(Frac::min_value(), Frac::new_neg(255, 1)); assert_eq!(Frac::max_value(), Frac::new(255, 1)); assert_ne!(Frac::neg_infinity(), Frac::infinity()); assert_ne!(Frac::nan(), Frac::zero()); assert_ne!(Frac::zero(), Frac::nan()); assert_ne!(Frac::new_neg(1, 2), Frac::new(1, 2)); assert_eq!(Frac::neg_zero(), Frac::zero()); assert_ne!(Frac::infinity(), Frac::nan()); assert!(!(Frac::infinity() > Frac::infinity())); assert!((Frac::infinity() > Frac::neg_infinity())); assert!(!(Frac::neg_infinity() > Frac::infinity())); assert!(Frac::infinity() > 
Frac::max_value()); assert!(Frac::min_value() > Frac::neg_infinity()); assert_eq!(-Frac::infinity(), Frac::neg_infinity()); assert_eq!(-&Frac::nan(), Frac::nan()); assert_eq!(-&Frac::infinity(), Frac::neg_infinity()); assert_eq!(-&Frac::one(), Frac::new_neg(1, 1)); assert_eq!(-&Frac::zero(), Frac::zero()); assert_eq!( Frac::new_neg(1, 1) + Frac::new_neg(1, 1), Frac::new_neg(2, 1) ); assert_eq!( &Frac::new_neg(1, 1) + &Frac::new_neg(1, 1), Frac::new_neg(2, 1) ); assert_eq!(Frac::new_neg(1, 1) - Frac::new_neg(1, 1), Frac::zero()); assert_eq!(Frac::new_neg(1, 1) - Frac::new_neg(2, 1), Frac::one()); assert_eq!(&Frac::new_neg(1, 1) - &Frac::new_neg(1, 1), Frac::zero()); assert_eq!(&Frac::new_neg(1, 1) - &Frac::new_neg(2, 1), Frac::one()); assert_eq!(Frac::new(1, 255), Frac::min_positive_value()); assert!(Frac::infinity().is_infinite()); assert!(Frac::neg_infinity().is_infinite()); assert!(!Frac::one().is_infinite()); assert!(!Frac::infinity().is_finite()); assert!(!Frac::neg_infinity().is_finite()); assert!(Frac::one().is_finite()); assert_eq!(FpCategory::Normal, Frac::one().classify()); assert_eq!(FpCategory::Infinite, Frac::infinity().classify()); assert_eq!(FpCategory::Zero, Frac::zero().classify()); assert_eq!(FpCategory::Nan, Frac::nan().classify()); assert_eq!(Frac::nan(), Frac::nan().floor()); assert_eq!(Frac::one(), Frac::new(3, 2).floor()); assert_eq!(Frac::nan(), Frac::nan().ceil()); assert_eq!(Frac::one(), Frac::new(1, 2).ceil()); assert_eq!(Frac::nan(), Frac::nan().round()); assert_eq!(Frac::one(), Frac::new(1, 2).round()); assert_eq!(Frac::new(2, 1), Frac::new(3, 2).round()); assert_eq!(Frac::one(), Frac::new(4, 3).round()); assert_eq!(Frac::nan(), Frac::nan().trunc()); assert_eq!(Frac::zero(), Frac::new(1, 2).trunc()); assert_eq!(Frac::one(), Frac::new(3, 2).trunc()); assert_eq!(Frac::nan(), Frac::nan().fract()); assert_eq!(Frac::new(1, 2), Frac::new(1, 2).fract()); assert_eq!(Frac::new(1, 2), Frac::new(3, 2).fract()); 
assert!(!Frac::nan().is_sign_positive()); assert!(!Frac::nan().is_sign_negative()); assert!(Frac::infinity().is_sign_positive()); assert!(!Frac::neg_infinity().is_sign_positive()); assert!(!Frac::infinity().is_sign_negative()); assert!(Frac::neg_infinity().is_sign_negative()); assert!(Frac::one().is_sign_positive()); assert!(!Frac::one().is_sign_negative()); assert!(!Frac::new_neg(1, 1).is_sign_positive()); assert!(Frac::new_neg(1, 1).is_sign_negative()); assert_eq!( Frac::new(3, 1), Frac::one().mul_add(Frac::new(2, 1), Frac::one()) ); assert_eq!(Frac::nan(), Frac::nan().recip()); assert_eq!(Frac::zero(), Frac::infinity().recip()); assert_eq!(Frac::zero(), Frac::neg_infinity().recip()); assert_eq!(Frac::infinity(), Frac::zero().recip()); assert_eq!(Frac::new(2, 1), Frac::new(1, 2).recip()); } #[test] fn add_assign() { { let mut v = Frac::zero(); v += Frac::infinity(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::infinity(); v += Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v += Frac::infinity(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::infinity(); v += Frac::neg_infinity(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v += Frac::one(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::one(); v += Frac::new_neg(1, 1); assert_eq!(v, Frac::zero()); } { let mut v = Frac::one(); v += Frac::new_neg(2, 1); assert_eq!(v, Frac::new_neg(1, 1)); } { let mut v = Frac::new_neg(1, 1); v += Frac::new_neg(1, 1); assert_eq!(v, Frac::new_neg(2, 1)); } { let mut v = Frac::new_neg(1, 1); v += Frac::new(1, 1); assert_eq!(v, Frac::zero()); } { let mut v = Frac::new_neg(2, 1); v += Frac::new(1, 1); assert_eq!(v, Frac::new_neg(1, 1)); } /* Refs */ { let mut v = Frac::zero(); v += &Frac::infinity(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::infinity(); v += &Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v += &Frac::infinity(); assert_eq!(v, Frac::infinity()); } { let mut v 
= Frac::infinity(); v += &Frac::neg_infinity(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v += &Frac::one(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::one(); v += &Frac::new_neg(1, 1); assert_eq!(v, Frac::zero()); } { let mut v = Frac::one(); v += &Frac::new_neg(2, 1); assert_eq!(v, Frac::new_neg(1, 1)); } { let mut v = Frac::new_neg(1, 1); v += &Frac::new_neg(1, 1); assert_eq!(v, Frac::new_neg(2, 1)); } { let mut v = Frac::new_neg(1, 1); v += &Frac::new(1, 1); assert_eq!(v, Frac::zero()); } { let mut v = Frac::new_neg(2, 1); v += &Frac::new(1, 1); assert_eq!(v, Frac::new_neg(1, 1)); } } #[test] fn sub_assign() { { let mut v = Frac::nan(); v -= Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v -= Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v -= Frac::infinity(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v -= Frac::neg_infinity(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::infinity(); v -= Frac::one(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::one(); v -= Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::one(); v -= Frac::infinity(); assert_eq!(v, Frac::neg_infinity()); } { let mut v = Frac::one(); v -= Frac::neg_infinity(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::one(); v -= Frac::new_neg(1, 1); assert_eq!(v, Frac::new(2, 1)); } { let mut v = Frac::new_neg(1, 1); v -= Frac::new(2, 1); assert_eq!(v, Frac::new_neg(3, 1)); } { let mut v = Frac::new_neg(1, 1); v -= Frac::new_neg(1, 1); assert_eq!(v, Frac::zero()); } { let mut v = Frac::new_neg(1, 1); v -= Frac::new_neg(2, 1); assert_eq!(v, Frac::one()); } /* Refs */ { let mut v = Frac::nan(); v -= &Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v -= &Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v -= &Frac::infinity(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v -= 
&Frac::neg_infinity(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::infinity(); v -= &Frac::one(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::one(); v -= &Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::one(); v -= &Frac::infinity(); assert_eq!(v, Frac::neg_infinity()); } { let mut v = Frac::one(); v -= &Frac::neg_infinity(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::one(); v -= &Frac::new_neg(1, 1); assert_eq!(v, Frac::new(2, 1)); } { let mut v = Frac::new_neg(1, 1); v -= &Frac::new(2, 1); assert_eq!(v, Frac::new_neg(3, 1)); } { let mut v = Frac::new_neg(1, 1); v -= &Frac::new_neg(1, 1); assert_eq!(v, Frac::zero()); } { let mut v = Frac::new_neg(1, 1); v -= &Frac::new_neg(2, 1); assert_eq!(v, Frac::one()); } } #[test] fn mul_assign() { { let mut v = Frac::nan(); v *= Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v *= Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v *= Frac::infinity(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::infinity(); v *= Frac::neg_infinity(); assert_eq!(v, Frac::neg_infinity()); } { let mut v = Frac::infinity(); v *= Frac::one(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::one(); v *= Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::one(); v *= Frac::infinity(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::one(); v *= Frac::neg_infinity(); assert_eq!(v, Frac::neg_infinity()); } { let mut v = Frac::one(); v *= Frac::new_neg(1, 1); assert_eq!(v, Frac::new_neg(1, 1)); } { let mut v = Frac::new_neg(1, 1); v *= Frac::new(2, 1); assert_eq!(v, Frac::new_neg(2, 1)); } { let mut v = Frac::new_neg(1, 1); v *= Frac::new_neg(1, 1); assert_eq!(v, Frac::one()); } { let mut v = Frac::new_neg(1, 1); v *= Frac::new_neg(2, 1); assert_eq!(v, Frac::new(2, 1)); } { let mut v = Frac::infinity(); v *= Frac::zero(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::zero(); v *= Frac::infinity(); assert_eq!(v, 
Frac::nan()); } /* Refs */ { let mut v = Frac::nan(); v *= &Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v *= &Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v *= &Frac::infinity(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::infinity(); v *= &Frac::neg_infinity(); assert_eq!(v, Frac::neg_infinity()); } { let mut v = Frac::infinity(); v *= &Frac::one(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::one(); v *= &Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::one(); v *= &Frac::infinity(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::one(); v *= &Frac::neg_infinity(); assert_eq!(v, Frac::neg_infinity()); } { let mut v = Frac::one(); v *= &Frac::new_neg(1, 1); assert_eq!(v, Frac::new_neg(1, 1)); } { let mut v = Frac::new_neg(1, 1); v *= &Frac::new(2, 1); assert_eq!(v, Frac::new_neg(2, 1)); } { let mut v = Frac::new_neg(1, 1); v *= &Frac::new_neg(1, 1); assert_eq!(v, Frac::one()); } { let mut v = Frac::new_neg(1, 1); v *= &Frac::new_neg(2, 1); assert_eq!(v, Frac::new(2, 1)); } { let mut v = Frac::infinity(); v *= &Frac::zero(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::zero(); v *= &Frac::infinity(); assert_eq!(v, Frac::nan()); } } #[test] fn div_assign() { { let mut v = Frac::nan(); v /= Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v /= Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v /= Frac::infinity(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v /= Frac::neg_infinity(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v /= Frac::one(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::one(); v /= Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::one(); v /= Frac::infinity(); assert_eq!(v, Frac::zero()); } { let mut v = Frac::one(); v /= Frac::neg_infinity(); assert_eq!(v, Frac::zero()); } { let mut v = Frac::one(); v /= Frac::new_neg(1, 1); 
assert_eq!(v, Frac::new_neg(1, 1)); } { let mut v = Frac::new_neg(1, 1); v /= Frac::new(2, 1); assert_eq!(v, Frac::new_neg(1, 2)); } { let mut v = Frac::new_neg(1, 1); v /= Frac::new_neg(1, 1); assert_eq!(v, Frac::one()); } { let mut v = Frac::new_neg(1, 1); v /= Frac::new_neg(2, 1); assert_eq!(v, Frac::new(1, 2)); } { let mut v = Frac::infinity(); v /= Frac::zero(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::zero(); v /= Frac::infinity(); assert_eq!(v, Frac::zero()); } { let mut v = Frac::one(); v /= Frac::zero(); assert_eq!(v, Frac::infinity()); } /* Refs */ { let mut v = Frac::nan(); v /= &Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v /= &Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v /= &Frac::infinity(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v /= &Frac::neg_infinity(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v /= &Frac::one(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::one(); v /= &Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::one(); v /= &Frac::infinity(); assert_eq!(v, Frac::zero()); } { let mut v = Frac::one(); v /= &Frac::neg_infinity(); assert_eq!(v, Frac::zero()); } { let mut v = Frac::one(); v /= &Frac::new_neg(1, 1); assert_eq!(v, Frac::new_neg(1, 1)); } { let mut v = Frac::new_neg(1, 1); v /= &Frac::new(2, 1); assert_eq!(v, Frac::new_neg(1, 2)); } { let mut v = Frac::new_neg(1, 1); v /= &Frac::new_neg(1, 1); assert_eq!(v, Frac::one()); } { let mut v = Frac::new_neg(1, 1); v /= &Frac::new_neg(2, 1); assert_eq!(v, Frac::new(1, 2)); } { let mut v = Frac::infinity(); v /= &Frac::zero(); assert_eq!(v, Frac::infinity()); } { let mut v = Frac::zero(); v /= &Frac::infinity(); assert_eq!(v, Frac::zero()); } { let mut v = Frac::one(); v /= &Frac::zero(); assert_eq!(v, Frac::infinity()); } } #[test] fn rem_assign() { { let mut v = Frac::infinity(); v %= Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = 
Frac::infinity(); v %= Frac::infinity(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v %= Frac::one(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::one(); v %= Frac::infinity(); assert_eq!(v, Frac::one()); } /* Refs */ { let mut v = Frac::infinity(); v %= &Frac::nan(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v %= &Frac::infinity(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::infinity(); v %= &Frac::one(); assert_eq!(v, Frac::nan()); } { let mut v = Frac::one(); v %= &Frac::infinity(); assert_eq!(v, Frac::one()); } } #[test] fn checked_add() { assert_eq!(Some(Frac::nan()), Frac::nan().checked_add(&Frac::nan())); assert_eq!( Some(Frac::nan()), Frac::infinity().checked_add(&Frac::nan()) ); assert_eq!( Some(Frac::infinity()), Frac::infinity().checked_add(&Frac::infinity()) ); assert_eq!( Some(Frac::nan()), Frac::infinity().checked_add(&Frac::neg_infinity()) ); assert_eq!( Some(Frac::infinity()), Frac::infinity().checked_add(&Frac::one()) ); assert_eq!(Some(Frac::nan()), Frac::one().checked_add(&Frac::nan())); assert_eq!( Some(Frac::infinity()), Frac::one().checked_add(&Frac::infinity()) ); assert_eq!( Some(Frac::neg_infinity()), Frac::one().checked_add(&Frac::neg_infinity()) ); assert_eq!(Some(Frac::new(2, 1)), Frac::one().checked_add(&Frac::one())); assert_eq!( Some(Frac::zero()), Frac::one().checked_add(&Frac::new_neg(1, 1)) ); assert_eq!( Some(Frac::zero()), Frac::new_neg(1, 1).checked_add(&Frac::one()) ); assert_eq!( Some(Frac::new_neg(2, 1)), Frac::new_neg(1, 1).checked_add(&Frac::new_neg(1, 1)) ); assert_eq!( Some(Frac::new_neg(1, 1)), Frac::one().checked_add(&Frac::new_neg(2, 1)) ); assert_eq!( Some(Frac::new_neg(1, 1)), Frac::new_neg(2, 1).checked_add(&Frac::one()) ); } #[test] fn checked_sub() { assert_eq!(Some(Frac::nan()), Frac::nan().checked_sub(&Frac::nan())); assert_eq!( Some(Frac::nan()), Frac::infinity().checked_sub(&Frac::nan()) ); assert_eq!( Some(Frac::nan()), 
Frac::infinity().checked_sub(&Frac::infinity()) ); assert_eq!( Some(Frac::infinity()), Frac::infinity().checked_sub(&Frac::neg_infinity()) ); assert_eq!( Some(Frac::infinity()), Frac::infinity().checked_sub(&Frac::one()) ); assert_eq!(Some(Frac::nan()), Frac::one().checked_sub(&Frac::nan())); assert_eq!( Some(Frac::neg_infinity()), Frac::one().checked_sub(&Frac::infinity()) ); assert_eq!( Some(Frac::infinity()), Frac::one().checked_sub(&Frac::neg_infinity()) ); assert_eq!(Some(Frac::zero()), Frac::one().checked_sub(&Frac::one())); assert_eq!( Some(Frac::new(2, 1)), Frac::one().checked_sub(&Frac::new_neg(1, 1)) ); assert_eq!( Some(Frac::new_neg(2, 1)), Frac::new_neg(1, 1).checked_sub(&Frac::one()) ); assert_eq!( Some(Frac::zero()), Frac::new_neg(1, 1).checked_sub(&Frac::new_neg(1, 1)) ); assert_eq!( Some(Frac::new_neg(1, 1)), Frac::one().checked_sub(&Frac::new(2, 1)) ); assert_eq!( Some(Frac::one()), Frac::new_neg(1, 1).checked_sub(&Frac::new_neg(2, 1)) ); } #[test] fn checked_mul() { assert_eq!(Some(Frac::nan()), Frac::nan().checked_mul(&Frac::nan())); assert_eq!( Some(Frac::nan()), Frac::infinity().checked_mul(&Frac::nan()) ); assert_eq!( Some(Frac::infinity()), Frac::infinity().checked_mul(&Frac::infinity()) ); assert_eq!( Some(Frac::neg_infinity()), Frac::infinity().checked_mul(&Frac::neg_infinity()) ); assert_eq!( Some(Frac::infinity()), Frac::infinity().checked_mul(&Frac::one()) ); assert_eq!( Some(Frac::nan()), Frac::infinity().checked_mul(&Frac::zero()) ); assert_eq!( Some(Frac::nan()), Frac::zero().checked_mul(&Frac::infinity()) ); assert_eq!( Some(Frac::neg_infinity()), Frac::infinity().checked_mul(&Frac::new_neg(1, 1)) ); assert_eq!(Some(Frac::nan()), Frac::one().checked_mul(&Frac::nan())); assert_eq!( Some(Frac::infinity()), Frac::one().checked_mul(&Frac::infinity()) ); assert_eq!( Some(Frac::neg_infinity()), Frac::one().checked_mul(&Frac::neg_infinity()) ); assert_eq!(Some(Frac::one()), Frac::one().checked_mul(&Frac::one())); assert_eq!( 
Some(Frac::new_neg(1, 1)), Frac::one().checked_mul(&Frac::new_neg(1, 1)) ); assert_eq!( Some(Frac::new_neg(1, 1)), Frac::new_neg(1, 1).checked_mul(&Frac::one()) ); assert_eq!( Some(Frac::one()), Frac::new_neg(1, 1).checked_mul(&Frac::new_neg(1, 1)) ); assert_eq!( Some(Frac::new(2, 1)), Frac::one().checked_mul(&Frac::new(2, 1)) ); assert_eq!( Some(Frac::new(2, 1)), Frac::new_neg(1, 1).checked_mul(&Frac::new_neg(2, 1)) ); assert_eq!(Some(Frac::zero()), Frac::one().checked_mul(&Frac::zero())); assert_eq!( Some(Frac::zero()), Frac::new_neg(1, 1).checked_mul(&Frac::zero()) ); assert_eq!(Some(Frac::zero()), Frac::zero().checked_mul(&Frac::one())); assert_eq!( Some(Frac::zero()), Frac::zero().checked_mul(&Frac::new_neg(1, 1)) ); } #[test] fn checked_div() { assert_eq!(Some(Frac::nan()), Frac::nan().checked_div(&Frac::nan())); assert_eq!( Some(Frac::nan()), Frac::infinity().checked_div(&Frac::nan()) ); assert_eq!( Some(Frac::nan()), Frac::infinity().checked_div(&Frac::infinity()) ); assert_eq!( Some(Frac::nan()), Frac::infinity().checked_div(&Frac::neg_infinity()) ); assert_eq!( Some(Frac::infinity()), Frac::infinity().checked_div(&Frac::one()) ); assert_eq!( Some(Frac::neg_infinity()), Frac::infinity().checked_div(&Frac::new_neg(1, 1)) ); assert_eq!( Some(Frac::infinity()), Frac::infinity().checked_div(&Frac::zero()) ); assert_eq!( Some(Frac::zero()), Frac::zero().checked_div(&Frac::infinity()) ); assert_eq!( Some(Frac::neg_infinity()), Frac::infinity().checked_div(&Frac::new_neg(1, 1)) ); assert_eq!(Some(Frac::nan()), Frac::one().checked_div(&Frac::nan())); assert_eq!( Some(Frac::zero()), Frac::one().checked_div(&Frac::infinity()) ); assert_eq!( Some(Frac::zero()), Frac::one().checked_div(&Frac::neg_infinity()) ); assert_eq!(Some(Frac::one()), Frac::one().checked_div(&Frac::one())); assert_eq!( Some(Frac::new_neg(1, 1)), Frac::one().checked_div(&Frac::new_neg(1, 1)) ); assert_eq!( Some(Frac::new_neg(1, 1)), Frac::new_neg(1, 1).checked_div(&Frac::one()) ); assert_eq!( 
Some(Frac::one()), Frac::new_neg(1, 1).checked_div(&Frac::new_neg(1, 1)) ); assert_eq!( Some(Frac::new(1, 2)), Frac::one().checked_div(&Frac::new(2, 1)) ); assert_eq!( Some(Frac::new(1, 2)), Frac::new_neg(1, 1).checked_div(&Frac::new_neg(2, 1)) ); assert_eq!( Some(Frac::infinity()), Frac::one().checked_div(&Frac::zero()) ); assert_eq!( Some(Frac::neg_infinity()), Frac::new_neg(1, 1).checked_div(&Frac::zero()) ); assert_eq!(Some(Frac::zero()), Frac::zero().checked_div(&Frac::one())); assert_eq!( Some(Frac::zero()), Frac::zero().checked_div(&Frac::new_neg(1, 1)) ); assert_eq!(Some(Frac::nan()), Frac::zero().checked_div(&Frac::zero())); } #[test] fn from_str_radix() { assert_eq!(Frac::one(), Frac::from_str_radix("5/5", 10).unwrap()); assert_eq!(Frac::one(), Frac::from_str_radix("+5/5", 10).unwrap()); assert_eq!(Frac::new(1, 2), Frac::from_str_radix("+5/10", 10).unwrap()); assert_eq!( Frac::new_neg(4, 3), Frac::from_str_radix("-4/3", 10).unwrap() ); } #[test] fn signed() { // abs assert_eq!(Frac::one(), <Frac as Signed>::abs(&Frac::new_neg(1, 1))); assert_eq!(Frac::nan(), <Frac as Signed>::abs(&Frac::nan())); assert_eq!(Frac::infinity(), <Frac as Signed>::abs(&Frac::infinity())); assert_eq!( Frac::infinity(), <Frac as Signed>::abs(&Frac::neg_infinity()) ); // abs_sub assert_eq!(Frac::nan(), Frac::nan().abs_sub(&Frac::nan())); assert_eq!(Frac::nan(), Frac::infinity().abs_sub(&Frac::nan())); assert_eq!(Frac::zero(), Frac::infinity().abs_sub(&Frac::infinity())); assert_eq!( Frac::infinity(), Frac::infinity().abs_sub(&Frac::neg_infinity()) ); assert_eq!( Frac::zero(), Frac::neg_infinity().abs_sub(&Frac::neg_infinity()) ); assert_eq!( Frac::zero(), Frac::neg_infinity().abs_sub(&Frac::infinity()) ); assert_eq!(Frac::infinity(), Frac::infinity().abs_sub(&Frac::one())); assert_eq!( Frac::infinity(), Frac::infinity().abs_sub(&Frac::new_neg(1, 1)) ); assert_eq!(Frac::zero(), Frac::neg_infinity().abs_sub(&Frac::one())); assert_eq!(Frac::nan(), Frac::one().abs_sub(&Frac::nan())); 
assert_eq!(Frac::zero(), Frac::one().abs_sub(&Frac::infinity())); assert_eq!(Frac::infinity(), Frac::one().abs_sub(&Frac::neg_infinity())); assert_eq!( Frac::neg_infinity(), Frac::new_neg(1, 1).abs_sub(&Frac::neg_infinity()) ); assert_eq!(Frac::one(), Frac::new(2, 1).abs_sub(&Frac::one())); assert_eq!(Frac::one(), Frac::one().abs_sub(&Frac::new(2, 1))); // signum assert_eq!(Frac::nan(), Frac::nan().signum()); assert_eq!(Frac::one(), Frac::infinity().signum()); assert_eq!(Frac::one(), Frac::zero().signum()); assert_eq!(Frac::one(), Frac::one().signum()); assert_eq!(-Frac::one(), Frac::neg_infinity().signum()); assert_eq!(-Frac::one(), Frac::new_neg(1, 1).signum()); } #[test] fn to_primitive() { assert!(Frac::nan().to_i64().is_none()); assert!(Frac::nan().to_u64().is_none()); assert!(Frac::infinity().to_i64().is_none()); assert!(Frac::infinity().to_u64().is_none()); assert!(Frac::neg_infinity().to_i64().is_none()); assert!(Frac::neg_infinity().to_u64().is_none()); assert_eq!(Some(1), Frac::one().to_i64()); assert_eq!(Some(1), Frac::one().to_u64()); assert!(Frac::new(1, 2).to_i64().is_none()); assert!(Frac::new(1, 2).to_u64().is_none()); assert_eq!(Some(-1), Frac::new_neg(1, 1).to_i64()); assert!(Frac::new_neg(1, 1).to_u64().is_none()); /* f64 */ assert!(Frac::nan().to_f64().unwrap().is_nan()); assert_eq!(::std::f64::INFINITY, Frac::infinity().to_f64().unwrap()); assert_eq!( ::std::f64::NEG_INFINITY, Frac::neg_infinity().to_f64().unwrap() ); assert_eq!(1f64, Frac::one().to_f64().unwrap()); } #[test] fn summing_iterator() { let values = vec![Fraction::new(2u64, 3u64), Fraction::new(1u64, 3u64)]; let sum: Fraction = values.iter().sum(); assert_eq!(sum, Fraction::new(1u8, 1u8)); } #[test] fn product_iterator() { let values = vec![Fraction::new(2u64, 3u64), Fraction::new(1u64, 3u64)]; let product: Fraction = values.iter().product(); assert_eq!(product, Fraction::new(2u8, 9u8)); } #[test] fn fraction_from_float() { macro_rules! 
test_for_smaller_t { ( $($t:ty),*) => { $( // f32 tests let f = GenericFraction::<$t>::from(-std::f32::NAN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f32::NAN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f32::MIN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f32::MIN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f32::MAX); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f32::MAX); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f32::INFINITY); assert_eq!(format!("{}", f), "-inf"); let f = GenericFraction::<$t>::from(std::f32::INFINITY); assert_eq!(format!("{}", f), "inf"); let f = GenericFraction::<$t>::from(-0.0_f32); assert_eq!(format!("{}", f), "0"); let f = GenericFraction::<$t>::from(0.0_f32); assert_eq!(format!("{}", f), "0"); let f = GenericFraction::<$t>::from(-1.0_f32); assert_eq!(format!("{}", f), "-1"); let f = GenericFraction::<$t>::from(1.0_f32); assert_eq!(format!("{}", f), "1"); // f64 tests let f = GenericFraction::<$t>::from(-std::f64::NAN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f64::NAN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f64::MIN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f64::MIN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f64::MAX); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f64::MAX); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f64::INFINITY); assert_eq!(format!("{}", f), "-inf"); let f = GenericFraction::<$t>::from(std::f64::INFINITY); assert_eq!(format!("{}", f), "inf"); let f = GenericFraction::<$t>::from(-0.0_f64); assert_eq!(format!("{}", f), "0"); let f = GenericFraction::<$t>::from(0.0_f64); 
assert_eq!(format!("{}", f), "0"); let f = GenericFraction::<$t>::from(-1.0_f64); assert_eq!(format!("{}", f), "-1"); let f = GenericFraction::<$t>::from(1.0_f64); assert_eq!(format!("{}", f), "1"); // Arbitrary tests assert_eq!(format!("{}", f), "1"); let f = GenericFraction::<$t>::from(2.0); assert_eq!(format!("{}", f), "2"); let f = GenericFraction::<$t>::from(0.5); assert_eq!(format!("{}", f), "1/2"); let f = GenericFraction::<$t>::from(15978.649); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-0.75); assert_eq!(format!("{}", f), "-3/4"); )* }; } macro_rules! test_for_larger_t { ( $($t:ty),*) => { $( // f32 tests let f = GenericFraction::<$t>::from(-std::f32::NAN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f32::NAN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f32::MIN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f32::MIN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f32::MAX); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f32::MAX); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f32::INFINITY); assert_eq!(format!("{}", f), "-inf"); let f = GenericFraction::<$t>::from(std::f32::INFINITY); assert_eq!(format!("{}", f), "inf"); let f = GenericFraction::<$t>::from(-0.0_f32); assert_eq!(format!("{}", f), "0"); let f = GenericFraction::<$t>::from(0.0_f32); assert_eq!(format!("{}", f), "0"); let f = GenericFraction::<$t>::from(-1.0_f32); assert_eq!(format!("{}", f), "-1"); let f = GenericFraction::<$t>::from(1.0_f32); assert_eq!(format!("{}", f), "1"); // f64 tests let f = GenericFraction::<$t>::from(-std::f64::NAN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f64::NAN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f64::MIN); assert_eq!(format!("{}", f), "NaN"); let f = 
GenericFraction::<$t>::from(std::f64::MIN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f64::MAX); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f64::MAX); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f64::INFINITY); assert_eq!(format!("{}", f), "-inf"); let f = GenericFraction::<$t>::from(std::f64::INFINITY); assert_eq!(format!("{}", f), "inf"); let f = GenericFraction::<$t>::from(-0.0_f64); assert_eq!(format!("{}", f), "0"); let f = GenericFraction::<$t>::from(0.0_f64); assert_eq!(format!("{}", f), "0"); let f = GenericFraction::<$t>::from(-1.0_f64); assert_eq!(format!("{}", f), "-1"); let f = GenericFraction::<$t>::from(1.0_f64); assert_eq!(format!("{}", f), "1"); // Arbitrary tests let f = GenericFraction::<$t>::from(2.0); assert_eq!(format!("{}", f), "2"); let f = GenericFraction::<$t>::from(0.5); assert_eq!(format!("{}", f), "1/2"); let f = GenericFraction::<$t>::from(15978.649); assert_eq!(format!("{}", f), "15978649/1000"); let f = GenericFraction::<$t>::from(-0.75); assert_eq!(format!("{}", f), "-3/4"); )* }; } macro_rules! 
test_for_big_t { ( $($t:ty),*) => { $( // Note: we don't test min/max for big_t because the value depends on the type // f32 tests let f = GenericFraction::<$t>::from(-std::f32::NAN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f32::NAN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f32::INFINITY); assert_eq!(format!("{}", f), "-inf"); let f = GenericFraction::<$t>::from(std::f32::INFINITY); assert_eq!(format!("{}", f), "inf"); let f = GenericFraction::<$t>::from(-0.0_f32); assert_eq!(format!("{}", f), "0"); let f = GenericFraction::<$t>::from(0.0_f32); assert_eq!(format!("{}", f), "0"); let f = GenericFraction::<$t>::from(-1.0_f32); assert_eq!(format!("{}", f), "-1"); let f = GenericFraction::<$t>::from(1.0_f32); assert_eq!(format!("{}", f), "1"); // f64 tests let f = GenericFraction::<$t>::from(-std::f64::NAN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(std::f64::NAN); assert_eq!(format!("{}", f), "NaN"); let f = GenericFraction::<$t>::from(-std::f64::INFINITY); assert_eq!(format!("{}", f), "-inf"); let f = GenericFraction::<$t>::from(std::f64::INFINITY); assert_eq!(format!("{}", f), "inf"); let f = GenericFraction::<$t>::from(-0.0_f64); assert_eq!(format!("{}", f), "0"); let f = GenericFraction::<$t>::from(0.0_f64); assert_eq!(format!("{}", f), "0"); let f = GenericFraction::<$t>::from(-1.0_f64); assert_eq!(format!("{}", f), "-1"); let f = GenericFraction::<$t>::from(1.0_f64); assert_eq!(format!("{}", f), "1"); // Arbitrary tests let f = GenericFraction::<$t>::from(2.0); assert_eq!(format!("{}", f), "2"); let f = GenericFraction::<$t>::from(0.5); assert_eq!(format!("{}", f), "1/2"); let f = GenericFraction::<$t>::from(15978.649); assert_eq!(format!("{}", f), "15978649/1000"); let f = GenericFraction::<$t>::from(-0.75); assert_eq!(format!("{}", f), "-3/4"); let f = GenericFraction::<$t>::from(-0.5); assert_eq!(format!("{}", f), "-1/2"); )* }; } 
test_for_smaller_t!(u8, i8, u16, i16); test_for_larger_t!(u32, i32, u64, i64, usize, isize); test_for_big_t!(u128, i128); #[cfg(feature = "with-bigint")] { use crate::{BigInt, BigUint}; test_for_big_t!(BigUint, BigInt); } } #[test] fn fraction_test_default() { let fra = Frac::default(); assert_eq!(fra.numer(), Some(&0u8)); assert_eq!(fra.denom(), Some(&1u8)); #[cfg(feature = "with-bigint")] { let fra = BigFraction::default(); assert_eq!(fra.numer(), Some(&BigUint::from(0u8))); assert_eq!(fra.denom(), Some(&BigUint::from(1u8))); } } }
33.536926
132
0.450354
797e7fff94fe9ec35629b9563ef3dcd5d5f7222f
340
// primitive_types3.rs
// Create an array with at least 100 elements in it where the ??? is.
// Execute `rustlings hint primitive_types3` for hints!

/// Entry point for the rustlings `primitive_types3` exercise.
///
/// The exercise asks for an array with at least 100 elements; the original
/// `[0, 101]` is a two-element array (the values 0 and 101), so `a.len()` was 2
/// and the "Meh" branch always ran. `[0; 100]` uses the repeat-expression form
/// of an array literal: 100 copies of the element `0`.
fn main() {
    // `[value; count]` — an array of `count` elements, each initialized to `value`.
    let a = [0; 100];

    if a.len() >= 100 {
        println!("Wow, that's a big array!");
    } else {
        println!("Meh, I eat arrays like that for breakfast.");
    }
}
24.285714
69
0.594118
0328df5d7983d61ceaaa214ec94f8fc85b0a19a2
2,956
//! bevy_doryen example: an animated circle of dots rendered on a doryen console,
//! driven by the bevy ECS (one `Circle` entity carrying position/radius/angle
//! components, updated each tick and drawn each render pass).

use bevy_app::App;
use bevy_doryen::doryen::AppOptions;
use bevy_doryen::{DoryenPlugin, DoryenPluginSettings, RenderSystemExtensions, RootConsole};
use bevy_ecs::bundle::Bundle;
use bevy_ecs::entity::Entity;
use bevy_ecs::system::{Commands, IntoSystem, Query, Res, ResMut};

// Marker component identifying the animated circle entity.
#[derive(Default)]
struct Circle;

// 2D position of the circle's center, in console cell coordinates.
#[derive(Default, Copy, Clone, PartialEq)]
struct Position {
    x: f32,
    y: f32,
}

// Circle radius (newtype over f32).
#[derive(Default, Copy, Clone, PartialEq)]
struct Radius(f32);

// Animation phase angle (newtype over f32); advanced every update tick.
#[derive(Default, Copy, Clone, PartialEq)]
struct Angle(f32);

// All components spawned together for the circle entity.
#[derive(Bundle)]
struct CircleBundle {
    circle: Circle,
    position: Position,
    radius: Radius,
    angle: Angle,
}

// Resource holding the `Entity` id of the spawned circle so the
// update/render systems can look it up without a marker query.
struct Entities {
    circle: Entity,
}

fn main() {
    App::build()
        .insert_resource(DoryenPluginSettings {
            app_options: AppOptions {
                window_title: String::from("alpha test"),
                ..Default::default()
            },
            ..Default::default()
        })
        .add_plugin(DoryenPlugin)
        .add_startup_system(init.system())
        .add_system(update_circle.system())
        .add_doryen_render_system(render.system())
        .run();
}

/// Startup system: spawn the circle entity and stash its id in the
/// `Entities` resource.
fn init(mut commands: Commands) {
    let circle = commands
        .spawn_bundle(CircleBundle {
            circle: Circle,
            position: Position { x: 0., y: 0. },
            radius: Radius(10.),
            angle: Angle(0.),
        })
        .id();
    commands.insert_resource(Entities { circle });
}

/// Per-tick system: advance the phase angle and derive a pulsing radius and an
/// orbiting center position from it (sin/cos at different rates).
fn update_circle(
    root_console: Res<RootConsole>,
    entities: Res<Entities>,
    mut circle_query: Query<(&mut Position, &mut Radius, &mut Angle, &Circle)>,
) {
    let (mut position, mut radius, mut angle, _) = circle_query.get_mut(entities.circle).unwrap();
    // update the circle radius and center position
    angle.0 += 0.6;
    radius.0 = 10.0 + 3.0 * (angle.0 / 10.0).sin();
    let cs = (angle.0 / 20.0).cos();
    let sn = (angle.0 / 15.0).sin();
    position.x = (root_console.get_width() / 2) as f32 + cs * 15.0;
    position.y = (root_console.get_height() / 2) as f32 + sn * 15.0;
}

/// Render system: fade the previous frame toward black, then plot the circle
/// as 10 blue dots evenly spaced around the current center/radius.
fn render(
    entities: Res<Entities>,
    mut root_console: ResMut<RootConsole>,
    circle_query: Query<(&Position, &Radius, &Angle, &Circle)>,
) {
    // fill the console with transparent black. The more opaque it is, the faster
    // the previous frames will fade to black.
    // replace alpha with a lower value, like 10 or 5 and the effect will last longer.
    root_console.clear(None, Some((0, 0, 0, 20)), None);
    let (position, radius, angle, _) = circle_query.get(entities.circle).unwrap();
    // here we render current frame (only a circle of blue dots)
    for r in 0..10 {
        let angle = angle.0 + r as f32 * std::f32::consts::PI * 2.0 / 10.0;
        let cs = angle.cos();
        let sn = angle.sin();
        let x = position.x + radius.0 * cs;
        let y = position.y + radius.0 * sn;
        root_console.back(x as i32, y as i32, (0, 0, 255, 255));
    }
}
30.163265
121
0.615697
1db15aef986821fce29b948b5d07f99155f6e9e7
1,925
/// Small CLI tool: run (and time) the pathfinder database migrations against a
/// single SQLite file given as the only argument, printing elapsed time and the
/// resulting change in file size.
fn main() {
    // Default RUST_LOG so migration progress from pathfinder is visible.
    if std::env::var_os("RUST_LOG").is_none() {
        std::env::set_var("RUST_LOG", "pathfinder=info");
    }
    tracing_subscriber::fmt::init();

    // simple tool for running and timing the database migrations on a given file.
    // Exactly one argument is accepted; anything else prints usage and exits 1.
    let path = match std::env::args().nth(1) {
        Some(name) if std::env::args().count() == 2 => name,
        _ => {
            println!(
                "USAGE: {} db_file",
                std::env::args().next().as_deref().unwrap_or("migrate_db")
            );
            std::process::exit(1);
        }
    };

    let path = std::path::PathBuf::from(path);

    // File size before migration, as i64 so the delta below can be negative.
    let size_before = std::fs::metadata(&path).expect("Path does not exist").len() as i64;

    let started_at = std::time::Instant::now();
    let storage = pathfinder_lib::storage::Storage::migrate(path.clone()).unwrap();
    let migrated_at = std::time::Instant::now();

    let size_after_migration = std::fs::metadata(&path)
        .expect("Migration removed the database?")
        .len() as i64;

    println!(
        "migrated in {:?}, size change: {}",
        migrated_at - started_at,
        size_after_migration - size_before
    );

    // in general one does not want to do the full vacuum because it's going to
    // take a long time — kept behind `if false` as an easily re-enabled experiment.
    if false {
        let conn = storage.connection().unwrap();
        let vacuum_started = std::time::Instant::now();
        let vacuum_ret = conn.execute("VACUUM", []).expect("vacuum failed");
        // Drop the connection and storage before re-reading the file metadata.
        drop(conn);
        drop(storage);
        let vacuumed_at = std::time::Instant::now();
        let size_after_vacuum = std::fs::metadata(&path)
            .expect("Vacuuming removed the database?")
            .len() as i64;
        println!(
            "vacuumed in {:?}, size change: {}, VACUUM returned {}",
            vacuumed_at - vacuum_started,
            size_after_vacuum - size_after_migration,
            vacuum_ret
        );
    }
}
32.627119
96
0.566234
4841c3dfe9a444a3873fa192c1d052feb77d772b
332
// Compile-fail fixture for maple_core's `template!` macro: the bodies below are
// intentionally erroneous component invocations. Do not "fix" them — the point
// is that they must NOT compile.

use maple_core::prelude::*;

// A valid component, used as the target of the bad invocations below.
#[component(C<G>)]
fn c() -> TemplateResult<G> {
    template! {
        div
    }
}

// Each line is expected to be rejected by the `template!` macro:
// an unknown component name, a component without call parentheses,
// and a component called with an argument it does not take.
fn compile_fail<G: GenericNode>() {
    let _: TemplateResult<G> = template! { UnknownComponent() };

    let _: TemplateResult<G> = template! { C };

    let _: TemplateResult<G> = template! { C(1) };
}

fn main() {}
18.444444
64
0.578313
90696f89881a9203dd6b63beadd6f8e8aaedaaad
1,222
/* TODO (MIKAEL)
- Add or build db migration lib.
*/
use super::entities::AppConfig;
use super::schema::*;
use rusqlite::NO_PARAMS;

/// Run the hand-rolled, versioned database migration: create the `Settings`
/// table on first run (recording schema version 1), then step the stored
/// version forward one migration at a time.
///
/// NOTE(review): `DBContext`, `DBSetting` and `DB_VERSION_SETTING_NAME` are
/// presumably brought in by `super::schema::*` — confirm against that module.
pub fn setup(app_config: &AppConfig) {
    let db_context = DBContext::new(app_config);

    println!("Database migration started...");

    // Bootstrap: if the Settings table is missing this is a fresh database,
    // so create it and record version 1.
    let settings_table_exists = db_context.check_if_table_exists("Settings").unwrap();
    if !settings_table_exists {
        db_context
            .connection
            .execute(
                "CREATE TABLE Settings(
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT NOT NULL,
                value TEXT NOT NULL)",
                NO_PARAMS,
            )
            .unwrap();

        let setting_row =
            DBSetting::from_values(DB_VERSION_SETTING_NAME.to_string(), 1.to_string());
        db_context.insert_db_setting(&setting_row).unwrap();
        print_database_version_update(1);
    }

    // Read the currently installed schema version (stored as text, parsed here).
    let mut version_row = db_context
        .get_db_setting_by_name(DB_VERSION_SETTING_NAME)
        .unwrap();
    let mut version = version_row.value.parse().unwrap();

    // Migration step 1 -> 2. No schema changes yet beyond bumping the stored
    // version; future migrations follow the same `if version < N` pattern.
    if version < 2 {
        version = 2;
        version_row.value = 2.to_string();
        db_context.update_db_setting(&version_row).unwrap();
        print_database_version_update(version);
    }

    println!("Database setup completed");
}

// Log a single installed-version line; kept separate so every migration step
// reports uniformly.
fn print_database_version_update(version: i32) {
    println!("v.{} installed", version);
}
24.44
83
0.728314
67d02538b4b0d1e6f27f0b75977beba37c7082bc
20,872
//! Request effects. //! //! Requests typically ask other components to perform a service and report back the result. See the //! top-level module documentation for details. use std::{ collections::{HashMap, HashSet}, fmt::{self, Debug, Display, Formatter}, net::SocketAddr, }; use semver::Version; use casper_execution_engine::{ core::engine_state::{ self, balance::{BalanceRequest, BalanceResult}, execute_request::ExecuteRequest, execution_result::ExecutionResults, genesis::GenesisResult, query::{QueryRequest, QueryResult}, upgrade::{UpgradeConfig, UpgradeResult}, }, shared::{additive_map::AdditiveMap, transform::Transform}, storage::global_state::CommitResult, }; use casper_types::{Key, URef}; use super::Responder; use crate::{ components::{ fetcher::FetchResult, storage::{ DeployHashes, DeployHeaderResults, DeployMetadata, DeployResults, StorageType, Value, }, }, crypto::{asymmetric_key::Signature, hash::Digest}, types::{ json_compatibility::ExecutionResult, Block as LinearBlock, BlockHash, BlockHeader, Deploy, DeployHash, FinalizedBlock, Item, ProtoBlockHash, StatusFeed, Timestamp, }, utils::DisplayIter, Chainspec, }; type DeployAndMetadata<S> = ( <S as StorageType>::Deploy, DeployMetadata<<S as StorageType>::Block>, ); /// A metrics request. #[derive(Debug)] pub enum MetricsRequest { /// Render current node metrics as prometheus-formatted string. RenderNodeMetricsText { /// Resopnder returning the rendered metrics or `None`, if an internal error occurred. responder: Responder<Option<String>>, }, } impl Display for MetricsRequest { fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { match self { MetricsRequest::RenderNodeMetricsText { .. } => write!(formatter, "get metrics text"), } } } /// A networking request. #[derive(Debug)] #[must_use] pub enum NetworkRequest<I, P> { /// Send a message on the network to a specific peer. SendMessage { /// Message destination. dest: I, /// Message payload. 
payload: P, /// Responder to be called when the message is queued. responder: Responder<()>, }, /// Send a message on the network to all peers. /// Note: This request is deprecated and should be phased out, as not every network /// implementation is likely to implement broadcast support. Broadcast { /// Message payload. payload: P, /// Responder to be called when all messages are queued. responder: Responder<()>, }, /// Gossip a message to a random subset of peers. Gossip { /// Payload to gossip. payload: P, /// Number of peers to gossip to. This is an upper bound, otherwise best-effort. count: usize, /// Node IDs of nodes to exclude from gossiping to. exclude: HashSet<I>, /// Responder to be called when all messages are queued. responder: Responder<HashSet<I>>, }, } impl<I, P> NetworkRequest<I, P> { /// Transform a network request by mapping the contained payload. /// /// This is a replacement for a `From` conversion that is not possible without specialization. pub(crate) fn map_payload<F, P2>(self, wrap_payload: F) -> NetworkRequest<I, P2> where F: FnOnce(P) -> P2, { match self { NetworkRequest::SendMessage { dest, payload, responder, } => NetworkRequest::SendMessage { dest, payload: wrap_payload(payload), responder, }, NetworkRequest::Broadcast { payload, responder } => NetworkRequest::Broadcast { payload: wrap_payload(payload), responder, }, NetworkRequest::Gossip { payload, count, exclude, responder, } => NetworkRequest::Gossip { payload: wrap_payload(payload), count, exclude, responder, }, } } } impl<I, P> Display for NetworkRequest<I, P> where I: Display, P: Display, { fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { match self { NetworkRequest::SendMessage { dest, payload, .. } => { write!(formatter, "send to {}: {}", dest, payload) } NetworkRequest::Broadcast { payload, .. } => { write!(formatter, "broadcast: {}", payload) } NetworkRequest::Gossip { payload, .. } => write!(formatter, "gossip: {}", payload), } } } /// A networking info request. 
#[derive(Debug)] #[must_use] pub enum NetworkInfoRequest<I> { /// Get incoming and outgoing peers. GetPeers { /// Responder to be called with all connected peers. responder: Responder<HashMap<I, SocketAddr>>, }, } impl<I> Display for NetworkInfoRequest<I> where I: Display, { fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { match self { NetworkInfoRequest::GetPeers { responder: _ } => write!(formatter, "get peers"), } } } #[derive(Debug)] /// A storage request. #[must_use] pub enum StorageRequest<S: StorageType + 'static> { /// Store given block. PutBlock { /// Block to be stored. block: Box<S::Block>, /// Responder to call with the result. Returns true if the block was stored on this /// attempt or false if it was previously stored. responder: Responder<bool>, }, /// Retrieve block with given hash. GetBlock { /// Hash of block to be retrieved. block_hash: <S::Block as Value>::Id, /// Responder to call with the result. Returns `None` is the block doesn't exist in local /// storage. responder: Responder<Option<S::Block>>, }, /// Retrieve block header with given hash. GetBlockHeader { /// Hash of block to get header of. block_hash: <S::Block as Value>::Id, /// Responder to call with the result. Returns `None` is the block header doesn't exist in /// local storage. responder: Responder<Option<<S::Block as Value>::Header>>, }, /// Store given deploy. PutDeploy { /// Deploy to store. deploy: Box<S::Deploy>, /// Responder to call with the result. Returns true if the deploy was stored on this /// attempt or false if it was previously stored. responder: Responder<bool>, }, /// Retrieve deploys with given hashes. GetDeploys { /// Hashes of deploys to be retrieved. deploy_hashes: DeployHashes<S>, /// Responder to call with the results. responder: Responder<DeployResults<S>>, }, /// Retrieve deploy headers with given hashes. GetDeployHeaders { /// Hashes of deploy headers to be retrieved. deploy_hashes: DeployHashes<S>, /// Responder to call with the results. 
responder: Responder<DeployHeaderResults<S>>, }, /// Store the given execution results for the deploys in the given block. PutExecutionResults { /// Hash of block. block_hash: <S::Block as Value>::Id, /// Execution results. execution_results: HashMap<<S::Deploy as Value>::Id, ExecutionResult>, /// Responder to call with the result. Returns true if the execution results were stored /// on this attempt or false if they were previously stored. responder: Responder<()>, }, /// Retrieve deploy and its metadata. GetDeployAndMetadata { /// Hash of deploy to be retrieved. deploy_hash: <S::Deploy as Value>::Id, /// Responder to call with the results. responder: Responder<Option<DeployAndMetadata<S>>>, }, /// Store given chainspec. PutChainspec { /// Chainspec. chainspec: Box<Chainspec>, /// Responder to call with the result. responder: Responder<()>, }, /// Retrieve chainspec with given version. GetChainspec { /// Version. version: Version, /// Responder to call with the result. responder: Responder<Option<Chainspec>>, }, } impl<S: StorageType> Display for StorageRequest<S> { fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { match self { StorageRequest::PutBlock { block, .. } => write!(formatter, "put {}", block), StorageRequest::GetBlock { block_hash, .. } => write!(formatter, "get {}", block_hash), StorageRequest::GetBlockHeader { block_hash, .. } => { write!(formatter, "get {}", block_hash) } StorageRequest::PutDeploy { deploy, .. } => write!(formatter, "put {}", deploy), StorageRequest::GetDeploys { deploy_hashes, .. } => { write!(formatter, "get {}", DisplayIter::new(deploy_hashes.iter())) } StorageRequest::GetDeployHeaders { deploy_hashes, .. } => write!( formatter, "get headers {}", DisplayIter::new(deploy_hashes.iter()) ), StorageRequest::PutExecutionResults { block_hash, .. } => { write!(formatter, "put execution results for {}", block_hash) } StorageRequest::GetDeployAndMetadata { deploy_hash, .. 
} => { write!(formatter, "get deploy and metadata for {}", deploy_hash) } StorageRequest::PutChainspec { chainspec, .. } => write!( formatter, "put chainspec {}", chainspec.genesis.protocol_version ), StorageRequest::GetChainspec { version, .. } => { write!(formatter, "get chainspec {}", version) } } } } /// A `DeployBuffer` request. #[derive(Debug)] #[must_use] pub enum DeployBufferRequest { /// Request a list of deploys to propose in a new block. ListForInclusion { /// The instant for which the deploy is requested. current_instant: Timestamp, /// Set of block hashes pointing to blocks whose deploys should be excluded. past_blocks: HashSet<ProtoBlockHash>, /// Responder to call with the result. responder: Responder<HashSet<DeployHash>>, }, } impl Display for DeployBufferRequest { fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { match self { DeployBufferRequest::ListForInclusion { current_instant, past_blocks, responder: _, } => write!( formatter, "list for inclusion: instant {} past {}", current_instant, past_blocks.len() ), } } } /// Abstract API request. /// /// An API request is an abstract request that does not concern itself with serialization or /// transport. #[derive(Debug)] #[must_use] pub enum ApiRequest<I> { /// Submit a deploy to be announced. SubmitDeploy { /// The deploy to be announced. deploy: Box<Deploy>, /// Responder to call. responder: Responder<()>, }, /// If `maybe_hash` is `Some`, return the specified block if it exists, else `None`. If /// `maybe_hash` is `None`, return the latest block. GetBlock { /// The hash of the block to be retrieved. maybe_hash: Option<BlockHash>, /// Responder to call with the result. responder: Responder<Option<LinearBlock>>, }, /// Query the global state at the given root hash. QueryGlobalState { /// The global state hash. global_state_hash: Digest, /// Hex-encoded `casper_types::Key`. base_key: Key, /// The path components starting from the key as base. 
path: Vec<String>, /// Responder to call with the result. responder: Responder<Result<QueryResult, engine_state::Error>>, }, /// Query the global state at the given root hash. GetBalance { /// The global state hash. global_state_hash: Digest, /// The purse URef. purse_uref: URef, /// Responder to call with the result. responder: Responder<Result<BalanceResult, engine_state::Error>>, }, /// Return the specified deploy and metadata if it exists, else `None`. GetDeploy { /// The hash of the deploy to be retrieved. hash: DeployHash, /// Responder to call with the result. responder: Responder<Option<(Deploy, DeployMetadata<LinearBlock>)>>, }, /// Return the connected peers. GetPeers { /// Responder to call with the result. responder: Responder<HashMap<I, SocketAddr>>, }, /// Return string formatted status or `None` if an error occurred. GetStatus { /// Responder to call with the result. responder: Responder<StatusFeed<I>>, }, /// Return string formatted, prometheus compatible metrics or `None` if an error occurred. GetMetrics { /// Responder to call with the result. responder: Responder<Option<String>>, }, } impl<I> Display for ApiRequest<I> { fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { match self { ApiRequest::SubmitDeploy { deploy, .. } => write!(formatter, "submit {}", *deploy), ApiRequest::GetBlock { maybe_hash: Some(hash), .. } => write!(formatter, "get {}", hash), ApiRequest::GetBlock { maybe_hash: None, .. } => write!(formatter, "get latest block"), ApiRequest::QueryGlobalState { global_state_hash, base_key, path, .. } => write!( formatter, "query {}, base_key: {}, path: {:?}", global_state_hash, base_key, path ), ApiRequest::GetBalance { global_state_hash, purse_uref, .. } => write!( formatter, "balance {}, purse_uref: {}", global_state_hash, purse_uref ), ApiRequest::GetDeploy { hash, .. } => write!(formatter, "get {}", hash), ApiRequest::GetPeers { .. } => write!(formatter, "get peers"), ApiRequest::GetStatus { .. 
} => write!(formatter, "get status"), ApiRequest::GetMetrics { .. } => write!(formatter, "get metrics"), } } } /// A contract runtime request. #[derive(Debug)] #[must_use] pub enum ContractRuntimeRequest { /// Commit genesis chainspec. CommitGenesis { /// The chainspec. chainspec: Box<Chainspec>, /// Responder to call with the result. responder: Responder<Result<GenesisResult, engine_state::Error>>, }, /// An `ExecuteRequest` that contains multiple deploys that will be executed. Execute { /// Execution request containing deploys. execute_request: ExecuteRequest, /// Responder to call with the execution result. responder: Responder<Result<ExecutionResults, engine_state::RootNotFound>>, }, /// A request to commit existing execution transforms. Commit { /// A valid pre state hash. pre_state_hash: Digest, /// Effects obtained through `ExecutionResult` effects: AdditiveMap<Key, Transform>, /// Responder to call with the commit result. responder: Responder<Result<CommitResult, engine_state::Error>>, }, /// A request to run upgrade. Upgrade { /// Upgrade config. upgrade_config: UpgradeConfig, /// Responder to call with the upgrade result. responder: Responder<Result<UpgradeResult, engine_state::Error>>, }, /// A query request. Query { /// Query request. query_request: QueryRequest, /// Responder to call with the query result. responder: Responder<Result<QueryResult, engine_state::Error>>, }, /// A balance request. GetBalance { /// Balance request. balance_request: BalanceRequest, /// Responder to call with the balance result. responder: Responder<Result<BalanceResult, engine_state::Error>>, }, } impl Display for ContractRuntimeRequest { fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { match self { ContractRuntimeRequest::CommitGenesis { chainspec, .. } => write!( formatter, "commit genesis {}", chainspec.genesis.protocol_version ), ContractRuntimeRequest::Execute { execute_request, .. 
} => write!( formatter, "execute request: {}", execute_request.parent_state_hash ), ContractRuntimeRequest::Commit { pre_state_hash, effects, .. } => write!( formatter, "commit request: {} {:?}", pre_state_hash, effects ), ContractRuntimeRequest::Upgrade { upgrade_config, .. } => { write!(formatter, "upgrade request: {:?}", upgrade_config) } ContractRuntimeRequest::Query { query_request, .. } => { write!(formatter, "query request: {:?}", query_request) } ContractRuntimeRequest::GetBalance { balance_request, .. } => write!(formatter, "balance request: {:?}", balance_request), } } } /// Fetcher related requests. #[derive(Debug)] #[must_use] pub enum FetcherRequest<I, T: Item> { /// Return the specified item if it exists, else `None`. Fetch { /// The ID of the item to be retrieved. id: T::Id, /// The peer id of the peer to be asked if the item is not held locally peer: I, /// Responder to call with the result. responder: Responder<Option<FetchResult<T>>>, }, } impl<I, T: Item> Display for FetcherRequest<I, T> { fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { match self { FetcherRequest::Fetch { id, .. } => write!(formatter, "request item by id {}", id), } } } /// A contract runtime request. #[derive(Debug)] #[must_use] pub enum BlockExecutorRequest { /// A request to execute finalized block. ExecuteBlock(FinalizedBlock), } impl Display for BlockExecutorRequest { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { BlockExecutorRequest::ExecuteBlock(finalized_block) => { write!(f, "execute block {}", finalized_block) } } } } /// A block validator request. #[derive(Debug)] #[must_use] pub struct BlockValidationRequest<T, I> { /// The block to be validated. pub(crate) block: T, /// The sender of the block, which will be asked to provide all missing deploys. pub(crate) sender: I, /// Responder to call with the result. /// /// Indicates whether or not validation was successful and returns `block` unchanged. 
pub(crate) responder: Responder<(bool, T)>, } impl<T: Display, I: Display> Display for BlockValidationRequest<T, I> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let BlockValidationRequest { block, sender, .. } = self; write!(f, "validate block {} from {}", block, sender) } } #[derive(Debug)] /// Requests issued to the Linear Chain component. pub enum LinearChainRequest<I> { /// Request whole block from the linear chain, by hash. BlockRequest(BlockHash, I), /// Get last finalized block. LastFinalizedBlock(Responder<Option<LinearBlock>>), } impl<I: Display> Display for LinearChainRequest<I> { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { LinearChainRequest::BlockRequest(bh, peer) => { write!(f, "block request for hash {} from {}", bh, peer) } LinearChainRequest::LastFinalizedBlock(_) => write!(f, "last finalized block request"), } } } #[derive(Debug)] #[must_use] /// Consensus component requests. pub enum ConsensusRequest { /// Request for consensus to sign a new linear chain block and possibly start a new era. HandleLinearBlock(Box<BlockHeader>, Responder<Signature>), }
33.938211
100
0.58164
e6abe499b430d891ec49b68a9ad32e1f5e90ee9b
14,871
//! Read configurations files. #![allow(clippy::module_name_repetitions)] use serde::de::{Deserializer, IgnoredAny, IntoDeserializer, MapAccess, Visitor}; use serde::Deserialize; use std::error::Error; use std::path::{Path, PathBuf}; use std::{env, fmt, fs, io}; /// Holds information used by `MISSING_ENFORCED_IMPORT_RENAMES` lint. #[derive(Clone, Debug, Deserialize)] pub struct Rename { pub path: String, pub rename: String, } /// A single disallowed method, used by the `DISALLOWED_METHODS` lint. #[derive(Clone, Debug, Deserialize)] #[serde(untagged)] pub enum DisallowedMethod { Simple(String), WithReason { path: String, reason: Option<String> }, } impl DisallowedMethod { pub fn path(&self) -> &str { let (Self::Simple(path) | Self::WithReason { path, .. }) = self; path } } /// A single disallowed type, used by the `DISALLOWED_TYPES` lint. #[derive(Clone, Debug, Deserialize)] #[serde(untagged)] pub enum DisallowedType { Simple(String), WithReason { path: String, reason: Option<String> }, } /// Conf with parse errors #[derive(Default)] pub struct TryConf { pub conf: Conf, pub errors: Vec<String>, } impl TryConf { fn from_error(error: impl Error) -> Self { Self { conf: Conf::default(), errors: vec![error.to_string()], } } } macro_rules! define_Conf { ($( $(#[doc = $doc:literal])+ $(#[conf_deprecated($dep:literal)])? 
($name:ident: $ty:ty = $default:expr), )*) => { /// Clippy lint configuration pub struct Conf { $($(#[doc = $doc])+ pub $name: $ty,)* } mod defaults { $(pub fn $name() -> $ty { $default })* } impl Default for Conf { fn default() -> Self { Self { $($name: defaults::$name(),)* } } } impl<'de> Deserialize<'de> for TryConf { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de> { deserializer.deserialize_map(ConfVisitor) } } #[derive(Deserialize)] #[serde(field_identifier, rename_all = "kebab-case")] #[allow(non_camel_case_types)] enum Field { $($name,)* third_party, } struct ConfVisitor; impl<'de> Visitor<'de> for ConfVisitor { type Value = TryConf; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("Conf") } fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error> where V: MapAccess<'de> { let mut errors = Vec::new(); $(let mut $name = None;)* // could get `Field` here directly, but get `str` first for diagnostics while let Some(name) = map.next_key::<&str>()? { match Field::deserialize(name.into_deserializer())? { $(Field::$name => { $(errors.push(format!("deprecated field `{}`. {}", name, $dep));)? match map.next_value() { Err(e) => errors.push(e.to_string()), Ok(value) => match $name { Some(_) => errors.push(format!("duplicate field `{}`", name)), None => $name = Some(value), } } })* // white-listed; ignore Field::third_party => drop(map.next_value::<IgnoredAny>()) } } let conf = Conf { $($name: $name.unwrap_or_else(defaults::$name),)* }; Ok(TryConf { conf, errors }) } } #[cfg(feature = "internal")] pub mod metadata { use crate::utils::internal_lints::metadata_collector::ClippyConfiguration; macro_rules! 
wrap_option { () => (None); ($x:literal) => (Some($x)); } pub(crate) fn get_configuration_metadata() -> Vec<ClippyConfiguration> { vec![ $( { let deprecation_reason = wrap_option!($($dep)?); ClippyConfiguration::new( stringify!($name), stringify!($ty), format!("{:?}", super::defaults::$name()), concat!($($doc, '\n',)*), deprecation_reason, ) }, )+ ] } } }; } define_Conf! { /// Lint: ENUM_VARIANT_NAMES, LARGE_TYPES_PASSED_BY_VALUE, TRIVIALLY_COPY_PASS_BY_REF, UNNECESSARY_WRAPS, UPPER_CASE_ACRONYMS, WRONG_SELF_CONVENTION, BOX_COLLECTION, REDUNDANT_ALLOCATION, RC_BUFFER, VEC_BOX, OPTION_OPTION, LINKEDLIST, RC_MUTEX. /// /// Suppress lints whenever the suggested change would cause breakage for other crates. (avoid_breaking_exported_api: bool = true), /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED. /// /// The minimum rust version that the project supports (msrv: Option<String> = None), /// Lint: BLACKLISTED_NAME. /// /// The list of blacklisted names to lint about. NB: `bar` is not here since it has legitimate uses (blacklisted_names: Vec<String> = ["foo", "baz", "quux"].iter().map(ToString::to_string).collect()), /// Lint: COGNITIVE_COMPLEXITY. /// /// The maximum cognitive complexity a function can have (cognitive_complexity_threshold: u64 = 25), /// DEPRECATED LINT: CYCLOMATIC_COMPLEXITY. /// /// Use the Cognitive Complexity lint instead. 
#[conf_deprecated("Please use `cognitive-complexity-threshold` instead")] (cyclomatic_complexity_threshold: Option<u64> = None), /// Lint: DOC_MARKDOWN. /// /// The list of words this lint should not consider as identifiers needing ticks (doc_valid_idents: Vec<String> = [ "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "DirectX", "ECMAScript", "GPLv2", "GPLv3", "GitHub", "GitLab", "IPv4", "IPv6", "ClojureScript", "CoffeeScript", "JavaScript", "PureScript", "TypeScript", "NaN", "NaNs", "OAuth", "GraphQL", "OCaml", "OpenGL", "OpenMP", "OpenSSH", "OpenSSL", "OpenStreetMap", "OpenDNS", "WebGL", "TensorFlow", "TrueType", "iOS", "macOS", "FreeBSD", "TeX", "LaTeX", "BibTeX", "BibLaTeX", "MinGW", "CamelCase", ].iter().map(ToString::to_string).collect()), /// Lint: TOO_MANY_ARGUMENTS. /// /// The maximum number of argument a function or method can have (too_many_arguments_threshold: u64 = 7), /// Lint: TYPE_COMPLEXITY. /// /// The maximum complexity a type can have (type_complexity_threshold: u64 = 250), /// Lint: MANY_SINGLE_CHAR_NAMES. /// /// The maximum number of single char bindings a scope may have (single_char_binding_names_threshold: u64 = 4), /// Lint: BOXED_LOCAL, USELESS_VEC. /// /// The maximum size of objects (in bytes) that will be linted. Larger objects are ok on the heap (too_large_for_stack: u64 = 200), /// Lint: ENUM_VARIANT_NAMES. /// /// The minimum number of enum variants for the lints about variant names to trigger (enum_variant_name_threshold: u64 = 3), /// Lint: LARGE_ENUM_VARIANT. /// /// The maximum size of an enum's variant to avoid box suggestion (enum_variant_size_threshold: u64 = 200), /// Lint: VERBOSE_BIT_MASK. /// /// The maximum allowed size of a bit mask before suggesting to use 'trailing_zeros' (verbose_bit_mask_threshold: u64 = 1), /// Lint: DECIMAL_LITERAL_REPRESENTATION. /// /// The lower bound for linting decimal literals (literal_representation_threshold: u64 = 16384), /// Lint: TRIVIALLY_COPY_PASS_BY_REF. 
/// /// The maximum size (in bytes) to consider a `Copy` type for passing by value instead of by reference. (trivial_copy_size_limit: Option<u64> = None), /// Lint: LARGE_TYPE_PASS_BY_MOVE. /// /// The minimum size (in bytes) to consider a type for passing by reference instead of by value. (pass_by_value_size_limit: u64 = 256), /// Lint: TOO_MANY_LINES. /// /// The maximum number of lines a function or method can have (too_many_lines_threshold: u64 = 100), /// Lint: LARGE_STACK_ARRAYS, LARGE_CONST_ARRAYS. /// /// The maximum allowed size for arrays on the stack (array_size_threshold: u64 = 512_000), /// Lint: VEC_BOX. /// /// The size of the boxed type in bytes, where boxing in a `Vec` is allowed (vec_box_size_threshold: u64 = 4096), /// Lint: TYPE_REPETITION_IN_BOUNDS. /// /// The maximum number of bounds a trait can have to be linted (max_trait_bounds: u64 = 3), /// Lint: STRUCT_EXCESSIVE_BOOLS. /// /// The maximum number of bool fields a struct can have (max_struct_bools: u64 = 3), /// Lint: FN_PARAMS_EXCESSIVE_BOOLS. /// /// The maximum number of bool parameters a function can have (max_fn_params_bools: u64 = 3), /// Lint: WILDCARD_IMPORTS. /// /// Whether to allow certain wildcard imports (prelude, super in tests). (warn_on_all_wildcard_imports: bool = false), /// Lint: DISALLOWED_METHODS. /// /// The list of disallowed methods, written as fully qualified paths. (disallowed_methods: Vec<crate::utils::conf::DisallowedMethod> = Vec::new()), /// Lint: DISALLOWED_TYPES. /// /// The list of disallowed types, written as fully qualified paths. (disallowed_types: Vec<crate::utils::conf::DisallowedType> = Vec::new()), /// Lint: UNREADABLE_LITERAL. /// /// Should the fraction of a decimal be linted to include separators. (unreadable_literal_lint_fractions: bool = true), /// Lint: UPPER_CASE_ACRONYMS. /// /// Enables verbose mode. 
Triggers if there is more than one uppercase char next to each other (upper_case_acronyms_aggressive: bool = false), /// Lint: _CARGO_COMMON_METADATA. /// /// For internal testing only, ignores the current `publish` settings in the Cargo manifest. (cargo_ignore_publish: bool = false), /// Lint: NONSTANDARD_MACRO_BRACES. /// /// Enforce the named macros always use the braces specified. /// /// A `MacroMatcher` can be added like so `{ name = "macro_name", brace = "(" }`. If the macro /// is could be used with a full path two `MacroMatcher`s have to be added one with the full path /// `crate_name::macro_name` and one with just the macro name. (standard_macro_braces: Vec<crate::nonstandard_macro_braces::MacroMatcher> = Vec::new()), /// Lint: MISSING_ENFORCED_IMPORT_RENAMES. /// /// The list of imports to always rename, a fully qualified path followed by the rename. (enforced_import_renames: Vec<crate::utils::conf::Rename> = Vec::new()), /// Lint: DISALLOWED_SCRIPT_IDENTS. /// /// The list of unicode scripts allowed to be used in the scope. (allowed_scripts: Vec<String> = ["Latin"].iter().map(ToString::to_string).collect()), /// Lint: NON_SEND_FIELDS_IN_SEND_TY. /// /// Whether to apply the raw pointer heuristic to determine if a type is `Send`. (enable_raw_pointer_heuristic_for_send: bool = true), /// Lint: INDEX_REFUTABLE_SLICE. /// /// When Clippy suggests using a slice pattern, this is the maximum number of elements allowed in /// the slice pattern that is suggested. If more elements would be necessary, the lint is suppressed. /// For example, `[_, _, _, e, ..]` is a slice pattern with 4 elements. (max_suggested_slice_pattern_length: u64 = 3), /// Lint: AWAIT_HOLDING_INVALID_TYPE (await_holding_invalid_types: Vec<crate::utils::conf::DisallowedType> = Vec::new()), } /// Search for the configuration file. pub fn lookup_conf_file() -> io::Result<Option<PathBuf>> { /// Possible filename to search for. 
const CONFIG_FILE_NAMES: [&str; 2] = [".clippy.toml", "clippy.toml"]; // Start looking for a config file in CLIPPY_CONF_DIR, or failing that, CARGO_MANIFEST_DIR. // If neither of those exist, use ".". let mut current = env::var_os("CLIPPY_CONF_DIR") .or_else(|| env::var_os("CARGO_MANIFEST_DIR")) .map_or_else(|| PathBuf::from("."), PathBuf::from); let mut found_config: Option<PathBuf> = None; loop { for config_file_name in &CONFIG_FILE_NAMES { if let Ok(config_file) = current.join(config_file_name).canonicalize() { match fs::metadata(&config_file) { Err(e) if e.kind() == io::ErrorKind::NotFound => {}, Err(e) => return Err(e), Ok(md) if md.is_dir() => {}, Ok(_) => { // warn if we happen to find two config files #8323 if let Some(ref found_config_) = found_config { eprintln!( "Using config file `{}`\nWarning: `{}` will be ignored.", found_config_.display(), config_file.display(), ); } else { found_config = Some(config_file); } }, } } } if found_config.is_some() { return Ok(found_config); } // If the current directory has no parent, we're done searching. if !current.pop() { return Ok(None); } } } /// Read the `toml` configuration file. /// /// In case of error, the function tries to continue as much as possible. pub fn read(path: &Path) -> TryConf { let content = match fs::read_to_string(path) { Err(e) => return TryConf::from_error(e), Ok(content) => content, }; toml::from_str(&content).unwrap_or_else(TryConf::from_error) }
39.762032
554
0.586107
f5656e73e7b2a8d939a5e003729259a514629c82
1,143
#[macro_use] extern crate diesel; use diesel::*; use diesel::sqlite::{Sqlite, SqliteQueryBuilder, SqliteConnection}; use diesel::backend::Backend; use diesel::types::{Integer, VarChar}; table! { users { id -> Integer, name -> VarChar, } } pub struct User { id: i32, name: String, } use diesel::types::FromSqlRow; impl<DB: Backend> Queryable<(Integer, VarChar), DB> for User where (i32, String): FromSqlRow<(Integer, VarChar), DB>, { type Row = (i32, String); fn build(row: Self::Row) -> Self { User { id: row.0, name: row.1, } } } pub struct NewUser(String); Insertable! { (users) pub struct NewUser(#[column_name(name)] String,); } fn main() { let connection = SqliteConnection::establish(":memory:").unwrap(); insert(&NewUser("Hello".into())) .into(users::table) .get_result::<User>(&connection); //~^ ERROR: SupportsReturningClause insert(&NewUser("Hello".into())) .into(users::table) .returning(users::name) .get_result(&connection); //~^ ERROR: SupportsReturningClause }
20.052632
70
0.59755
64687e2e6d78b8278a1525a34f69df1c6603b4da
16,990
// Copyright 2020-2021 Kevin Reid under the terms of the MIT License as detailed // in the accompanying file README.md or <https://opensource.org/licenses/MIT>. //! Draw 2D graphics and text into [`Space`]s, using a general adapter for //! [`embedded_graphics`]'s drawing algorithms. //! //! The [`VoxelBrush`] type can also be useful in direct 3D drawing. //! //! ## Coordinate system differences //! //! [`embedded_graphics`] uses coordinates which are different from ours in //! two ways that should be kept in mind when trying to align 2D and 3D shapes: //! //! * Text drawing presumes that +X is rightward and +Y is downward. Hence, //! text will be upside-down unless the chosen transformation inverts Y (or //! otherwise transforms to suit the orientation the text is being viewed from). //! * Coordinates are considered to refer to pixel centers rather than low corners, //! and rectangles have inclusive upper bounds (whereas our [`Grid`]s have //! exclusive upper bounds). use cgmath::{EuclideanSpace as _, Transform as _}; use embedded_graphics::geometry::{Dimensions, Point, Size}; use embedded_graphics::pixelcolor::{PixelColor, Rgb888, RgbColor}; use embedded_graphics::prelude::{DrawTarget, Drawable, Pixel}; use embedded_graphics::primitives::Rectangle; use std::borrow::{Borrow, Cow}; use std::marker::PhantomData; use std::ops::{Range, RangeInclusive}; /// Re-export the version of the [`embedded_graphics`] crate we're using. pub use embedded_graphics; use crate::block::{space_to_blocks, Block, BlockAttributes, Resolution}; use crate::math::{Face, GridCoordinate, GridMatrix, GridPoint, GridVector, Rgb, Rgba}; use crate::space::{Grid, SetCubeError, Space, SpacePhysics}; use crate::universe::Universe; /// Adapter to use a [`Space`] as a [`DrawTarget`]. /// Use [`Space::draw_target`] to construct this. /// /// `'s` is the lifetime of the [`Space`]. /// `C` is the “color” type to use, which should implement [`VoxelColor`]. 
pub struct DrawingPlane<'s, C> { space: &'s mut Space, /// Defines the coordinate transformation from 2D graphics to the [`Space`]. transform: GridMatrix, _color: PhantomData<fn(C)>, } impl<'s, C> DrawingPlane<'s, C> { pub(crate) fn new(space: &'s mut Space, transform: GridMatrix) -> Self { Self { space, transform, _color: PhantomData, } } // TODO: We should probably have ways to stack more transforms /// Converts 2D point to 3D point. Helper for multiple `impl DrawTarget`s. fn convert_point(&self, point: Point) -> GridPoint { self.transform .transform_point(GridPoint::new(point.x, point.y, 0)) } } /// A [`DrawingPlane`] accepts any color type that implements [`VoxelColor`]. impl<'c, C> DrawTarget for DrawingPlane<'_, C> where C: VoxelColor<'c>, { type Color = C; type Error = SetCubeError; fn draw_iter<I>(&mut self, pixels: I) -> Result<(), Self::Error> where I: IntoIterator<Item = Pixel<Self::Color>>, { for Pixel(point, color) in pixels.into_iter() { // TODO: Add a cache so we're not reconstructing the block for every single pixel. // (This is possible because `PixelColor: PartialEq`.) // TODO: Need to rotate the brush to match our transform color .into_blocks() .paint(self.space, self.convert_point(point))?; } Ok(()) } } impl<C> Dimensions for DrawingPlane<'_, C> { fn bounding_box(&self) -> Rectangle { // Invert our coordinate transform to bring the space's bounds into the drawing // coordinate system. If the transform fails, return a 1×1×1 placeholder rather // than panic. let grid = self .transform .inverse_transform() .and_then(|t| self.space.grid().transform(t)) .unwrap_or_else(|| Grid::for_block(1)); let size = grid.unsigned_size(); Rectangle { top_left: Point { x: grid.lower_bounds().x, y: grid.upper_bounds().y, }, size: Size { width: size.x, height: size.y, }, } } } /// Adapt embedded_graphics's most general color type to ours. // TODO: Also adapt the other types, so that if someone wants to use them they can. 
impl From<Rgb888> for Rgb { #[inline] fn from(color: Rgb888) -> Rgb { Rgba::from_srgb_32bit([color.r(), color.g(), color.b(), u8::MAX]).to_rgb() } } /// Allows “drawing” blocks onto a [`DrawingPlane`], a two-dimensional coordinate system /// established within a [`Space`]. /// /// Builds on [`PixelColor`] by defining a conversion to [`Block`]s and tracking depth. /// [`PixelColor::Raw`] is ignored; the supertrait is present only because /// [`embedded_graphics`] requires it. pub trait VoxelColor<'a>: PixelColor { /// Returns a corresponding [`VoxelBrush`], the most general form of blocky drawing. fn into_blocks(self) -> VoxelBrush<'a>; /// Returns the range of Z coordinates that the blocks painted by this color value /// occupy. /// /// The default implementation assumes there is no depth beyond the Z=0 plane. fn depth_range(self) -> RangeInclusive<GridCoordinate> { 0..=0 } } impl<'a> PixelColor for &'a Block { type Raw = (); } impl<'a> VoxelColor<'a> for &'a Block { fn into_blocks(self) -> VoxelBrush<'a> { VoxelBrush::new(vec![([0, 0, 0], self)]) } } impl PixelColor for Rgb { type Raw = (); } impl<'a> VoxelColor<'a> for Rgb { fn into_blocks(self) -> VoxelBrush<'a> { VoxelBrush::single(Block::from(self)) } } impl PixelColor for Rgba { type Raw = (); } impl<'a> VoxelColor<'a> for Rgba { fn into_blocks(self) -> VoxelBrush<'a> { VoxelBrush::single(Block::from(self)) } } /// Adapt embedded_graphics's most general color type to ours. impl<'a> VoxelColor<'a> for Rgb888 { fn into_blocks(self) -> VoxelBrush<'a> { VoxelBrush::single(Block::from(Rgb::from(self))) } } /// A shape of multiple blocks to “paint” with. This may be used to make copies of a /// simple shape, or to make multi-layered "2.5D" drawings using [`DrawingPlane`]. /// /// Note that only `&VoxelBrush` implements [`PixelColor`]; this is because `PixelColor` /// requires a value implementing [`Copy`]. 
#[derive(Clone, Debug, Eq, PartialEq)] pub struct VoxelBrush<'a>(Vec<(GridPoint, Cow<'a, Block>)>); impl<'a> VoxelBrush<'a> { /// Makes a [`VoxelBrush`] which paints the specified blocks at the specified offsets /// from each pixel position. // TODO: revisit what generics the parameter types have. pub fn new<V, B>(blocks: Vec<(V, B)>) -> Self where V: Into<GridPoint>, B: Into<Cow<'a, Block>>, { Self( blocks .into_iter() .map(|(offset, block)| (offset.into(), block.into())) .collect(), ) } /// Makes a [`VoxelBrush`] which paints the specified block with no offset. pub fn single<B>(block: B) -> Self where B: Into<Cow<'a, Block>>, { Self::new(vec![((0, 0, 0), block)]) } /// Copies each of the brush's blocks into the `Space` relative to the given origin /// point. /// /// Unlike [`Space::set`], it is not considered an error if any of the affected cubes /// fall outside of the `Space`'s bounds. pub fn paint(&self, space: &mut Space, origin: GridPoint) -> Result<(), SetCubeError> { for (offset, block) in &self.0 { ignore_out_of_bounds(space.set(origin + offset.to_vec(), Cow::borrow(block)))?; } Ok(()) } /// Converts a `&VoxelBrush` into a `VoxelBrush` that borrows it. pub fn as_ref(&self) -> VoxelBrush<'_> { VoxelBrush( self.0 .iter() .map(|(v, b)| (*v, Cow::Borrowed(b.as_ref()))) .collect(), ) } /// Converts a `VoxelBrush` with borrowed blocks to one with owned blocks. pub fn into_owned(self) -> VoxelBrush<'static> { VoxelBrush( self.0 .into_iter() .map(|(v, b)| (v, Cow::Owned(b.into_owned()))) .collect(), ) } /// Add the given offset to the offset of each blocks, offsetting everything drawn. pub fn translate<V: Into<GridVector>>(mut self, offset: V) -> Self { let offset = offset.into(); for (block_offset, _) in self.0.iter_mut() { // TODO: use explicitly checked add for a good error? 
*block_offset += offset; } self } } impl<'a> PixelColor for &'a VoxelBrush<'a> { type Raw = (); } impl<'a> VoxelColor<'a> for &'a VoxelBrush<'a> { fn into_blocks(self) -> VoxelBrush<'a> { self.as_ref() } fn depth_range(self) -> RangeInclusive<GridCoordinate> { let zs = self.0.iter().map(|&(GridPoint { z, .. }, _)| z); let min = zs.clone().fold(0, GridCoordinate::min); let max = zs.fold(0, GridCoordinate::max); min..=max } } /// Converts the return value of [`Space::set`] to the return value of /// [`DrawTarget::draw_pixel`], by making out-of-bounds not an error. fn ignore_out_of_bounds(result: Result<bool, SetCubeError>) -> Result<(), SetCubeError> { match result { Ok(_) => Ok(()), // Drawing out of bounds is not an error. Err(SetCubeError::OutOfBounds { .. }) => Ok(()), Err(e) => Err(e), } } /// Generate a set of blocks which together display the given [`Drawable`] which may be /// larger than one block. /// /// `z` specifies the origin z-coordinate within the blocks. /// `z_range` specifies the range which is available for drawing; keeping this small /// increases performance due to not processing many empty voxels. /// /// Returns a `Space` containing all the blocks properly arranged, or an error if reading /// the `Drawable`'s color-blocks fails. pub fn draw_to_blocks<'c, D, C>( universe: &mut Universe, resolution: Resolution, z: GridCoordinate, z_range: Range<GridCoordinate>, attributes: BlockAttributes, object: &D, ) -> Result<Space, SetCubeError> where D: Drawable<Color = C> + Dimensions, C: VoxelColor<'c>, { assert!(z_range.contains(&z)); let bbox = object.bounding_box(); let top_left_2d = bbox.top_left; let bottom_right_2d = bbox.bottom_right().unwrap_or(top_left_2d); // Compute corners as Grid knows them. Note that the Y coordinate is flipped because // for text drawing, embedded_graphics assumes a Y-down coordinate system. 
// TODO: Instead, apply matrix transform to bounds let drawing_grid = Grid::from_lower_upper( [top_left_2d.x, -bottom_right_2d.y, z_range.start], [bottom_right_2d.x, -top_left_2d.y, z_range.end], ); if false { dbg!(top_left_2d, bottom_right_2d, drawing_grid); } let mut drawing_space = Space::builder(drawing_grid) .physics(SpacePhysics::DEFAULT_FOR_BLOCK) .build_empty(); object.draw(&mut drawing_space.draw_target(GridMatrix::from_origin( [0, 0, z], Face::PX, Face::NY, Face::PZ, )))?; Ok(space_to_blocks( resolution, attributes, // TODO: give caller control over name used universe.insert_anonymous(drawing_space), ) .unwrap()) } #[cfg(test)] mod tests { use super::*; use crate::content::make_some_blocks; use crate::math::Rgba; use crate::raytracer::print_space; use crate::universe::Universe; use embedded_graphics::primitives::{Primitive, PrimitiveStyle, Rectangle}; /// Test using a particular color type with [`DrawingPlane`]. fn test_color_drawing<'c, C>(color_value: C, expected_block: &Block) where C: VoxelColor<'c>, { let mut space = Space::empty_positive(100, 100, 100); let mut display = space.draw_target(GridMatrix::from_translation([1, 2, 4])); Pixel(Point::new(2, 3), color_value) .draw(&mut display) .unwrap(); assert_eq!(space[(3, 5, 4)], *expected_block); } #[test] fn draw_with_block_ref() { let [block] = make_some_blocks(); test_color_drawing(&block, &block); } #[test] fn draw_with_eg_rgb888() { // Note that there is a conversion from sRGB to linear. 
test_color_drawing( Rgb888::new(0, 127, 255), &Rgba::new(0.0, 0.21223073, 1.0, 1.0).into(), ); } #[test] fn draw_with_our_rgb() { let color = Rgb::new(0.73, 0.27, 0.11); test_color_drawing(color, &color.into()); } #[test] fn draw_with_our_rgba() { let color = Rgba::new(0.73, 0.27, 0.11, 0.9); test_color_drawing(color, &color.into()); } #[test] fn draw_with_brush() -> Result<(), SetCubeError> { let [block_0, block_1] = make_some_blocks(); let mut space = Space::empty_positive(100, 100, 100); let brush = VoxelBrush::new(vec![((0, 0, 0), &block_0), ((0, 1, 1), &block_1)]); Pixel(Point::new(2, 3), &brush) .draw(&mut space.draw_target(GridMatrix::from_translation([0, 0, 4])))?; assert_eq!(&space[(2, 3, 4)], &block_0); assert_eq!(&space[(2, 4, 5)], &block_1); Ok(()) } #[test] fn draw_out_of_bounds_is_ok() -> Result<(), SetCubeError> { let mut space = Space::empty_positive(100, 100, 100); // This should not fail with SetCubeError::OutOfBounds Pixel(Point::new(-10, 0), Rgb888::new(0, 127, 255)) .draw(&mut space.draw_target(GridMatrix::from_translation([0, 0, 4])))?; Ok(()) } #[test] #[ignore] fn draw_set_failure() { todo!("test a case where a SetCubeError is propagated"); } fn a_primitive_style() -> PrimitiveStyle<Rgba> { PrimitiveStyle::with_fill(a_primitive_color()) } /// Cube color corresponding to a_primitive_style(). fn a_primitive_color() -> Rgba { Rgba::new(0.0, 0.5, 1.5, 1.0) } #[test] fn draw_to_blocks_bounds_one_block() { let resolution: GridCoordinate = 16; let z = 4; let mut universe = Universe::new(); let drawable = Rectangle::with_corners(Point::new(0, 0), Point::new(2, 3)) .into_styled(a_primitive_style()); let space = draw_to_blocks( &mut universe, resolution as Resolution, z, z..z + 1, BlockAttributes::default(), &drawable, ) .unwrap(); assert_eq!(space.grid(), Grid::new((0, -1, 0), (1, 1, 1))); if let Block::Recur { space: block_space_ref, offset, .. } = &space[(0, -1, 0)] { // TODO: This printing does not produce a useful result; fix it. 
print_space(&*block_space_ref.borrow(), (0., 0., -1.)); assert_eq!(*offset, GridPoint::new(0, -resolution, 0)); assert_eq!( block_space_ref.borrow()[(0, -2, z)].color(), a_primitive_color() ); } else { panic!("not a recursive block"); } } #[test] fn draw_to_blocks_bounds_negative_coords_one_block() { let resolution: GridCoordinate = 16; let z = 4; let mut universe = Universe::new(); let drawable = Rectangle::with_corners(Point::new(-3, -2), Point::new(0, 0)) .into_styled(a_primitive_style()); let space = draw_to_blocks( &mut universe, resolution as Resolution, z, z..z + 1, BlockAttributes::default(), &drawable, ) .unwrap(); assert_eq!(space.grid(), Grid::new((-1, 0, 0), (1, 1, 1))); if let Block::Recur { space: block_space_ref, offset, .. } = &space[(-1, 0, 0)] { print_space(&*block_space_ref.borrow(), (0., 0., -1.)); assert_eq!(*offset, GridPoint::new(-resolution, 0, 0)); assert_eq!( block_space_ref.borrow()[(-2, 1, z)].color(), a_primitive_color() ); } else { panic!("not a recursive block"); } } #[test] fn voxel_brush_single() { let [block] = make_some_blocks(); assert_eq!( VoxelBrush::single(&block), VoxelBrush::new(vec![((0, 0, 0), &block)]), ); } #[test] fn voxel_brush_translate() { let [block] = make_some_blocks(); assert_eq!( VoxelBrush::new(vec![((1, 2, 3), &block)]).translate((10, 20, 30)), VoxelBrush::new(vec![((11, 22, 33), &block)]), ); } }
33.183594
94
0.589759
336deaec83310ee4486e24c5bf8b2506a952725f
2,292
use crate::cli::cli::{pre_exec, Colors, Palette}; use clap::{App, Arg, ArgMatches}; use fancy_regex::Regex; pub struct Cmd {} impl Cmd { pub fn new() -> App<'static> { App::new("env") .args(&[ Arg::new("ignore-environment").long("ignore-environment").short('i').about("start with an empty environment"), Arg::new("null").long("null").short('0').about("end each output line with NUL, not newline"), Arg::new("unset").long("unset").short('u').takes_value(true).about("remove variable from the environment"), Arg::new("chdir").long("chdir").short('C').takes_value(true).about("change working directory to DIR"), Arg::new("split-string").long("split-string").short('S').takes_value(true).about("process and split S into separate arguments; used to pass multiple arguments on shebang lines"), Arg::new("block-signal").long("block-signal").takes_value(true).about("block delivery of SIG signal(s) to COMMAND"), Arg::new("default-signal").long("default-signal").takes_value(true).about("reset handling of SIG signal(s) to the default"), Arg::new("ignore-signal").long("ignore-signal").takes_value(true).about("set handling of SIG signals(s) to do nothing"), Arg::new("list-signal-handling").long("list-signal-handling").about("list non default signal handling to stderr"), Arg::new("debug").long("debug").short('v').about("print verbose information for each processing step"), Arg::new("help").long("help").about("display this help and exit"), Arg::new("version").long("version").about("output version information and exit"), ]) .about("env") } pub fn parse(_app: &ArgMatches) { // print!("{:?}", app); pre_exec(Cmd::palette()); } fn palette() -> Vec<Palette<'static>> { vec![ // Main Palette { regexp: Regex::new(r#"^([^=]+)(=)(.*)$"#).unwrap(), colors: vec![ &Colors::Default, &Colors::Cyan, &Colors::White, &Colors::Yellow, ], }, ] } }
48.765957
194
0.552356
16ad7e09f001f917b1a95b38f83a10be69ff493c
426
pub mod ascii_vec_sequence; pub mod slice_sequence; pub mod two_bit_vec_sequence; pub mod vec_sequence_store; /// The default genome type that achieves a good balance between speed and size. pub type DefaultGenome = two_bit_vec_sequence::TwoBitVectorGenome; /// The default genome subsequence type that achieves a good balance between speed and size. pub type DefaultSubGenome = two_bit_vec_sequence::TwoBitVectorSubGenome;
38.727273
92
0.828638
87f01eed607b2a220ed9cb24d85b7ad3b96bcabd
2,034
#![doc = "generated by AutoRust 0.1.0"] #[cfg(feature = "package-2018-01")] pub mod package_2018_01; use azure_core::setters; #[cfg(all(feature = "package-2018-01", not(feature = "no-default-version")))] pub use package_2018_01::{models, operations}; pub fn config( http_client: std::sync::Arc<dyn azure_core::HttpClient>, token_credential: Box<dyn azure_core::TokenCredential>, ) -> OperationConfigBuilder { OperationConfigBuilder { http_client, base_path: None, token_credential, token_credential_resource: None, } } pub struct OperationConfigBuilder { http_client: std::sync::Arc<dyn azure_core::HttpClient>, base_path: Option<String>, token_credential: Box<dyn azure_core::TokenCredential>, token_credential_resource: Option<String>, } impl OperationConfigBuilder { setters! { base_path : String => Some (base_path) , token_credential_resource : String => Some (token_credential_resource) , } pub fn build(self) -> OperationConfig { OperationConfig { http_client: self.http_client, base_path: self.base_path.unwrap_or("https://management.azure.com".to_owned()), token_credential: Some(self.token_credential), token_credential_resource: self.token_credential_resource.unwrap_or("https://management.azure.com/".to_owned()), } } } pub struct OperationConfig { http_client: std::sync::Arc<dyn azure_core::HttpClient>, base_path: String, token_credential: Option<Box<dyn azure_core::TokenCredential>>, token_credential_resource: String, } impl OperationConfig { pub fn http_client(&self) -> &dyn azure_core::HttpClient { self.http_client.as_ref() } pub fn base_path(&self) -> &str { self.base_path.as_str() } pub fn token_credential(&self) -> Option<&dyn azure_core::TokenCredential> { self.token_credential.as_deref() } pub fn token_credential_resource(&self) -> &str { self.token_credential_resource.as_str() } }
36.981818
130
0.692724
8973852caa86b351d594d5ae1e730198ade33fe6
105,947
// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use attributes; use back::bytecode::{self, RLIB_BYTECODE_EXTENSION}; use back::lto::{self, ThinBuffer, SerializedModule}; use back::link::{self, get_linker, remove}; use base; use consts; use memmap; use rustc_incremental::{copy_cgu_workproducts_to_incr_comp_cache_dir, in_incr_comp_dir, in_incr_comp_dir_sess}; use rustc::dep_graph::{WorkProduct, WorkProductId, WorkProductFileKind}; use rustc::dep_graph::cgu_reuse_tracker::CguReuseTracker; use rustc::middle::cstore::EncodedMetadata; use rustc::session::config::{self, OutputFilenames, OutputType, Passes, Sanitizer, Lto}; use rustc::session::Session; use rustc::util::nodemap::FxHashMap; use time_graph::{self, TimeGraph, Timeline}; use llvm::{self, DiagnosticInfo, PassManager, SMDiagnostic}; use llvm_util; use {CodegenResults, ModuleCodegen, CompiledModule, ModuleKind, // ModuleLlvm, CachedModuleCodegen}; use CrateInfo; use rustc::hir::def_id::{CrateNum, LOCAL_CRATE}; use rustc::ty::TyCtxt; use rustc::util::common::{time_ext, time_depth, set_time_depth, print_time_passes_entry}; use rustc_fs_util::{path2cstr, link_or_copy}; use rustc_data_structures::small_c_str::SmallCStr; use rustc_data_structures::svh::Svh; use rustc_codegen_utils::command::Command; use rustc_codegen_utils::linker::LinkerInfo; use rustc_codegen_utils::symbol_export::ExportedSymbols; use errors::{self, Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId}; use errors::emitter::{Emitter}; use syntax::attr; use syntax::ext::hygiene::Mark; use syntax_pos::MultiSpan; use 
syntax_pos::symbol::Symbol; use type_::Type; use context::{is_pie_binary, get_reloc_model}; use common::{C_bytes_in_context, val_ty}; use jobserver::{Client, Acquired}; use rustc_demangle; use std::any::Any; use std::ffi::{CString, CStr}; use std::fs; use std::io::{self, Write}; use std::mem; use std::path::{Path, PathBuf}; use std::str; use std::sync::Arc; use std::sync::mpsc::{channel, Sender, Receiver}; use std::slice; use std::time::Instant; use std::thread; use libc::{c_uint, c_void, c_char, size_t}; pub const RELOC_MODEL_ARGS : [(&str, llvm::RelocMode); 7] = [ ("pic", llvm::RelocMode::PIC), ("static", llvm::RelocMode::Static), ("default", llvm::RelocMode::Default), ("dynamic-no-pic", llvm::RelocMode::DynamicNoPic), ("ropi", llvm::RelocMode::ROPI), ("rwpi", llvm::RelocMode::RWPI), ("ropi-rwpi", llvm::RelocMode::ROPI_RWPI), ]; pub const CODE_GEN_MODEL_ARGS: &[(&str, llvm::CodeModel)] = &[ ("small", llvm::CodeModel::Small), ("kernel", llvm::CodeModel::Kernel), ("medium", llvm::CodeModel::Medium), ("large", llvm::CodeModel::Large), ]; pub const TLS_MODEL_ARGS : [(&str, llvm::ThreadLocalMode); 4] = [ ("global-dynamic", llvm::ThreadLocalMode::GeneralDynamic), ("local-dynamic", llvm::ThreadLocalMode::LocalDynamic), ("initial-exec", llvm::ThreadLocalMode::InitialExec), ("local-exec", llvm::ThreadLocalMode::LocalExec), ]; const PRE_THIN_LTO_BC_EXT: &str = "pre-thin-lto.bc"; pub fn llvm_err(handler: &errors::Handler, msg: &str) -> FatalError { match llvm::last_error() { Some(err) => handler.fatal(&format!("{}: {}", msg, err)), None => handler.fatal(&msg), } } pub fn write_output_file( handler: &errors::Handler, target: &'ll llvm::TargetMachine, pm: &llvm::PassManager<'ll>, m: &'ll llvm::Module, output: &Path, file_type: llvm::FileType) -> Result<(), FatalError> { unsafe { let output_c = path2cstr(output); let result = llvm::LLVMRustWriteOutputFile(target, pm, m, output_c.as_ptr(), file_type); if result.into_result().is_err() { let msg = format!("could not write output 
to {}", output.display()); Err(llvm_err(handler, &msg)) } else { Ok(()) } } } fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel { match optimize { config::OptLevel::No => llvm::CodeGenOptLevel::None, config::OptLevel::Less => llvm::CodeGenOptLevel::Less, config::OptLevel::Default => llvm::CodeGenOptLevel::Default, config::OptLevel::Aggressive => llvm::CodeGenOptLevel::Aggressive, _ => llvm::CodeGenOptLevel::Default, } } fn get_llvm_opt_size(optimize: config::OptLevel) -> llvm::CodeGenOptSize { match optimize { config::OptLevel::Size => llvm::CodeGenOptSizeDefault, config::OptLevel::SizeMin => llvm::CodeGenOptSizeAggressive, _ => llvm::CodeGenOptSizeNone, } } pub fn create_target_machine( sess: &Session, find_features: bool, ) -> &'static mut llvm::TargetMachine { target_machine_factory(sess, find_features)().unwrap_or_else(|err| { llvm_err(sess.diagnostic(), &err).raise() }) } // If find_features is true this won't access `sess.crate_types` by assuming // that `is_pie_binary` is false. When we discover LLVM target features // `sess.crate_types` is uninitialized so we cannot access it. 
pub fn target_machine_factory(sess: &Session, find_features: bool) -> Arc<dyn Fn() -> Result<&'static mut llvm::TargetMachine, String> + Send + Sync> { let reloc_model = get_reloc_model(sess); let opt_level = get_llvm_opt_level(sess.opts.optimize); let use_softfp = sess.opts.cg.soft_float; let ffunction_sections = sess.target.target.options.function_sections; let fdata_sections = ffunction_sections; let code_model_arg = sess.opts.cg.code_model.as_ref().or( sess.target.target.options.code_model.as_ref(), ); let code_model = match code_model_arg { Some(s) => { match CODE_GEN_MODEL_ARGS.iter().find(|arg| arg.0 == s) { Some(x) => x.1, _ => { sess.err(&format!("{:?} is not a valid code model", code_model_arg)); sess.abort_if_errors(); bug!(); } } } None => llvm::CodeModel::None, }; let features = attributes::llvm_target_features(sess).collect::<Vec<_>>(); let mut singlethread = sess.target.target.options.singlethread; // On the wasm target once the `atomics` feature is enabled that means that // we're no longer single-threaded, or otherwise we don't want LLVM to // lower atomic operations to single-threaded operations. 
if singlethread && sess.target.target.llvm_target.contains("wasm32") && features.iter().any(|s| *s == "+atomics") { singlethread = false; } let triple = SmallCStr::new(&sess.target.target.llvm_target); let cpu = SmallCStr::new(llvm_util::target_cpu(sess)); let features = features.join(","); let features = CString::new(features).unwrap(); let is_pie_binary = !find_features && is_pie_binary(sess); let trap_unreachable = sess.target.target.options.trap_unreachable; let emit_stack_size_section = sess.opts.debugging_opts.emit_stack_sizes; let asm_comments = sess.asm_comments(); Arc::new(move || { let tm = unsafe { llvm::LLVMRustCreateTargetMachine( triple.as_ptr(), cpu.as_ptr(), features.as_ptr(), code_model, reloc_model, opt_level, use_softfp, is_pie_binary, ffunction_sections, fdata_sections, trap_unreachable, singlethread, asm_comments, emit_stack_size_section, ) }; tm.ok_or_else(|| { format!("Could not create LLVM TargetMachine for triple: {}", triple.to_str().unwrap()) }) }) } /// Module-specific configuration for `optimize_and_codegen`. pub struct ModuleConfig { /// Names of additional optimization passes to run. passes: Vec<String>, /// Some(level) to optimize at a certain level, or None to run /// absolutely no optimizations (used for the metadata module). pub opt_level: Option<llvm::CodeGenOptLevel>, /// Some(level) to optimize binary size, or None to not affect program size. opt_size: Option<llvm::CodeGenOptSize>, pgo_gen: Option<String>, pgo_use: String, // Flags indicating which outputs to produce. pub emit_pre_thin_lto_bc: bool, emit_no_opt_bc: bool, emit_bc: bool, emit_bc_compressed: bool, emit_lto_bc: bool, emit_ir: bool, emit_asm: bool, emit_obj: bool, // Miscellaneous flags. These are mostly copied from command-line // options. 
    // NOTE(review): the lines below are the tail of the `ModuleConfig` struct,
    // whose opening (and the emit_* / pgo_* fields initialized in `new` below)
    // is above this chunk. Only comments are added here.
    pub verify_llvm_ir: bool,
    no_prepopulate_passes: bool,
    no_builtins: bool,
    time_passes: bool,
    vectorize_loop: bool,
    vectorize_slp: bool,
    merge_functions: bool,
    inline_threshold: Option<usize>,
    // Instead of creating an object file by doing LLVM codegen, just
    // make the object file bitcode. Provides easy compatibility with
    // emscripten's ecc compiler, when used as the linker.
    obj_is_bitcode: bool,
    no_integrated_as: bool,
    embed_bitcode: bool,
    embed_bitcode_marker: bool,
}

impl ModuleConfig {
    /// Create a config carrying only the given list of extra LLVM passes;
    /// every other option starts out disabled/unset. Callers flip individual
    /// flags afterwards (see `set_flags` and the `--emit` handling in
    /// `start_async_codegen`).
    fn new(passes: Vec<String>) -> ModuleConfig {
        ModuleConfig {
            passes,
            opt_level: None,
            opt_size: None,

            pgo_gen: None,
            pgo_use: String::new(),

            emit_no_opt_bc: false,
            emit_pre_thin_lto_bc: false,
            emit_bc: false,
            emit_bc_compressed: false,
            emit_lto_bc: false,
            emit_ir: false,
            emit_asm: false,
            emit_obj: false,
            obj_is_bitcode: false,
            embed_bitcode: false,
            embed_bitcode_marker: false,
            no_integrated_as: false,

            verify_llvm_ir: false,
            no_prepopulate_passes: false,
            no_builtins: false,
            time_passes: false,
            vectorize_loop: false,
            vectorize_slp: false,
            merge_functions: false,
            inline_threshold: None
        }
    }

    /// Derive the per-session flags (verification, vectorization, bitcode
    /// embedding, ...) from the command-line options and target defaults.
    fn set_flags(&mut self, sess: &Session, no_builtins: bool) {
        self.verify_llvm_ir = sess.verify_llvm_ir();
        self.no_prepopulate_passes = sess.opts.cg.no_prepopulate_passes;
        self.no_builtins = no_builtins || sess.target.target.options.no_builtins;
        self.time_passes = sess.time_passes();
        self.inline_threshold = sess.opts.cg.inline_threshold;
        // Cross-language LTO also forces bitcode-as-object-file output.
        self.obj_is_bitcode = sess.target.target.options.obj_is_bitcode ||
                              sess.opts.debugging_opts.cross_lang_lto.enabled();
        let embed_bitcode = sess.target.target.options.embed_bitcode ||
                            sess.opts.debugging_opts.embed_bitcode;
        if embed_bitcode {
            match sess.opts.optimize {
                // At -O0/-O1 only an (empty) marker section is embedded,
                // mirroring clang's `-fembed-bitcode` behavior.
                config::OptLevel::No |
                config::OptLevel::Less => {
                    self.embed_bitcode_marker = embed_bitcode;
                }
                _ => self.embed_bitcode = embed_bitcode,
            }
        }

        // Copy what clang does by turning on loop vectorization at O2 and
        // slp vectorization at O3. Otherwise configure other optimization aspects
        // of this pass manager builder.
        // Turn off vectorization for emscripten, as it's not very well supported.
        self.vectorize_loop = !sess.opts.cg.no_vectorize_loops &&
                              (sess.opts.optimize == config::OptLevel::Default ||
                               sess.opts.optimize == config::OptLevel::Aggressive) &&
                              !sess.target.target.options.is_like_emscripten;

        self.vectorize_slp = !sess.opts.cg.no_vectorize_slp &&
                             sess.opts.optimize == config::OptLevel::Aggressive &&
                             !sess.target.target.options.is_like_emscripten;

        self.merge_functions = sess.opts.optimize == config::OptLevel::Default ||
                               sess.opts.optimize == config::OptLevel::Aggressive;
    }

    /// True if any requested output requires serializing the module to
    /// LLVM bitcode (used by `optimize` to decide on ThinLTO buffers).
    pub fn bitcode_needed(&self) -> bool {
        self.emit_bc || self.obj_is_bitcode || self.emit_bc_compressed || self.embed_bitcode
    }
}

/// Assembler name and command used by codegen when no_integrated_as is enabled
struct AssemblerCommand {
    name: PathBuf,
    cmd: Command,
}

/// Additional resources used by optimize_and_codegen (not module specific)
#[derive(Clone)]
pub struct CodegenContext {
    // Resources needed when running LTO
    pub time_passes: bool,
    pub lto: Lto,
    pub no_landing_pads: bool,
    pub save_temps: bool,
    pub fewer_names: bool,
    pub exported_symbols: Option<Arc<ExportedSymbols>>,
    pub opts: Arc<config::Options>,
    pub crate_types: Vec<config::CrateType>,
    pub each_linked_rlib_for_lto: Vec<(CrateNum, PathBuf)>,
    output_filenames: Arc<OutputFilenames>,
    // Per-module-kind configs; selected through `config()` below.
    regular_module_config: Arc<ModuleConfig>,
    metadata_module_config: Arc<ModuleConfig>,
    allocator_module_config: Arc<ModuleConfig>,
    // Factory producing fresh TargetMachines (one per worker/pass as needed).
    pub tm_factory: Arc<dyn Fn() -> Result<&'static mut llvm::TargetMachine, String> + Send + Sync>,
    pub msvc_imps_needed: bool,
    pub target_pointer_width: String,
    debuginfo: config::DebugInfo,

    // Number of cgus excluding the allocator/metadata modules
    pub total_cgus: usize,
    // Handler to use for diagnostics produced during codegen.
    pub diag_emitter: SharedEmitter,
    // LLVM passes added by plugins.
    pub plugin_passes: Vec<String>,
    // LLVM optimizations for which we want to print remarks.
    pub remark: Passes,
    // Worker thread number
    pub worker: usize,
    // The incremental compilation session directory, or None if we are not
    // compiling incrementally
    pub incr_comp_session_dir: Option<PathBuf>,
    // Used to update CGU re-use information during the thinlto phase.
    pub cgu_reuse_tracker: CguReuseTracker,
    // Channel back to the main control thread to send messages to
    coordinator_send: Sender<Box<dyn Any + Send>>,
    // A reference to the TimeGraph so we can register timings. None means that
    // measuring is disabled.
    time_graph: Option<TimeGraph>,
    // The assembler command if no_integrated_as option is enabled, None otherwise
    assembler_cmd: Option<Arc<AssemblerCommand>>,
}

impl CodegenContext {
    /// Build a diagnostic `Handler` that routes everything through the shared
    /// emitter back to the coordinator/main thread.
    pub fn create_diag_handler(&self) -> Handler {
        Handler::with_emitter(true, false, Box::new(self.diag_emitter.clone()))
    }

    /// Select the `ModuleConfig` matching the given module kind.
    pub(crate) fn config(&self, kind: ModuleKind) -> &ModuleConfig {
        match kind {
            ModuleKind::Regular => &self.regular_module_config,
            ModuleKind::Metadata => &self.metadata_module_config,
            ModuleKind::Allocator => &self.allocator_module_config,
        }
    }

    /// When `-C save-temps` is active, snapshot the module's bitcode to a
    /// `<cgu>.<name>.bc` temp file; a no-op otherwise.
    pub(crate) fn save_temp_bitcode(&self, module: &ModuleCodegen, name: &str) {
        if !self.save_temps {
            return
        }
        unsafe {
            let ext = format!("{}.bc", name);
            let cgu = Some(&module.name[..]);
            let path = self.output_filenames.temp_path_ext(&ext, cgu);
            let cstr = path2cstr(&path);
            let llmod = module.module_llvm.llmod();
            llvm::LLVMWriteBitcodeToFile(llmod, cstr.as_ptr());
        }
    }
}

/// RAII registration of the LLVM diagnostic + inline-asm callbacks for one
/// context: `new` leaks a `(cgcx, handler)` pair as the callback payload and
/// `Drop` unregisters the callbacks and reclaims the box.
pub struct DiagnosticHandlers<'a> {
    // Raw pointer handed to LLVM as the callback `user` payload.
    data: *mut (&'a CodegenContext, &'a Handler),
    llcx: &'a llvm::Context,
}

impl<'a> DiagnosticHandlers<'a> {
    /// Install `inline_asm_handler` and `diagnostic_handler` on `llcx`,
    /// passing a heap-allocated `(cgcx, handler)` pair as their context.
    pub fn new(cgcx: &'a CodegenContext,
               handler: &'a Handler,
               llcx: &'a llvm::Context) -> Self {
        let data = Box::into_raw(Box::new((cgcx, handler)));
        unsafe {
            llvm::LLVMRustSetInlineAsmDiagnosticHandler(llcx, inline_asm_handler, data as *mut _);
            llvm::LLVMContextSetDiagnosticHandler(llcx, diagnostic_handler, data as *mut _);
        }
        DiagnosticHandlers { data, llcx }
    }
}

impl<'a> Drop for DiagnosticHandlers<'a> {
    fn drop(&mut self) {
        use std::ptr::null_mut;
        unsafe {
            // Deregister first so LLVM can no longer call into `data`, then
            // reconstitute and free the box leaked in `new`.
            llvm::LLVMRustSetInlineAsmDiagnosticHandler(self.llcx, inline_asm_handler, null_mut());
            llvm::LLVMContextSetDiagnosticHandler(self.llcx, diagnostic_handler, null_mut());
            drop(Box::from_raw(self.data));
        }
    }
}

/// Forward an inline-asm error (identified by LLVM's `cookie`) to the shared
/// diagnostic emitter.
unsafe extern "C" fn report_inline_asm<'a, 'b>(cgcx: &'a CodegenContext,
                                               msg: &'b str,
                                               cookie: c_uint) {
    cgcx.diag_emitter.inline_asm_error(cookie as u32, msg.to_owned());
}

/// C callback invoked by LLVM for inline-asm diagnostics; `user` is the
/// `(cgcx, handler)` pair installed by `DiagnosticHandlers::new`.
unsafe extern "C" fn inline_asm_handler(diag: &SMDiagnostic,
                                        user: *const c_void,
                                        cookie: c_uint) {
    if user.is_null() {
        return
    }
    let (cgcx, _) = *(user as *const (&CodegenContext, &Handler));

    let msg = llvm::build_string(|s| llvm::LLVMRustWriteSMDiagnosticToString(diag, s))
        .expect("non-UTF8 SMDiagnostic");

    report_inline_asm(cgcx, &msg, cookie);
}

/// C callback invoked by LLVM for general diagnostics: inline-asm errors are
/// forwarded, optimization remarks are printed when requested via `-C remark`,
/// PGO/linker infos become warnings, anything unknown is ignored.
unsafe extern "C" fn diagnostic_handler(info: &DiagnosticInfo, user: *mut c_void) {
    if user.is_null() {
        return
    }
    let (cgcx, diag_handler) = *(user as *const (&CodegenContext, &Handler));

    match llvm::diagnostic::Diagnostic::unpack(info) {
        llvm::diagnostic::InlineAsm(inline) => {
            report_inline_asm(cgcx,
                              &llvm::twine_to_string(inline.message),
                              inline.cookie);
        }

        llvm::diagnostic::Optimization(opt) => {
            let enabled = match cgcx.remark {
                Passes::All => true,
                Passes::Some(ref v) => v.iter().any(|s| *s == opt.pass_name),
            };

            if enabled {
                diag_handler.note_without_error(&format!("optimization {} for {} at {}:{}:{}: {}",
                                                opt.kind.describe(),
                                                opt.pass_name,
                                                opt.filename,
                                                opt.line,
                                                opt.column,
                                                opt.message));
            }
        }
        llvm::diagnostic::PGO(diagnostic_ref) |
        llvm::diagnostic::Linker(diagnostic_ref) => {
            let msg = llvm::build_string(|s| {
                llvm::LLVMRustWriteDiagnosticInfoToString(diagnostic_ref, s)
            }).expect("non-UTF8 diagnostic");
            diag_handler.warn(&msg);
        }
        llvm::diagnostic::UnknownDiagnostic(..) => {},
    }
}

// Unsafe due to LLVM calls.
/// Run the per-module LLVM optimization pipeline: build function- and
/// module-level pass managers (mirroring clang's PassManagerBuilder setup),
/// add user/plugin passes, then run both managers over `llmod`.
///
/// Unsafe due to LLVM calls.
unsafe fn optimize(cgcx: &CodegenContext,
                   diag_handler: &Handler,
                   module: &ModuleCodegen,
                   config: &ModuleConfig,
                   timeline: &mut Timeline)
    -> Result<(), FatalError>
{
    let llmod = module.module_llvm.llmod();
    let llcx = &*module.module_llvm.llcx;
    let tm = &*module.module_llvm.tm;
    // Keeps the diagnostic callbacks registered for the whole function.
    let _handlers = DiagnosticHandlers::new(cgcx, diag_handler, llcx);

    let module_name = module.name.clone();
    let module_name = Some(&module_name[..]);

    if config.emit_no_opt_bc {
        // Snapshot the unoptimized bitcode (requested via -C save-temps).
        let out = cgcx.output_filenames.temp_path_ext("no-opt.bc", module_name);
        let out = path2cstr(&out);
        llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
    }

    if config.opt_level.is_some() {
        // Create the two optimizing pass managers. These mirror what clang
        // does, and are populated by LLVM's default PassManagerBuilder.
        // Each manager has a different set of passes, but they also share
        // some common passes.
        let fpm = llvm::LLVMCreateFunctionPassManagerForModule(llmod);
        let mpm = llvm::LLVMCreatePassManager();

        {
            // If we're verifying or linting, add them to the function pass
            // manager.
            // `addpass` looks a pass up by name and routes it to the proper
            // manager; returns false if LLVM doesn't know the pass.
            let addpass = |pass_name: &str| {
                let pass_name = SmallCStr::new(pass_name);
                let pass = match llvm::LLVMRustFindAndCreatePass(pass_name.as_ptr()) {
                    Some(pass) => pass,
                    None => return false,
                };
                let pass_manager = match llvm::LLVMRustPassKind(pass) {
                    llvm::PassKind::Function => &*fpm,
                    llvm::PassKind::Module => &*mpm,
                    llvm::PassKind::Other => {
                        diag_handler.err("Encountered LLVM pass kind we can't handle");
                        return true
                    },
                };
                llvm::LLVMRustAddPass(pass_manager, pass);
                true
            };

            if config.verify_llvm_ir { assert!(addpass("verify")); }

            // Some options cause LLVM bitcode to be emitted, which uses ThinLTOBuffers, so we need
            // to make sure we run LLVM's NameAnonGlobals pass when emitting bitcode; otherwise
            // we'll get errors in LLVM.
            let using_thin_buffers = config.bitcode_needed();
            let mut have_name_anon_globals_pass = false;
            if !config.no_prepopulate_passes {
                llvm::LLVMRustAddAnalysisPasses(tm, fpm, llmod);
                llvm::LLVMRustAddAnalysisPasses(tm, mpm, llmod);
                let opt_level = config.opt_level.unwrap_or(llvm::CodeGenOptLevel::None);
                let prepare_for_thin_lto = cgcx.lto == Lto::Thin || cgcx.lto == Lto::ThinLocal ||
                    (cgcx.lto != Lto::Fat && cgcx.opts.debugging_opts.cross_lang_lto.enabled());
                // The ThinLTO preparation passes include NameAnonGlobals.
                have_name_anon_globals_pass = have_name_anon_globals_pass || prepare_for_thin_lto;
                if using_thin_buffers && !prepare_for_thin_lto {
                    assert!(addpass("name-anon-globals"));
                    have_name_anon_globals_pass = true;
                }
                with_llvm_pmb(llmod, &config, opt_level, prepare_for_thin_lto, &mut |b| {
                    llvm::LLVMPassManagerBuilderPopulateFunctionPassManager(b, fpm);
                    llvm::LLVMPassManagerBuilderPopulateModulePassManager(b, mpm);
                })
            }

            for pass in &config.passes {
                if !addpass(pass) {
                    diag_handler.warn(&format!("unknown pass `{}`, ignoring", pass));
                }
                if pass == "name-anon-globals" {
                    have_name_anon_globals_pass = true;
                }
            }

            for pass in &cgcx.plugin_passes {
                if !addpass(pass) {
                    // Unlike user passes above, missing plugin passes are a
                    // hard error.
                    diag_handler.err(&format!("a plugin asked for LLVM pass \
                                               `{}` but LLVM does not \
                                               recognize it", pass));
                }
                if pass == "name-anon-globals" {
                    have_name_anon_globals_pass = true;
                }
            }

            if using_thin_buffers && !have_name_anon_globals_pass {
                // As described above, this will probably cause an error in LLVM
                if config.no_prepopulate_passes {
                    diag_handler.err("The current compilation is going to use thin LTO buffers \
                                      without running LLVM's NameAnonGlobals pass. \
                                      This will likely cause errors in LLVM. Consider adding \
                                      -C passes=name-anon-globals to the compiler command line.");
                } else {
                    bug!("We are using thin LTO buffers without running the NameAnonGlobals pass. \
                          This will likely cause errors in LLVM and should never happen.");
                }
            }
        }

        diag_handler.abort_if_errors();

        // Finally, run the actual optimization passes
        time_ext(config.time_passes,
                 None,
                 &format!("llvm function passes [{}]", module_name.unwrap()),
                 || {
            llvm::LLVMRustRunFunctionPassManager(fpm, llmod)
        });
        timeline.record("fpm");
        time_ext(config.time_passes,
                 None,
                 &format!("llvm module passes [{}]", module_name.unwrap()),
                 || {
            llvm::LLVMRunPassManager(mpm, llmod)
        });

        // Deallocate managers that we're now done with
        llvm::LLVMDisposePassManager(fpm);
        llvm::LLVMDisposePassManager(mpm);
    }
    Ok(())
}

/// Run (Thin)LTO over the collected modules and turn the results into
/// costed `WorkItem`s: freshly-LTO'd modules plus cheap "copy from cache"
/// jobs for re-usable artifacts.
fn generate_lto_work(cgcx: &CodegenContext,
                     modules: Vec<ModuleCodegen>,
                     import_only_modules: Vec<(SerializedModule, WorkProduct)>)
    -> Vec<(WorkItem, u64)>
{
    let mut timeline = cgcx.time_graph.as_ref().map(|tg| {
        tg.start(CODEGEN_WORKER_TIMELINE,
                 CODEGEN_WORK_PACKAGE_KIND,
                 "generate lto")
    }).unwrap_or(Timeline::noop());
    let (lto_modules, copy_jobs) = lto::run(cgcx, modules, import_only_modules, &mut timeline)
        .unwrap_or_else(|e| e.raise());

    let lto_modules = lto_modules.into_iter().map(|module| {
        let cost = module.cost();
        (WorkItem::LTO(module), cost)
    });

    let copy_jobs = copy_jobs.into_iter().map(|wp| {
        // Copy jobs are essentially free, hence cost 0.
        (WorkItem::CopyPostLtoArtifacts(CachedModuleCodegen {
            name: wp.cgu_name.clone(),
            source: wp,
        }), 0)
    });

    lto_modules.chain(copy_jobs).collect()
}

/// Lower an optimized module to its final artifacts (bitcode, IR, asm,
/// object file) according to `config`, possibly invoking an external
/// assembler when the integrated one is disabled.
///
/// Unsafe due to LLVM calls.
unsafe fn codegen(cgcx: &CodegenContext,
                  diag_handler: &Handler,
                  module: ModuleCodegen,
                  config: &ModuleConfig,
                  timeline: &mut Timeline)
    -> Result<CompiledModule, FatalError>
{
    timeline.record("codegen");
    {
        let llmod = module.module_llvm.llmod();
        let llcx = &*module.module_llvm.llcx;
        let tm = &*module.module_llvm.tm;
        let module_name = module.name.clone();
        let module_name = Some(&module_name[..]);
        let handlers = DiagnosticHandlers::new(cgcx, diag_handler, llcx);

        if cgcx.msvc_imps_needed {
            create_msvc_imps(cgcx, llcx, llmod);
        }

        // A codegen-specific pass manager is used to generate object
        // files for an LLVM module.
        //
        // Apparently each of these pass managers is a one-shot kind of
        // thing, so we create a new one for each type of output. The
        // pass manager passed to the closure should be ensured to not
        // escape the closure itself, and the manager should only be
        // used once.
        unsafe fn with_codegen<'ll, F, R>(tm: &'ll llvm::TargetMachine,
                                          llmod: &'ll llvm::Module,
                                          no_builtins: bool,
                                          f: F) -> R
            where F: FnOnce(&'ll mut PassManager<'ll>) -> R,
        {
            let cpm = llvm::LLVMCreatePassManager();
            llvm::LLVMRustAddAnalysisPasses(tm, cpm, llmod);
            llvm::LLVMRustAddLibraryInfo(cpm, llmod, no_builtins);
            f(cpm)
        }

        // If we don't have the integrated assembler, then we need to emit asm
        // from LLVM and use `gcc` to create the object file.
        let asm_to_obj = config.emit_obj && config.no_integrated_as;

        // Change what we write and cleanup based on whether obj files are
        // just llvm bitcode. In that case write bitcode, and possibly
        // delete the bitcode if it wasn't requested. Don't generate the
        // machine code, instead copy the .o file from the .bc
        let write_bc = config.emit_bc || config.obj_is_bitcode;
        let rm_bc = !config.emit_bc && config.obj_is_bitcode;
        let write_obj = config.emit_obj && !config.obj_is_bitcode && !asm_to_obj;
        let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode;

        let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);
        let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name);

        if write_bc || config.emit_bc_compressed || config.embed_bitcode {
            // Serialize once into a ThinLTO buffer and reuse the bytes for
            // every bitcode-flavored output below.
            let thin = ThinBuffer::new(llmod);
            let data = thin.data();
            timeline.record("make-bc");

            if write_bc {
                if let Err(e) = fs::write(&bc_out, data) {
                    diag_handler.err(&format!("failed to write bytecode: {}", e));
                }
                timeline.record("write-bc");
            }

            if config.embed_bitcode {
                embed_bitcode(cgcx, llcx, llmod, Some(data));
                timeline.record("embed-bc");
            }

            if config.emit_bc_compressed {
                let dst = bc_out.with_extension(RLIB_BYTECODE_EXTENSION);
                let data = bytecode::encode(&module.name, data);
                if let Err(e) = fs::write(&dst, data) {
                    diag_handler.err(&format!("failed to write bytecode: {}", e));
                }
                timeline.record("compress-bc");
            }
        } else if config.embed_bitcode_marker {
            embed_bitcode(cgcx, llcx, llmod, None);
        }

        time_ext(config.time_passes, None, &format!("codegen passes [{}]", module_name.unwrap()),
             || -> Result<(), FatalError> {
            if config.emit_ir {
                let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
                let out = path2cstr(&out);

                // Callback handed to LLVM to demangle Rust symbol names when
                // printing IR; writes into LLVM's buffer and returns the
                // number of bytes written (0 = leave the name as-is).
                extern "C" fn demangle_callback(input_ptr: *const c_char,
                                                input_len: size_t,
                                                output_ptr: *mut c_char,
                                                output_len: size_t) -> size_t {
                    let input = unsafe {
                        slice::from_raw_parts(input_ptr as *const u8, input_len as usize)
                    };

                    let input = match str::from_utf8(input) {
                        Ok(s) => s,
                        Err(_) => return 0,
                    };

                    let output = unsafe {
                        slice::from_raw_parts_mut(output_ptr as *mut u8, output_len as usize)
                    };
                    let mut cursor = io::Cursor::new(output);

                    let demangled = match rustc_demangle::try_demangle(input) {
                        Ok(d) => d,
                        Err(_) => return 0,
                    };

                    if let Err(_) = write!(cursor, "{:#}", demangled) {
                        // Possible only if provided buffer is not big enough
                        return 0;
                    }

                    cursor.position() as size_t
                }

                with_codegen(tm, llmod, config.no_builtins, |cpm| {
                    llvm::LLVMRustPrintModule(cpm, llmod, out.as_ptr(), demangle_callback);
                    llvm::LLVMDisposePassManager(cpm);
                });
                timeline.record("ir");
            }

            if config.emit_asm || asm_to_obj {
                let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);

                // We can't use the same module for asm and binary output, because that triggers
                // various errors like invalid IR or broken binaries, so we might have to clone the
                // module to produce the asm output
                let llmod = if config.emit_obj {
                    llvm::LLVMCloneModule(llmod)
                } else {
                    llmod
                };
                with_codegen(tm, llmod, config.no_builtins, |cpm| {
                    write_output_file(diag_handler, tm, cpm, llmod, &path,
                                      llvm::FileType::AssemblyFile)
                })?;
                timeline.record("asm");
            }

            if write_obj {
                with_codegen(tm, llmod, config.no_builtins, |cpm| {
                    write_output_file(diag_handler, tm, cpm, llmod, &obj_out,
                                      llvm::FileType::ObjectFile)
                })?;
                timeline.record("obj");
            } else if asm_to_obj {
                // External-assembler path: assemble the .s we just wrote.
                let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
                run_assembler(cgcx, diag_handler, &assembly, &obj_out);
                timeline.record("asm_to_obj");

                if !config.emit_asm && !cgcx.save_temps {
                    drop(fs::remove_file(&assembly));
                }
            }

            Ok(())
        })?;

        if copy_bc_to_obj {
            debug!("copying bitcode {:?} to obj {:?}", bc_out, obj_out);
            if let Err(e) = link_or_copy(&bc_out, &obj_out) {
                diag_handler.err(&format!("failed to copy bitcode to object file: {}", e));
            }
        }

        if rm_bc {
            debug!("removing_bitcode {:?}", bc_out);
            if let Err(e) = fs::remove_file(&bc_out) {
                diag_handler.err(&format!("failed to remove bitcode: {}", e));
            }
        }

        // Explicitly unregister the diagnostic callbacks before the borrows
        // of llcx end.
        drop(handlers);
    }
    Ok(module.into_compiled_module(config.emit_obj,
                                   config.emit_bc,
                                   config.emit_bc_compressed,
                                   &cgcx.output_filenames))
}

/// Embed the bitcode of an LLVM module in the LLVM module itself.
///
/// This is done primarily for iOS where it appears to be standard to compile C
/// code at least with `-fembed-bitcode` which creates two sections in the
/// executable:
///
/// * __LLVM,__bitcode
/// * __LLVM,__cmdline
///
/// It appears *both* of these sections are necessary to get the linker to
/// recognize what's going on. For us though we just always throw in an empty
/// cmdline section.
///
/// Furthermore debug/O1 builds don't actually embed bitcode but rather just
/// embed an empty section.
///
/// Basically all of this is us attempting to follow in the footsteps of clang
/// on iOS. See #35968 for lots more info.
/// Create the `rustc.embedded.module` / `rustc.embedded.cmdline` globals that
/// carry the (possibly empty) bitcode, placed in Mach-O `__LLVM,*` sections on
/// Apple targets and `.llvmbc`/`.llvmcmd` elsewhere. See the doc comment above.
unsafe fn embed_bitcode(cgcx: &CodegenContext,
                        llcx: &llvm::Context,
                        llmod: &llvm::Module,
                        bitcode: Option<&[u8]>) {
    // `None` bitcode produces an empty marker section (debug/O1 builds).
    let llconst = C_bytes_in_context(llcx, bitcode.unwrap_or(&[]));
    let llglobal = llvm::LLVMAddGlobal(
        llmod,
        val_ty(llconst),
        "rustc.embedded.module\0".as_ptr() as *const _,
    );
    llvm::LLVMSetInitializer(llglobal, llconst);
    let is_apple = cgcx.opts.target_triple.triple().contains("-ios") ||
                   cgcx.opts.target_triple.triple().contains("-darwin");
    let section = if is_apple {
        "__LLVM,__bitcode\0"
    } else {
        ".llvmbc\0"
    };
    llvm::LLVMSetSection(llglobal, section.as_ptr() as *const _);
    llvm::LLVMRustSetLinkage(llglobal, llvm::Linkage::PrivateLinkage);
    llvm::LLVMSetGlobalConstant(llglobal, llvm::True);

    // Always-empty cmdline section (see doc comment above).
    let llconst = C_bytes_in_context(llcx, &[]);
    let llglobal = llvm::LLVMAddGlobal(
        llmod,
        val_ty(llconst),
        "rustc.embedded.cmdline\0".as_ptr() as *const _,
    );
    llvm::LLVMSetInitializer(llglobal, llconst);
    let section = if is_apple {
        "__LLVM,__cmdline\0"
    } else {
        ".llvmcmd\0"
    };
    llvm::LLVMSetSection(llglobal, section.as_ptr() as *const _);
    llvm::LLVMRustSetLinkage(llglobal, llvm::Linkage::PrivateLinkage);
}

/// Final artifacts of the whole codegen session, handed back to linking.
pub(crate) struct CompiledModules {
    pub modules: Vec<CompiledModule>,
    pub metadata_module: CompiledModule,
    pub allocator_module: Option<CompiledModule>,
}

/// True when the session emits both an rlib and an executable: the rlib then
/// needs compressed bitcode embedded for downstream LTO.
fn need_crate_bitcode_for_rlib(sess: &Session) -> bool {
    sess.crate_types.borrow().contains(&config::CrateType::Rlib) &&
    sess.opts.output_types.contains_key(&OutputType::Exe)
}

/// Pre-ThinLTO bitcode is only kept when compiling incrementally under
/// (local) ThinLTO.
fn need_pre_thin_lto_bitcode_for_incr_comp(sess: &Session) -> bool {
    if sess.opts.incremental.is_none() {
        return false
    }

    match sess.lto() {
        Lto::Fat |
        Lto::No => false,
        Lto::Thin |
        Lto::ThinLocal => true,
    }
}

/// Set up the per-module-kind `ModuleConfig`s from the session's `--emit`
/// requests and debug options, spawn the coordinator thread, and return the
/// `OngoingCodegen` handle the driver polls for results.
pub fn start_async_codegen(tcx: TyCtxt,
                           time_graph: Option<TimeGraph>,
                           metadata: EncodedMetadata,
                           coordinator_receive: Receiver<Box<dyn Any + Send>>,
                           total_cgus: usize)
                           -> OngoingCodegen {
    let sess = tcx.sess;
    let crate_name = tcx.crate_name(LOCAL_CRATE);
    let crate_hash = tcx.crate_hash(LOCAL_CRATE);
    let no_builtins = attr::contains_name(&tcx.hir.krate().attrs, "no_builtins");
    let subsystem = attr::first_attr_value_str_by_name(&tcx.hir.krate().attrs,
                                                       "windows_subsystem");
    let windows_subsystem = subsystem.map(|subsystem| {
        if subsystem != "windows" && subsystem != "console" {
            tcx.sess.fatal(&format!("invalid windows subsystem `{}`, only \
                                     `windows` and `console` are allowed",
                                    subsystem));
        }
        subsystem.to_string()
    });

    let linker_info = LinkerInfo::new(tcx);
    let crate_info = CrateInfo::new(tcx);

    // Figure out what we actually need to build.
    let mut modules_config = ModuleConfig::new(sess.opts.cg.passes.clone());
    let mut metadata_config = ModuleConfig::new(vec![]);
    let mut allocator_config = ModuleConfig::new(vec![]);

    if let Some(ref sanitizer) = sess.opts.debugging_opts.sanitizer {
        match *sanitizer {
            Sanitizer::Address => {
                modules_config.passes.push("asan".to_owned());
                modules_config.passes.push("asan-module".to_owned());
            }
            Sanitizer::Memory => {
                modules_config.passes.push("msan".to_owned())
            }
            Sanitizer::Thread => {
                modules_config.passes.push("tsan".to_owned())
            }
            _ => {}
        }
    }

    if sess.opts.debugging_opts.profile {
        modules_config.passes.push("insert-gcov-profiling".to_owned())
    }

    modules_config.pgo_gen = sess.opts.debugging_opts.pgo_gen.clone();
    modules_config.pgo_use = sess.opts.debugging_opts.pgo_use.clone();

    modules_config.opt_level = Some(get_llvm_opt_level(sess.opts.optimize));
    modules_config.opt_size = Some(get_llvm_opt_size(sess.opts.optimize));

    // Save all versions of the bytecode if we're saving our temporaries.
    if sess.opts.cg.save_temps {
        modules_config.emit_no_opt_bc = true;
        modules_config.emit_pre_thin_lto_bc = true;
        modules_config.emit_bc = true;
        modules_config.emit_lto_bc = true;
        metadata_config.emit_bc = true;
        allocator_config.emit_bc = true;
    }

    // Emit compressed bitcode files for the crate if we're emitting an rlib.
    // Whenever an rlib is created, the bitcode is inserted into the archive in
    // order to allow LTO against it.
    if need_crate_bitcode_for_rlib(sess) {
        modules_config.emit_bc_compressed = true;
        allocator_config.emit_bc_compressed = true;
    }

    modules_config.emit_pre_thin_lto_bc =
        need_pre_thin_lto_bitcode_for_incr_comp(sess);

    modules_config.no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
        tcx.sess.target.target.options.no_integrated_as;

    for output_type in sess.opts.output_types.keys() {
        match *output_type {
            OutputType::Bitcode => { modules_config.emit_bc = true; }
            OutputType::LlvmAssembly => { modules_config.emit_ir = true; }
            OutputType::Assembly => {
                modules_config.emit_asm = true;
                // If we're not using the LLVM assembler, this function
                // could be invoked specially with output_type_assembly, so
                // in this case we still want the metadata object file.
                if !sess.opts.output_types.contains_key(&OutputType::Assembly) {
                    metadata_config.emit_obj = true;
                    allocator_config.emit_obj = true;
                }
            }
            OutputType::Object => { modules_config.emit_obj = true; }
            OutputType::Metadata => { metadata_config.emit_obj = true; }
            OutputType::Exe => {
                modules_config.emit_obj = true;
                metadata_config.emit_obj = true;
                allocator_config.emit_obj = true;
            },
            OutputType::Mir => {}
            OutputType::DepInfo => {}
        }
    }

    modules_config.set_flags(sess, no_builtins);
    metadata_config.set_flags(sess, no_builtins);
    allocator_config.set_flags(sess, no_builtins);

    // Exclude metadata and allocator modules from time_passes output, since
    // they throw off the "LLVM passes" measurement.
    metadata_config.time_passes = false;
    allocator_config.time_passes = false;

    let (shared_emitter, shared_emitter_main) = SharedEmitter::new();
    let (codegen_worker_send, codegen_worker_receive) = channel();

    let coordinator_thread = start_executing_work(tcx,
                                                  &crate_info,
                                                  shared_emitter,
                                                  codegen_worker_send,
                                                  coordinator_receive,
                                                  total_cgus,
                                                  sess.jobserver.clone(),
                                                  time_graph.clone(),
                                                  Arc::new(modules_config),
                                                  Arc::new(metadata_config),
                                                  Arc::new(allocator_config));

    OngoingCodegen {
        crate_name,
        crate_hash,
        metadata,
        windows_subsystem,
        linker_info,
        crate_info,

        time_graph,
        coordinator_send: tcx.tx_to_llvm_workers.lock().clone(),
        codegen_worker_receive,
        shared_emitter_main,
        future: coordinator_thread,
        output_filenames: tcx.output_filenames(LOCAL_CRATE),
    }
}

/// Record the object/bytecode files of every regular module as incremental
/// work products so a later compilation session can reuse them. Returns an
/// empty map when not compiling incrementally.
fn copy_all_cgu_workproducts_to_incr_comp_cache_dir(
    sess: &Session,
    compiled_modules: &CompiledModules,
) -> FxHashMap<WorkProductId, WorkProduct> {
    let mut work_products = FxHashMap::default();

    if sess.opts.incremental.is_none() {
        return work_products;
    }

    for module in compiled_modules.modules.iter().filter(|m| m.kind == ModuleKind::Regular) {
        let mut files = vec![];

        if let Some(ref path) = module.object {
            files.push((WorkProductFileKind::Object, path.clone()));
        }
        if let Some(ref path) = module.bytecode {
            files.push((WorkProductFileKind::Bytecode, path.clone()));
        }
        if let Some(ref path) = module.bytecode_compressed {
            files.push((WorkProductFileKind::BytecodeCompressed, path.clone()));
        }

        if let Some((id, product)) =
                copy_cgu_workproducts_to_incr_comp_cache_dir(sess, &module.name, &files) {
            work_products.insert(id, product);
        }
    }

    work_products
}

/// Copy per-CGU temp outputs to their user-visible names (single CGU case),
/// warn when multiple CGUs make a requested single output impossible, and
/// delete temporaries that are no longer needed.
fn produce_final_output_artifacts(sess: &Session,
                                  compiled_modules: &CompiledModules,
                                  crate_output: &OutputFilenames) {
    let mut user_wants_bitcode = false;
    let mut user_wants_objects = false;

    // Produce final compile outputs.
    let copy_gracefully = |from: &Path, to: &Path| {
        if let Err(e) = fs::copy(from, to) {
            sess.err(&format!("could not copy {:?} to {:?}: {}", from, to, e));
        }
    };

    let copy_if_one_unit = |output_type: OutputType,
                            keep_numbered: bool| {
        if compiled_modules.modules.len() == 1 {
            // 1) Only one codegen unit. In this case it's no difficulty
            //    to copy `foo.0.x` to `foo.x`.
            let module_name = Some(&compiled_modules.modules[0].name[..]);
            let path = crate_output.temp_path(output_type, module_name);
            copy_gracefully(&path,
                            &crate_output.path(output_type));
            if !sess.opts.cg.save_temps && !keep_numbered {
                // The user just wants `foo.x`, not `foo.#module-name#.x`.
                remove(sess, &path);
            }
        } else {
            let ext = crate_output.temp_path(output_type, None)
                                  .extension()
                                  .unwrap()
                                  .to_str()
                                  .unwrap()
                                  .to_owned();

            if crate_output.outputs.contains_key(&output_type) {
                // 2) Multiple codegen units, with `--emit foo=some_name`. We have
                //    no good solution for this case, so warn the user.
                sess.warn(&format!("ignoring emit path because multiple .{} files \
                                    were produced", ext));
            } else if crate_output.single_output_file.is_some() {
                // 3) Multiple codegen units, with `-o some_name`. We have
                //    no good solution for this case, so warn the user.
                sess.warn(&format!("ignoring -o because multiple .{} files \
                                    were produced", ext));
            } else {
                // 4) Multiple codegen units, but no explicit name. We
                //    just leave the `foo.0.x` files in place.
                // (We don't have to do any work in this case.)
            }
        }
    };

    // Flag to indicate whether the user explicitly requested bitcode.
    // Otherwise, we produced it only as a temporary output, and will need
    // to get rid of it.
    for output_type in crate_output.outputs.keys() {
        match *output_type {
            OutputType::Bitcode => {
                user_wants_bitcode = true;
                // Copy to .bc, but always keep the .0.bc. There is a later
                // check to figure out if we should delete .0.bc files, or keep
                // them for making an rlib.
                copy_if_one_unit(OutputType::Bitcode, true);
            }
            OutputType::LlvmAssembly => {
                copy_if_one_unit(OutputType::LlvmAssembly, false);
            }
            OutputType::Assembly => {
                copy_if_one_unit(OutputType::Assembly, false);
            }
            OutputType::Object => {
                user_wants_objects = true;
                copy_if_one_unit(OutputType::Object, true);
            }
            OutputType::Mir |
            OutputType::Metadata |
            OutputType::Exe |
            OutputType::DepInfo => {}
        }
    }

    // Clean up unwanted temporary files.

    // We create the following files by default:
    //  - #crate#.#module-name#.bc
    //  - #crate#.#module-name#.o
    //  - #crate#.crate.metadata.bc
    //  - #crate#.crate.metadata.o
    //  - #crate#.o (linked from crate.##.o)
    //  - #crate#.bc (copied from crate.##.bc)
    // We may create additional files if requested by the user (through
    // `-C save-temps` or `--emit=` flags).

    if !sess.opts.cg.save_temps {
        // Remove the temporary .#module-name#.o objects. If the user didn't
        // explicitly request bitcode (with --emit=bc), and the bitcode is not
        // needed for building an rlib, then we must remove .#module-name#.bc as
        // well.

        // Specific rules for keeping .#module-name#.bc:
        //  - If the user requested bitcode (`user_wants_bitcode`), and
        //    codegen_units > 1, then keep it.
        //  - If the user requested bitcode but codegen_units == 1, then we
        //    can toss .#module-name#.bc because we copied it to .bc earlier.
        //  - If we're not building an rlib and the user didn't request
        //    bitcode, then delete .#module-name#.bc.
        // If you change how this works, also update back::link::link_rlib,
        // where .#module-name#.bc files are (maybe) deleted after making an
        // rlib.
        let needs_crate_object = crate_output.outputs.contains_key(&OutputType::Exe);

        let keep_numbered_bitcode = user_wants_bitcode && sess.codegen_units() > 1;

        let keep_numbered_objects = needs_crate_object ||
                (user_wants_objects && sess.codegen_units() > 1);

        for module in compiled_modules.modules.iter() {
            if let Some(ref path) = module.object {
                if !keep_numbered_objects {
                    remove(sess, path);
                }
            }

            if let Some(ref path) = module.bytecode {
                if !keep_numbered_bitcode {
                    remove(sess, path);
                }
            }
        }

        if !user_wants_bitcode {
            if let Some(ref path) = compiled_modules.metadata_module.bytecode {
                remove(sess, &path);
            }

            if let Some(ref allocator_module) = compiled_modules.allocator_module {
                if let Some(ref path) = allocator_module.bytecode {
                    remove(sess, path);
                }
            }
        }
    }

    // We leave the following files around by default:
    //  - #crate#.o
    //  - #crate#.crate.metadata.o
    //  - #crate#.bc
    // These are used in linking steps and will be cleaned up afterward.
}

/// Currently a no-op; see the FIXME below.
pub(crate) fn dump_incremental_data(_codegen_results: &CodegenResults) {
    // FIXME(mw): This does not work at the moment because the situation has
    //            become more complicated due to incremental LTO. Now a CGU
    //            can have more than two caching states.
    // println!("[incremental] Re-using {} out of {} modules",
    //           codegen_results.modules.iter().filter(|m| m.pre_existing).count(),
    //           codegen_results.modules.len());
}

/// A unit of work dispatched to the codegen worker pool.
enum WorkItem {
    /// Optimize a newly codegened, totally unoptimized module.
    Optimize(ModuleCodegen),
    /// Copy the post-LTO artifacts from the incremental cache to the output
    /// directory.
    CopyPostLtoArtifacts(CachedModuleCodegen),
    /// Perform (Thin)LTO on the given module.
    LTO(lto::LtoModuleCodegen),
}

impl WorkItem {
    /// The module kind, used to pick the right `ModuleConfig`; LTO and
    /// cache-copy items are always regular modules.
    fn module_kind(&self) -> ModuleKind {
        match *self {
            WorkItem::Optimize(ref m) => m.kind,
            WorkItem::CopyPostLtoArtifacts(_) |
            WorkItem::LTO(_) => ModuleKind::Regular,
        }
    }

    /// Human-readable label for timing/diagnostic output.
    fn name(&self) -> String {
        match *self {
            WorkItem::Optimize(ref m) => format!("optimize: {}", m.name),
            WorkItem::CopyPostLtoArtifacts(ref m) => format!("copy post LTO artifacts: {}", m.name),
            WorkItem::LTO(ref m) => format!("lto: {}", m.name()),
        }
    }
}

/// Outcome of executing a `WorkItem`: either a finished module or one that
/// must go back to the coordinator for the LTO phase.
enum WorkItemResult {
    Compiled(CompiledModule),
    NeedsLTO(ModuleCodegen),
}

/// Dispatch a work item to the matching `execute_*_work_item` helper with the
/// config for its module kind.
fn execute_work_item(cgcx: &CodegenContext,
                     work_item: WorkItem,
                     timeline: &mut Timeline)
    -> Result<WorkItemResult, FatalError>
{
    let module_config = cgcx.config(work_item.module_kind());

    match work_item {
        WorkItem::Optimize(module) => {
            execute_optimize_work_item(cgcx, module, module_config, timeline)
        }
        WorkItem::CopyPostLtoArtifacts(module) => {
            execute_copy_from_cache_work_item(cgcx, module, module_config, timeline)
        }
        WorkItem::LTO(module) => {
            execute_lto_work_item(cgcx, module, module_config, timeline)
        }
    }
}

/// Optimize a freshly-codegened module, then either finish it via `codegen`
/// or report that it must be queued for the LTO phase.
fn execute_optimize_work_item(cgcx: &CodegenContext,
                              module: ModuleCodegen,
                              module_config: &ModuleConfig,
                              timeline: &mut Timeline)
    -> Result<WorkItemResult, FatalError>
{
    let diag_handler = cgcx.create_diag_handler();

    unsafe {
        optimize(cgcx, &diag_handler, &module, module_config, timeline)?;
    }

    let linker_does_lto = cgcx.opts.debugging_opts.cross_lang_lto.enabled();

    // After we've done the initial round of optimizations we need to
    // decide whether to synchronously codegen this module or ship it
    // back to the coordinator thread for further LTO processing (which
    // has to wait for all the initial modules to be optimized).
    //
    // Here we dispatch based on the `cgcx.lto` and kind of module we're
    // codegenning...
    let needs_lto = match cgcx.lto {
        Lto::No => false,

        // If the linker does LTO, we don't have to do it. Note that we
        // keep doing full LTO, if it is requested, as not to break the
        // assumption that the output will be a single module.
        Lto::Thin | Lto::ThinLocal if linker_does_lto => false,

        // Here we've got a full crate graph LTO requested. We ignore
        // this, however, if the crate type is only an rlib as there's
        // no full crate graph to process, that'll happen later.
        //
        // This use case currently comes up primarily for targets that
        // require LTO so the request for LTO is always unconditionally
        // passed down to the backend, but we don't actually want to do
        // anything about it yet until we've got a final product.
        Lto::Fat | Lto::Thin => {
            cgcx.crate_types.len() != 1 ||
                cgcx.crate_types[0] != config::CrateType::Rlib
        }

        // When we're automatically doing ThinLTO for multi-codegen-unit
        // builds we don't actually want to LTO the allocator modules if
        // it shows up. This is due to various linker shenanigans that
        // we'll encounter later.
        Lto::ThinLocal => {
            module.kind != ModuleKind::Allocator
        }
    };

    // Metadata modules never participate in LTO regardless of the lto
    // settings.
    let needs_lto = needs_lto && module.kind != ModuleKind::Metadata;

    if needs_lto {
        Ok(WorkItemResult::NeedsLTO(module))
    } else {
        let module = unsafe {
            codegen(cgcx, &diag_handler, module, module_config, timeline)?
        };
        Ok(WorkItemResult::Compiled(module))
    }
}

/// Reinstate a module's artifacts from the incremental cache by hard-linking
/// (or copying) each saved file to its expected output path, asserting that
/// the recovered set matches what the config says should exist.
fn execute_copy_from_cache_work_item(cgcx: &CodegenContext,
                                     module: CachedModuleCodegen,
                                     module_config: &ModuleConfig,
                                     _: &mut Timeline)
    -> Result<WorkItemResult, FatalError>
{
    let incr_comp_session_dir = cgcx.incr_comp_session_dir
                                    .as_ref()
                                    .unwrap();
    let mut object = None;
    let mut bytecode = None;
    let mut bytecode_compressed = None;
    for (kind, saved_file) in &module.source.saved_files {
        let obj_out = match kind {
            WorkProductFileKind::Object => {
                let path = cgcx.output_filenames.temp_path(OutputType::Object,
                                                           Some(&module.name));
                object = Some(path.clone());
                path
            }
            WorkProductFileKind::Bytecode => {
                let path = cgcx.output_filenames.temp_path(OutputType::Bitcode,
                                                           Some(&module.name));
                bytecode = Some(path.clone());
                path
            }
            WorkProductFileKind::BytecodeCompressed => {
                let path = cgcx.output_filenames.temp_path(OutputType::Bitcode,
                                                           Some(&module.name))
                    .with_extension(RLIB_BYTECODE_EXTENSION);
                bytecode_compressed = Some(path.clone());
                path
            }
        };
        let source_file = in_incr_comp_dir(&incr_comp_session_dir,
                                           &saved_file);
        debug!("copying pre-existing module `{}` from {:?} to {}",
               module.name,
               source_file,
               obj_out.display());
        if let Err(err) = link_or_copy(&source_file, &obj_out) {
            let diag_handler = cgcx.create_diag_handler();
            diag_handler.err(&format!("unable to copy {} to {}: {}",
                                      source_file.display(),
                                      obj_out.display(),
                                      err));
        }
    }

    assert_eq!(object.is_some(), module_config.emit_obj);
    assert_eq!(bytecode.is_some(), module_config.emit_bc);
    assert_eq!(bytecode_compressed.is_some(), module_config.emit_bc_compressed);

    Ok(WorkItemResult::Compiled(CompiledModule {
        name: module.name,
        kind: ModuleKind::Regular,
        object,
        bytecode,
        bytecode_compressed,
    }))
}

/// Run the LTO optimization for one module, then finish it via `codegen`.
/// NOTE(review): this definition continues on the next line of the file.
fn execute_lto_work_item(cgcx: &CodegenContext,
                         mut module: lto::LtoModuleCodegen,
                         module_config: &ModuleConfig,
                         timeline: &mut Timeline)
    -> Result<WorkItemResult, FatalError>
{
    let diag_handler = cgcx.create_diag_handler();

    unsafe {
        let module = module.optimize(cgcx, timeline)?;
        let
        module = codegen(cgcx, &diag_handler, module, module_config, timeline)?;
        Ok(WorkItemResult::Compiled(module))
    }
}

/// Messages exchanged over the coordinator channel between the main
/// thread, the coordinator thread, and the LLVM worker threads.
enum Message {
    // A jobserver token (or an error acquiring one) from the helper thread.
    Token(io::Result<Acquired>),
    // A worker finished optimizing a module that still has to go through LTO.
    NeedsLTO {
        result: ModuleCodegen,
        worker_id: usize,
    },
    // A worker finished completely (`Ok`) or panicked (`Err`).
    Done {
        result: Result<CompiledModule, ()>,
        worker_id: usize,
    },
    // The main thread codegened one CGU into an LLVM work item with the
    // given estimated processing cost.
    CodegenDone {
        llvm_work_item: WorkItem,
        cost: u64,
    },
    // A serialized module to be fed into LTO as an import-only input.
    AddImportOnlyModule {
        module_data: SerializedModule,
        work_product: WorkProduct,
    },
    // All CGUs have been codegened.
    CodegenComplete,
    // Coordinator -> main thread: please codegen another CGU.
    CodegenItem,
    // Codegen stopped early due to an error; only finish outstanding work.
    CodegenAborted,
}

// A diagnostic as forwarded from a worker thread to the main thread.
struct Diagnostic {
    msg: String,
    code: Option<DiagnosticId>,
    lvl: Level,
}

// What the main thread is currently doing on behalf of the coordinator.
#[derive(PartialEq, Clone, Copy, Debug)]
enum MainThreadWorkerState {
    Idle,
    Codegenning,
    LLVMing,
}

/// Spawn the coordinator thread that drives the parallel codegen/LLVM
/// pipeline; the returned handle eventually yields the compiled modules.
fn start_executing_work(tcx: TyCtxt,
                        crate_info: &CrateInfo,
                        shared_emitter: SharedEmitter,
                        codegen_worker_send: Sender<Message>,
                        coordinator_receive: Receiver<Box<dyn Any + Send>>,
                        total_cgus: usize,
                        jobserver: Client,
                        time_graph: Option<TimeGraph>,
                        modules_config: Arc<ModuleConfig>,
                        metadata_config: Arc<ModuleConfig>,
                        allocator_config: Arc<ModuleConfig>)
                        -> thread::JoinHandle<Result<CompiledModules, ()>> {
    let coordinator_send = tcx.tx_to_llvm_workers.lock().clone();
    let sess = tcx.sess;

    // Compute the set of symbols we need to retain when doing LTO (if we need to)
    let exported_symbols = {
        let mut exported_symbols = FxHashMap::default();

        let copy_symbols = |cnum| {
            let symbols = tcx.exported_symbols(cnum)
                             .iter()
                             .map(|&(s, lvl)| (s.symbol_name(tcx).to_string(), lvl))
                             .collect();
            Arc::new(symbols)
        };

        match sess.lto() {
            // No LTO: nothing needs to be retained.
            Lto::No => None,
            // Local ThinLTO only needs the local crate's exported symbols.
            Lto::ThinLocal => {
                exported_symbols.insert(LOCAL_CRATE, copy_symbols(LOCAL_CRATE));
                Some(Arc::new(exported_symbols))
            }
            // Full crate-graph LTO needs symbols from every crate.
            Lto::Fat | Lto::Thin => {
                exported_symbols.insert(LOCAL_CRATE, copy_symbols(LOCAL_CRATE));
                for &cnum in tcx.crates().iter() {
                    exported_symbols.insert(cnum, copy_symbols(cnum));
                }
                Some(Arc::new(exported_symbols))
            }
        }
    };

    // First up, convert our jobserver into a helper thread so we can use normal
    // mpsc channels to manage our messages and such.
// After we've requested tokens then we'll, when we can, // get tokens on `coordinator_receive` which will // get managed in the main loop below. let coordinator_send2 = coordinator_send.clone(); let helper = jobserver.into_helper_thread(move |token| { drop(coordinator_send2.send(Box::new(Message::Token(token)))); }).expect("failed to spawn helper thread"); let mut each_linked_rlib_for_lto = Vec::new(); drop(link::each_linked_rlib(sess, crate_info, &mut |cnum, path| { if link::ignored_for_lto(sess, crate_info, cnum) { return } each_linked_rlib_for_lto.push((cnum, path.to_path_buf())); })); let assembler_cmd = if modules_config.no_integrated_as { // HACK: currently we use linker (gcc) as our assembler let (linker, flavor) = link::linker_and_flavor(sess); let (name, mut cmd) = get_linker(sess, &linker, flavor); cmd.args(&sess.target.target.options.asm_args); Some(Arc::new(AssemblerCommand { name, cmd })) } else { None }; let cgcx = CodegenContext { crate_types: sess.crate_types.borrow().clone(), each_linked_rlib_for_lto, lto: sess.lto(), no_landing_pads: sess.no_landing_pads(), fewer_names: sess.fewer_names(), save_temps: sess.opts.cg.save_temps, opts: Arc::new(sess.opts.clone()), time_passes: sess.time_passes(), exported_symbols, plugin_passes: sess.plugin_llvm_passes.borrow().clone(), remark: sess.opts.cg.remark.clone(), worker: 0, incr_comp_session_dir: sess.incr_comp_session_dir_opt().map(|r| r.clone()), cgu_reuse_tracker: sess.cgu_reuse_tracker.clone(), coordinator_send, diag_emitter: shared_emitter.clone(), time_graph, output_filenames: tcx.output_filenames(LOCAL_CRATE), regular_module_config: modules_config, metadata_module_config: metadata_config, allocator_module_config: allocator_config, tm_factory: target_machine_factory(tcx.sess, false), total_cgus, msvc_imps_needed: msvc_imps_needed(tcx), target_pointer_width: tcx.sess.target.target.target_pointer_width.clone(), debuginfo: tcx.sess.opts.debuginfo, assembler_cmd, }; // This is the "main loop" of parallel 
    // work happening for parallel codegen.
    // It's here that we manage parallelism, schedule work, and work with
    // messages coming from clients.
    //
    // There are a few environmental pre-conditions that shape how the system
    // is set up:
    //
    // - Error reporting can only happen on the main thread because that's the
    //   only place where we have access to the compiler `Session`.
    // - LLVM work can be done on any thread.
    // - Codegen can only happen on the main thread.
    // - Each thread doing substantial work must be in possession of a `Token`
    //   from the `Jobserver`.
    // - The compiler process always holds one `Token`. Any additional `Tokens`
    //   have to be requested from the `Jobserver`.
    //
    // Error Reporting
    // ===============
    // The error reporting restriction is handled separately from the rest: We
    // set up a `SharedEmitter` that holds an open channel to the main thread.
    // When an error occurs on any thread, the shared emitter will send the
    // error message to the receiver main thread (`SharedEmitterMain`). The
    // main thread will periodically query this error message queue and emit
    // any error messages it has received. It might even abort compilation if
    // it has received a fatal error. In this case we rely on all other threads
    // being torn down automatically with the main thread.
    // Since the main thread will often be busy doing codegen work, error
    // reporting will be somewhat delayed, since the message queue can only be
    // checked in between two work packages.
    //
    // Work Processing Infrastructure
    // ==============================
    // The work processing infrastructure knows three major actors:
    //
    // - the coordinator thread,
    // - the main thread, and
    // - LLVM worker threads
    //
    // The coordinator thread is running a message loop. It instructs the main
    // thread about what work to do when, and it will spawn off LLVM worker
    // threads as open LLVM WorkItems become available.
    //
    // The job of the main thread is to codegen CGUs into LLVM work packages
    // (since the main thread is the only thread that can do this). The main
    // thread will block until it receives a message from the coordinator, upon
    // which it will codegen one CGU, send it to the coordinator and block
    // again. This way the coordinator can control what the main thread is
    // doing.
    //
    // The coordinator keeps a queue of LLVM WorkItems, and when a `Token` is
    // available, it will spawn off a new LLVM worker thread and let it process
    // that WorkItem. When an LLVM worker thread is done with its WorkItem,
    // it will just shut down, which also frees all resources associated with
    // the given LLVM module, and sends a message to the coordinator that the
    // WorkItem has been completed.
    //
    // Work Scheduling
    // ===============
    // The scheduler's goal is to minimize the time it takes to complete all
    // work there is, however, we also want to keep memory consumption low
    // if possible. These two goals are at odds with each other: If memory
    // consumption were not an issue, we could just let the main thread produce
    // LLVM WorkItems at full speed, assuring maximal utilization of
    // Tokens/LLVM worker threads. However, since codegen is usually faster
    // than LLVM processing, the queue of LLVM WorkItems would fill up and each
    // WorkItem potentially holds on to a substantial amount of memory.
    //
    // So the actual goal is to always produce just enough LLVM WorkItems as
    // not to starve our LLVM worker threads. That means, once we have enough
    // WorkItems in our queue, we can block the main thread, so it does not
    // produce more until we need them.
    //
    // Doing LLVM Work on the Main Thread
    // ----------------------------------
    // Since the main thread owns the compiler process's implicit `Token`, it is
    // wasteful to keep it blocked without doing any work. Therefore, what we do
    // in this case is: We spawn off an additional LLVM worker thread that helps
    // reduce the queue.
    // The work it is doing corresponds to the implicit
    // `Token`. The coordinator will mark the main thread as being busy with
    // LLVM work. (The actual work happens on another OS thread but we just care
    // about `Tokens`, not actual threads).
    //
    // When any LLVM worker thread finishes while the main thread is marked as
    // "busy with LLVM work", we can do a little switcheroo: We give the Token
    // of the just finished thread to the LLVM worker thread that is working on
    // behalf of the main thread's implicit Token, thus freeing up the main
    // thread again. The coordinator can then again decide what the main thread
    // should do. This allows the coordinator to make decisions at more points
    // in time.
    //
    // Striking a Balance between Throughput and Memory Consumption
    // ------------------------------------------------------------
    // Since our two goals, (1) use as many Tokens as possible and (2) keep
    // memory consumption as low as possible, are in conflict with each other,
    // we have to find a trade off between them. Right now, the goal is to keep
    // all workers busy, which means that no worker should find the queue empty
    // when it is ready to start.
    // How do we achieve this? Good question :) We actually never know how
    // many `Tokens` are potentially available so it's hard to say how much to
    // fill up the queue before switching the main thread to LLVM work. Also we
    // currently don't have a means to estimate how long a running LLVM worker
    // will still be busy with its current WorkItem. However, we know the
    // maximal count of available Tokens that makes sense (=the number of CPU
    // cores), so we can take a conservative guess. The heuristic we use here
    // is implemented in the `queue_full_enough()` function.
    //
    // Some Background on Jobservers
    // -----------------------------
    // It's worth also touching on the management of parallelism here.
    // We don't
    // want to just spawn a thread per work item because while that's optimal
    // parallelism it may overload a system with too many threads or violate our
    // configuration for the maximum amount of cpu to use for this process. To
    // manage this we use the `jobserver` crate.
    //
    // Job servers are an artifact of GNU make and are used to manage
    // parallelism between processes. A jobserver is a glorified IPC semaphore
    // basically. Whenever we want to run some work we acquire the semaphore,
    // and whenever we're done with that work we release the semaphore. In this
    // manner we can ensure that the maximum number of parallel workers is
    // capped at any one point in time.
    //
    // LTO and the coordinator thread
    // ------------------------------
    //
    // The final job the coordinator thread is responsible for is managing LTO
    // and how that works. When LTO is requested what we'll do is collect all
    // optimized LLVM modules into a local vector on the coordinator. Once all
    // modules have been codegened and optimized we hand this to the `lto`
    // module for further optimization. The `lto` module will return back a list
    // of more modules to work on, which the coordinator will continue to spawn
    // work for.
    //
    // Each LLVM module is automatically sent back to the coordinator for LTO if
    // necessary. There's already optimizations in place to avoid sending work
    // back to the coordinator if LTO isn't requested.
    return thread::spawn(move || {
        // We pretend to be within the top-level LLVM time-passes task here:
        set_time_depth(1);

        let max_workers = ::num_cpus::get();
        let mut worker_id_counter = 0;
        let mut free_worker_ids = Vec::new();
        // Hand out worker ids, preferring to recycle ids released by
        // finished workers before minting a fresh one.
        let mut get_worker_id = |free_worker_ids: &mut Vec<usize>| {
            if let Some(id) = free_worker_ids.pop() {
                id
            } else {
                let id = worker_id_counter;
                worker_id_counter += 1;
                id
            }
        };

        // This is where we collect codegen units that have gone all the way
        // through codegen and LLVM.
let mut compiled_modules = vec![]; let mut compiled_metadata_module = None; let mut compiled_allocator_module = None; let mut needs_lto = Vec::new(); let mut lto_import_only_modules = Vec::new(); let mut started_lto = false; let mut codegen_aborted = false; // This flag tracks whether all items have gone through codegens let mut codegen_done = false; // This is the queue of LLVM work items that still need processing. let mut work_items = Vec::<(WorkItem, u64)>::new(); // This are the Jobserver Tokens we currently hold. Does not include // the implicit Token the compiler process owns no matter what. let mut tokens = Vec::new(); let mut main_thread_worker_state = MainThreadWorkerState::Idle; let mut running = 0; let mut llvm_start_time = None; // Run the message loop while there's still anything that needs message // processing. Note that as soon as codegen is aborted we simply want to // wait for all existing work to finish, so many of the conditions here // only apply if codegen hasn't been aborted as they represent pending // work to be done. while !codegen_done || running > 0 || (!codegen_aborted && ( work_items.len() > 0 || needs_lto.len() > 0 || lto_import_only_modules.len() > 0 || main_thread_worker_state != MainThreadWorkerState::Idle )) { // While there are still CGUs to be codegened, the coordinator has // to decide how to utilize the compiler processes implicit Token: // For codegenning more CGU or for running them through LLVM. if !codegen_done { if main_thread_worker_state == MainThreadWorkerState::Idle { if !queue_full_enough(work_items.len(), running, max_workers) { // The queue is not full enough, codegen more items: if let Err(_) = codegen_worker_send.send(Message::CodegenItem) { panic!("Could not send Message::CodegenItem to main thread") } main_thread_worker_state = MainThreadWorkerState::Codegenning; } else { // The queue is full enough to not let the worker // threads starve. Use the implicit Token to do some // LLVM work too. 
let (item, _) = work_items.pop() .expect("queue empty - queue_full_enough() broken?"); let cgcx = CodegenContext { worker: get_worker_id(&mut free_worker_ids), .. cgcx.clone() }; maybe_start_llvm_timer(cgcx.config(item.module_kind()), &mut llvm_start_time); main_thread_worker_state = MainThreadWorkerState::LLVMing; spawn_work(cgcx, item); } } } else if codegen_aborted { // don't queue up any more work if codegen was aborted, we're // just waiting for our existing children to finish } else { // If we've finished everything related to normal codegen // then it must be the case that we've got some LTO work to do. // Perform the serial work here of figuring out what we're // going to LTO and then push a bunch of work items onto our // queue to do LTO if work_items.len() == 0 && running == 0 && main_thread_worker_state == MainThreadWorkerState::Idle { assert!(!started_lto); assert!(needs_lto.len() + lto_import_only_modules.len() > 0); started_lto = true; let modules = mem::replace(&mut needs_lto, Vec::new()); let import_only_modules = mem::replace(&mut lto_import_only_modules, Vec::new()); for (work, cost) in generate_lto_work(&cgcx, modules, import_only_modules) { let insertion_index = work_items .binary_search_by_key(&cost, |&(_, cost)| cost) .unwrap_or_else(|e| e); work_items.insert(insertion_index, (work, cost)); if !cgcx.opts.debugging_opts.no_parallel_llvm { helper.request_token(); } } } // In this branch, we know that everything has been codegened, // so it's just a matter of determining whether the implicit // Token is free to use for LLVM work. match main_thread_worker_state { MainThreadWorkerState::Idle => { if let Some((item, _)) = work_items.pop() { let cgcx = CodegenContext { worker: get_worker_id(&mut free_worker_ids), .. 
cgcx.clone() }; maybe_start_llvm_timer(cgcx.config(item.module_kind()), &mut llvm_start_time); main_thread_worker_state = MainThreadWorkerState::LLVMing; spawn_work(cgcx, item); } else { // There is no unstarted work, so let the main thread // take over for a running worker. Otherwise the // implicit token would just go to waste. // We reduce the `running` counter by one. The // `tokens.truncate()` below will take care of // giving the Token back. debug_assert!(running > 0); running -= 1; main_thread_worker_state = MainThreadWorkerState::LLVMing; } } MainThreadWorkerState::Codegenning => { bug!("codegen worker should not be codegenning after \ codegen was already completed") } MainThreadWorkerState::LLVMing => { // Already making good use of that token } } } // Spin up what work we can, only doing this while we've got available // parallelism slots and work left to spawn. while !codegen_aborted && work_items.len() > 0 && running < tokens.len() { let (item, _) = work_items.pop().unwrap(); maybe_start_llvm_timer(cgcx.config(item.module_kind()), &mut llvm_start_time); let cgcx = CodegenContext { worker: get_worker_id(&mut free_worker_ids), .. cgcx.clone() }; spawn_work(cgcx, item); running += 1; } // Relinquish accidentally acquired extra tokens tokens.truncate(running); let msg = coordinator_receive.recv().unwrap(); match *msg.downcast::<Message>().ok().unwrap() { // Save the token locally and the next turn of the loop will use // this to spawn a new unit of work, or it may get dropped // immediately if we have no more work to spawn. Message::Token(token) => { match token { Ok(token) => { tokens.push(token); if main_thread_worker_state == MainThreadWorkerState::LLVMing { // If the main thread token is used for LLVM work // at the moment, we turn that thread into a regular // LLVM worker thread, so the main thread is free // to react to codegen demand. 
main_thread_worker_state = MainThreadWorkerState::Idle; running += 1; } } Err(e) => { let msg = &format!("failed to acquire jobserver token: {}", e); shared_emitter.fatal(msg); // Exit the coordinator thread panic!("{}", msg) } } } Message::CodegenDone { llvm_work_item, cost } => { // We keep the queue sorted by estimated processing cost, // so that more expensive items are processed earlier. This // is good for throughput as it gives the main thread more // time to fill up the queue and it avoids scheduling // expensive items to the end. // Note, however, that this is not ideal for memory // consumption, as LLVM module sizes are not evenly // distributed. let insertion_index = work_items.binary_search_by_key(&cost, |&(_, cost)| cost); let insertion_index = match insertion_index { Ok(idx) | Err(idx) => idx }; work_items.insert(insertion_index, (llvm_work_item, cost)); if !cgcx.opts.debugging_opts.no_parallel_llvm { helper.request_token(); } assert!(!codegen_aborted); assert_eq!(main_thread_worker_state, MainThreadWorkerState::Codegenning); main_thread_worker_state = MainThreadWorkerState::Idle; } Message::CodegenComplete => { codegen_done = true; assert!(!codegen_aborted); assert_eq!(main_thread_worker_state, MainThreadWorkerState::Codegenning); main_thread_worker_state = MainThreadWorkerState::Idle; } // If codegen is aborted that means translation was aborted due // to some normal-ish compiler error. In this situation we want // to exit as soon as possible, but we want to make sure all // existing work has finished. Flag codegen as being done, and // then conditions above will ensure no more work is spawned but // we'll keep executing this loop until `running` hits 0. Message::CodegenAborted => { assert!(!codegen_aborted); codegen_done = true; codegen_aborted = true; assert_eq!(main_thread_worker_state, MainThreadWorkerState::Codegenning); } // If a thread exits successfully then we drop a token associated // with that worker and update our `running` count. 
We may later // re-acquire a token to continue running more work. We may also not // actually drop a token here if the worker was running with an // "ephemeral token" // // Note that if the thread failed that means it panicked, so we // abort immediately. Message::Done { result: Ok(compiled_module), worker_id } => { if main_thread_worker_state == MainThreadWorkerState::LLVMing { main_thread_worker_state = MainThreadWorkerState::Idle; } else { running -= 1; } free_worker_ids.push(worker_id); match compiled_module.kind { ModuleKind::Regular => { compiled_modules.push(compiled_module); } ModuleKind::Metadata => { assert!(compiled_metadata_module.is_none()); compiled_metadata_module = Some(compiled_module); } ModuleKind::Allocator => { assert!(compiled_allocator_module.is_none()); compiled_allocator_module = Some(compiled_module); } } } Message::NeedsLTO { result, worker_id } => { assert!(!started_lto); if main_thread_worker_state == MainThreadWorkerState::LLVMing { main_thread_worker_state = MainThreadWorkerState::Idle; } else { running -= 1; } free_worker_ids.push(worker_id); needs_lto.push(result); } Message::AddImportOnlyModule { module_data, work_product } => { assert!(!started_lto); assert!(!codegen_done); assert_eq!(main_thread_worker_state, MainThreadWorkerState::Codegenning); lto_import_only_modules.push((module_data, work_product)); main_thread_worker_state = MainThreadWorkerState::Idle; } Message::Done { result: Err(()), worker_id: _ } => { bug!("worker thread panicked"); } Message::CodegenItem => { bug!("the coordinator should not receive codegen requests") } } } if let Some(llvm_start_time) = llvm_start_time { let total_llvm_time = Instant::now().duration_since(llvm_start_time); // This is the top-level timing for all of LLVM, set the time-depth // to zero. 
set_time_depth(0); print_time_passes_entry(cgcx.time_passes, "LLVM passes", total_llvm_time); } // Regardless of what order these modules completed in, report them to // the backend in the same order every time to ensure that we're handing // out deterministic results. compiled_modules.sort_by(|a, b| a.name.cmp(&b.name)); let compiled_metadata_module = compiled_metadata_module .expect("Metadata module not compiled?"); Ok(CompiledModules { modules: compiled_modules, metadata_module: compiled_metadata_module, allocator_module: compiled_allocator_module, }) }); // A heuristic that determines if we have enough LLVM WorkItems in the // queue so that the main thread can do LLVM work instead of codegen fn queue_full_enough(items_in_queue: usize, workers_running: usize, max_workers: usize) -> bool { // Tune me, plz. items_in_queue > 0 && items_in_queue >= max_workers.saturating_sub(workers_running / 2) } fn maybe_start_llvm_timer(config: &ModuleConfig, llvm_start_time: &mut Option<Instant>) { // We keep track of the -Ztime-passes output manually, // since the closure-based interface does not fit well here. if config.time_passes { if llvm_start_time.is_none() { *llvm_start_time = Some(Instant::now()); } } } } pub const CODEGEN_WORKER_ID: usize = ::std::usize::MAX; pub const CODEGEN_WORKER_TIMELINE: time_graph::TimelineId = time_graph::TimelineId(CODEGEN_WORKER_ID); pub const CODEGEN_WORK_PACKAGE_KIND: time_graph::WorkPackageKind = time_graph::WorkPackageKind(&["#DE9597", "#FED1D3", "#FDC5C7", "#B46668", "#88494B"]); const LLVM_WORK_PACKAGE_KIND: time_graph::WorkPackageKind = time_graph::WorkPackageKind(&["#7DB67A", "#C6EEC4", "#ACDAAA", "#579354", "#3E6F3C"]); fn spawn_work(cgcx: CodegenContext, work: WorkItem) { let depth = time_depth(); thread::spawn(move || { set_time_depth(depth); // Set up a destructor which will fire off a message that we're done as // we exit. 
// Drop guard: whatever happens on this worker thread (including a panic
// that unwinds), dropping `Bomb` sends exactly one completion message to
// the coordinator so it never deadlocks waiting for this worker.
struct Bomb {
    coordinator_send: Sender<Box<dyn Any + Send>>,
    // Set to `Some(..)` only after `execute_work_item` succeeds; `None`
    // at drop time therefore means failure/panic.
    result: Option<WorkItemResult>,
    worker_id: usize,
}
impl Drop for Bomb {
    fn drop(&mut self) {
        let worker_id = self.worker_id;
        // Translate the (possibly absent) work result into a coordinator
        // message. `take()` leaves `None` behind so the result is moved,
        // not cloned.
        let msg = match self.result.take() {
            Some(WorkItemResult::Compiled(m)) => {
                Message::Done { result: Ok(m), worker_id }
            }
            Some(WorkItemResult::NeedsLTO(m)) => {
                Message::NeedsLTO { result: m, worker_id }
            }
            // No result recorded: report the worker as failed.
            None => Message::Done { result: Err(()), worker_id }
        };
        // `drop(..)` discards the send error: if the coordinator is gone
        // there is nobody left to notify anyway.
        drop(self.coordinator_send.send(Box::new(msg)));
    }
}

// Arm the guard before doing any work so a panic below still reports.
let mut bomb = Bomb {
    coordinator_send: cgcx.coordinator_send.clone(),
    result: None,
    worker_id: cgcx.worker,
};

// Execute the work itself, and if it finishes successfully then flag
// ourselves as a success as well.
//
// Note that we ignore any `FatalError` coming out of `execute_work_item`,
// as a diagnostic was already sent off to the main thread - just
// surface that there was an error in this worker.
bomb.result = {
    // Record this work item on the -Z time-graph timeline, if enabled.
    let timeline = cgcx.time_graph.as_ref().map(|tg| {
        tg.start(time_graph::TimelineId(cgcx.worker),
                 LLVM_WORK_PACKAGE_KIND,
                 &work.name())
    });
    let mut timeline = timeline.unwrap_or(Timeline::noop());
    execute_work_item(&cgcx, work, &mut timeline).ok()
};
});
}

/// Run the external assembler over `assembly`, producing `object`.
/// Emits a structured diagnostic (with the full command line and the
/// assembler's stderr+stdout) and aborts on failure.
pub fn run_assembler(cgcx: &CodegenContext, handler: &Handler, assembly: &Path, object: &Path) {
    let assembler = cgcx.assembler_cmd
        .as_ref()
        .expect("cgcx.assembler_cmd is missing?");

    let pname = &assembler.name;
    let mut cmd = assembler.cmd.clone();
    cmd.arg("-c").arg("-o").arg(object).arg(assembly);
    debug!("{:?}", cmd);

    match cmd.output() {
        Ok(prog) => {
            if !prog.status.success() {
                // Combine stderr then stdout into one note so the user
                // sees everything the assembler printed.
                let mut note = prog.stderr.clone();
                note.extend_from_slice(&prog.stdout);
                handler.struct_err(&format!("linking with `{}` failed: {}",
                                            pname.display(),
                                            prog.status))
                    .note(&format!("{:?}", &cmd))
                    .note(str::from_utf8(&note[..]).unwrap())
                    .emit();
                handler.abort_if_errors();
            }
        },
        Err(e) => {
            // The assembler binary itself could not be spawned.
            handler.err(&format!("could not exec the linker `{}`: {}", pname.display(), e));
            handler.abort_if_errors();
        }
    }
}

pub unsafe fn
with_llvm_pmb(llmod: &llvm::Module, config: &ModuleConfig, opt_level: llvm::CodeGenOptLevel, prepare_for_thin_lto: bool, f: &mut dyn FnMut(&llvm::PassManagerBuilder)) { use std::ptr; // Create the PassManagerBuilder for LLVM. We configure it with // reasonable defaults and prepare it to actually populate the pass // manager. let builder = llvm::LLVMPassManagerBuilderCreate(); let opt_size = config.opt_size.unwrap_or(llvm::CodeGenOptSizeNone); let inline_threshold = config.inline_threshold; let pgo_gen_path = config.pgo_gen.as_ref().map(|s| { let s = if s.is_empty() { "default_%m.profraw" } else { s }; CString::new(s.as_bytes()).unwrap() }); let pgo_use_path = if config.pgo_use.is_empty() { None } else { Some(CString::new(config.pgo_use.as_bytes()).unwrap()) }; llvm::LLVMRustConfigurePassManagerBuilder( builder, opt_level, config.merge_functions, config.vectorize_slp, config.vectorize_loop, prepare_for_thin_lto, pgo_gen_path.as_ref().map_or(ptr::null(), |s| s.as_ptr()), pgo_use_path.as_ref().map_or(ptr::null(), |s| s.as_ptr()), ); llvm::LLVMPassManagerBuilderSetSizeLevel(builder, opt_size as u32); if opt_size != llvm::CodeGenOptSizeNone { llvm::LLVMPassManagerBuilderSetDisableUnrollLoops(builder, 1); } llvm::LLVMRustAddBuilderLibraryInfo(builder, llmod, config.no_builtins); // Here we match what clang does (kinda). For O0 we only inline // always-inline functions (but don't add lifetime intrinsics), at O1 we // inline with lifetime intrinsics, and O2+ we add an inliner with a // thresholds copied from clang. match (opt_level, opt_size, inline_threshold) { (.., Some(t)) => { llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, t as u32); } (llvm::CodeGenOptLevel::Aggressive, ..) 
=> { llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 275); } (_, llvm::CodeGenOptSizeDefault, _) => { llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 75); } (_, llvm::CodeGenOptSizeAggressive, _) => { llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 25); } (llvm::CodeGenOptLevel::None, ..) => { llvm::LLVMRustAddAlwaysInlinePass(builder, false); } (llvm::CodeGenOptLevel::Less, ..) => { llvm::LLVMRustAddAlwaysInlinePass(builder, true); } (llvm::CodeGenOptLevel::Default, ..) => { llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 225); } (llvm::CodeGenOptLevel::Other, ..) => { bug!("CodeGenOptLevel::Other selected") } } f(builder); llvm::LLVMPassManagerBuilderDispose(builder); } enum SharedEmitterMessage { Diagnostic(Diagnostic), InlineAsmError(u32, String), AbortIfErrors, Fatal(String), } #[derive(Clone)] pub struct SharedEmitter { sender: Sender<SharedEmitterMessage>, } pub struct SharedEmitterMain { receiver: Receiver<SharedEmitterMessage>, } impl SharedEmitter { pub fn new() -> (SharedEmitter, SharedEmitterMain) { let (sender, receiver) = channel(); (SharedEmitter { sender }, SharedEmitterMain { receiver }) } fn inline_asm_error(&self, cookie: u32, msg: String) { drop(self.sender.send(SharedEmitterMessage::InlineAsmError(cookie, msg))); } fn fatal(&self, msg: &str) { drop(self.sender.send(SharedEmitterMessage::Fatal(msg.to_string()))); } } impl Emitter for SharedEmitter { fn emit(&mut self, db: &DiagnosticBuilder) { drop(self.sender.send(SharedEmitterMessage::Diagnostic(Diagnostic { msg: db.message(), code: db.code.clone(), lvl: db.level, }))); for child in &db.children { drop(self.sender.send(SharedEmitterMessage::Diagnostic(Diagnostic { msg: child.message(), code: None, lvl: child.level, }))); } drop(self.sender.send(SharedEmitterMessage::AbortIfErrors)); } } impl SharedEmitterMain { pub fn check(&self, sess: &Session, blocking: bool) { loop { let message = if blocking { match self.receiver.recv() { Ok(message) => 
Ok(message), Err(_) => Err(()), } } else { match self.receiver.try_recv() { Ok(message) => Ok(message), Err(_) => Err(()), } }; match message { Ok(SharedEmitterMessage::Diagnostic(diag)) => { let handler = sess.diagnostic(); match diag.code { Some(ref code) => { handler.emit_with_code(&MultiSpan::new(), &diag.msg, code.clone(), diag.lvl); } None => { handler.emit(&MultiSpan::new(), &diag.msg, diag.lvl); } } } Ok(SharedEmitterMessage::InlineAsmError(cookie, msg)) => { match Mark::from_u32(cookie).expn_info() { Some(ei) => sess.span_err(ei.call_site, &msg), None => sess.err(&msg), } } Ok(SharedEmitterMessage::AbortIfErrors) => { sess.abort_if_errors(); } Ok(SharedEmitterMessage::Fatal(msg)) => { sess.fatal(&msg); } Err(_) => { break; } } } } } pub struct OngoingCodegen { crate_name: Symbol, crate_hash: Svh, metadata: EncodedMetadata, windows_subsystem: Option<String>, linker_info: LinkerInfo, crate_info: CrateInfo, time_graph: Option<TimeGraph>, coordinator_send: Sender<Box<dyn Any + Send>>, codegen_worker_receive: Receiver<Message>, shared_emitter_main: SharedEmitterMain, future: thread::JoinHandle<Result<CompiledModules, ()>>, output_filenames: Arc<OutputFilenames>, } impl OngoingCodegen { pub(crate) fn join( self, sess: &Session ) -> (CodegenResults, FxHashMap<WorkProductId, WorkProduct>) { self.shared_emitter_main.check(sess, true); let compiled_modules = match self.future.join() { Ok(Ok(compiled_modules)) => compiled_modules, Ok(Err(())) => { sess.abort_if_errors(); panic!("expected abort due to worker thread errors") }, Err(_) => { bug!("panic during codegen/LLVM phase"); } }; sess.cgu_reuse_tracker.check_expected_reuse(sess); sess.abort_if_errors(); if let Some(time_graph) = self.time_graph { time_graph.dump(&format!("{}-timings", self.crate_name)); } let work_products = copy_all_cgu_workproducts_to_incr_comp_cache_dir(sess, &compiled_modules); produce_final_output_artifacts(sess, &compiled_modules, &self.output_filenames); // FIXME: time_llvm_passes support - 
does this use a global context or // something? if sess.codegen_units() == 1 && sess.time_llvm_passes() { unsafe { llvm::LLVMRustPrintPassTimings(); } } (CodegenResults { crate_name: self.crate_name, crate_hash: self.crate_hash, metadata: self.metadata, windows_subsystem: self.windows_subsystem, linker_info: self.linker_info, crate_info: self.crate_info, modules: compiled_modules.modules, allocator_module: compiled_modules.allocator_module, metadata_module: compiled_modules.metadata_module, }, work_products) } pub(crate) fn submit_pre_codegened_module_to_llvm(&self, tcx: TyCtxt, module: ModuleCodegen) { self.wait_for_signal_to_codegen_item(); self.check_for_errors(tcx.sess); // These are generally cheap and won't through off scheduling. let cost = 0; submit_codegened_module_to_llvm(tcx, module, cost); } pub fn codegen_finished(&self, tcx: TyCtxt) { self.wait_for_signal_to_codegen_item(); self.check_for_errors(tcx.sess); drop(self.coordinator_send.send(Box::new(Message::CodegenComplete))); } /// Consume this context indicating that codegen was entirely aborted, and /// we need to exit as quickly as possible. /// /// This method blocks the current thread until all worker threads have /// finished, and all worker threads should have exited or be real close to /// exiting at this point. pub fn codegen_aborted(self) { // Signal to the coordinator it should spawn no more work and start // shutdown. drop(self.coordinator_send.send(Box::new(Message::CodegenAborted))); drop(self.future.join()); } pub fn check_for_errors(&self, sess: &Session) { self.shared_emitter_main.check(sess, false); } pub fn wait_for_signal_to_codegen_item(&self) { match self.codegen_worker_receive.recv() { Ok(Message::CodegenItem) => { // Nothing to do } Ok(_) => panic!("unexpected message"), Err(_) => { // One of the LLVM threads must have panicked, fall through so // error handling can be reached. 
} } } } // impl Drop for OngoingCodegen { // fn drop(&mut self) { // } // } pub(crate) fn submit_codegened_module_to_llvm(tcx: TyCtxt, module: ModuleCodegen, cost: u64) { let llvm_work_item = WorkItem::Optimize(module); drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::CodegenDone { llvm_work_item, cost, }))); } pub(crate) fn submit_post_lto_module_to_llvm(tcx: TyCtxt, module: CachedModuleCodegen) { let llvm_work_item = WorkItem::CopyPostLtoArtifacts(module); drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::CodegenDone { llvm_work_item, cost: 0, }))); } pub(crate) fn submit_pre_lto_module_to_llvm(tcx: TyCtxt, module: CachedModuleCodegen) { let filename = pre_lto_bitcode_filename(&module.name); let bc_path = in_incr_comp_dir_sess(tcx.sess, &filename); let file = fs::File::open(&bc_path).unwrap_or_else(|e| { panic!("failed to open bitcode file `{}`: {}", bc_path.display(), e) }); let mmap = unsafe { memmap::Mmap::map(&file).unwrap_or_else(|e| { panic!("failed to mmap bitcode file `{}`: {}", bc_path.display(), e) }) }; // Schedule the module to be loaded drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::AddImportOnlyModule { module_data: SerializedModule::FromUncompressedFile(mmap), work_product: module.source, }))); } pub(super) fn pre_lto_bitcode_filename(module_name: &str) -> String { format!("{}.{}", module_name, PRE_THIN_LTO_BC_EXT) } fn msvc_imps_needed(tcx: TyCtxt) -> bool { // This should never be true (because it's not supported). If it is true, // something is wrong with commandline arg validation. assert!(!(tcx.sess.opts.debugging_opts.cross_lang_lto.enabled() && tcx.sess.target.target.options.is_like_msvc && tcx.sess.opts.cg.prefer_dynamic)); tcx.sess.target.target.options.is_like_msvc && tcx.sess.crate_types.borrow().iter().any(|ct| *ct == config::CrateType::Rlib) && // ThinLTO can't handle this workaround in all cases, so we don't // emit the `__imp_` symbols. 
Instead we make them unnecessary by disallowing // dynamic linking when cross-language LTO is enabled. !tcx.sess.opts.debugging_opts.cross_lang_lto.enabled() } // Create a `__imp_<symbol> = &symbol` global for every public static `symbol`. // This is required to satisfy `dllimport` references to static data in .rlibs // when using MSVC linker. We do this only for data, as linker can fix up // code references on its own. // See #26591, #27438 fn create_msvc_imps(cgcx: &CodegenContext, llcx: &llvm::Context, llmod: &llvm::Module) { if !cgcx.msvc_imps_needed { return } // The x86 ABI seems to require that leading underscores are added to symbol // names, so we need an extra underscore on 32-bit. There's also a leading // '\x01' here which disables LLVM's symbol mangling (e.g. no extra // underscores added in front). let prefix = if cgcx.target_pointer_width == "32" { "\x01__imp__" } else { "\x01__imp_" }; unsafe { let i8p_ty = Type::i8p_llcx(llcx); let globals = base::iter_globals(llmod) .filter(|&val| { llvm::LLVMRustGetLinkage(val) == llvm::Linkage::ExternalLinkage && llvm::LLVMIsDeclaration(val) == 0 }) .map(move |val| { let name = CStr::from_ptr(llvm::LLVMGetValueName(val)); let mut imp_name = prefix.as_bytes().to_vec(); imp_name.extend(name.to_bytes()); let imp_name = CString::new(imp_name).unwrap(); (imp_name, val) }) .collect::<Vec<_>>(); for (imp_name, val) in globals { let imp = llvm::LLVMAddGlobal(llmod, i8p_ty, imp_name.as_ptr() as *const _); llvm::LLVMSetInitializer(imp, consts::ptrcast(val, i8p_ty)); llvm::LLVMRustSetLinkage(imp, llvm::Linkage::ExternalLinkage); } } }
40.748846
100
0.568662
bb98125672d8983c702fd3eb984bb33ab8d2e29d
14,402
use { bincode::{deserialize, serialize}, crossbeam_channel::{unbounded, Receiver, Sender}, futures::{future, prelude::stream::StreamExt}, solana_banks_interface::{ Banks, BanksRequest, BanksResponse, BanksTransactionResultWithSimulation, TransactionConfirmationStatus, TransactionSimulationDetails, TransactionStatus, }, solana_runtime::{ bank::{Bank, TransactionSimulationResult}, bank_forks::BankForks, commitment::BlockCommitmentCache, }, solana_sdk::{ account::Account, clock::Slot, commitment_config::CommitmentLevel, feature_set::FeatureSet, fee_calculator::FeeCalculator, hash::Hash, message::{Message, SanitizedMessage}, pubkey::Pubkey, signature::Signature, transaction::{self, SanitizedTransaction, Transaction}, }, solana_send_transaction_service::{ send_transaction_service::{SendTransactionService, TransactionInfo}, tpu_info::NullTpuInfo, }, std::{ convert::TryFrom, io, net::{Ipv4Addr, SocketAddr}, sync::{Arc, RwLock}, thread::Builder, time::Duration, }, tarpc::{ context::Context, serde_transport::tcp, server::{self, incoming::Incoming, Channel}, transport::{self, channel::UnboundedChannel}, ClientMessage, Response, }, tokio::time::sleep, tokio_serde::formats::Bincode, }; #[derive(Clone)] struct BanksServer { bank_forks: Arc<RwLock<BankForks>>, block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>, transaction_sender: Sender<TransactionInfo>, poll_signature_status_sleep_duration: Duration, } impl BanksServer { /// Return a BanksServer that forwards transactions to the /// given sender. If unit-testing, those transactions can go to /// a bank in the given BankForks. Otherwise, the receiver should /// forward them to a validator in the leader schedule. 
fn new( bank_forks: Arc<RwLock<BankForks>>, block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>, transaction_sender: Sender<TransactionInfo>, poll_signature_status_sleep_duration: Duration, ) -> Self { Self { bank_forks, block_commitment_cache, transaction_sender, poll_signature_status_sleep_duration, } } fn run(bank_forks: Arc<RwLock<BankForks>>, transaction_receiver: Receiver<TransactionInfo>) { while let Ok(info) = transaction_receiver.recv() { let mut transaction_infos = vec![info]; while let Ok(info) = transaction_receiver.try_recv() { transaction_infos.push(info); } let transactions: Vec<_> = transaction_infos .into_iter() .map(|info| deserialize(&info.wire_transaction).unwrap()) .collect(); let bank = bank_forks.read().unwrap().working_bank(); let _ = bank.try_process_transactions(transactions.iter()); } } /// Useful for unit-testing fn new_loopback( bank_forks: Arc<RwLock<BankForks>>, block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>, poll_signature_status_sleep_duration: Duration, ) -> Self { let (transaction_sender, transaction_receiver) = unbounded(); let bank = bank_forks.read().unwrap().working_bank(); let slot = bank.slot(); { // ensure that the commitment cache and bank are synced let mut w_block_commitment_cache = block_commitment_cache.write().unwrap(); w_block_commitment_cache.set_all_slots(slot, slot); } let server_bank_forks = bank_forks.clone(); Builder::new() .name("solana-bank-forks-client".to_string()) .spawn(move || Self::run(server_bank_forks, transaction_receiver)) .unwrap(); Self::new( bank_forks, block_commitment_cache, transaction_sender, poll_signature_status_sleep_duration, ) } fn slot(&self, commitment: CommitmentLevel) -> Slot { self.block_commitment_cache .read() .unwrap() .slot_with_commitment(commitment) } fn bank(&self, commitment: CommitmentLevel) -> Arc<Bank> { self.bank_forks.read().unwrap()[self.slot(commitment)].clone() } async fn poll_signature_status( self, signature: &Signature, blockhash: &Hash, 
last_valid_block_height: u64, commitment: CommitmentLevel, ) -> Option<transaction::Result<()>> { let mut status = self .bank(commitment) .get_signature_status_with_blockhash(signature, blockhash); while status.is_none() { sleep(self.poll_signature_status_sleep_duration).await; let bank = self.bank(commitment); if bank.block_height() > last_valid_block_height { break; } status = bank.get_signature_status_with_blockhash(signature, blockhash); } status } } fn verify_transaction( transaction: &Transaction, feature_set: &Arc<FeatureSet>, ) -> transaction::Result<()> { if let Err(err) = transaction.verify() { Err(err) } else if let Err(err) = transaction.verify_precompiles(feature_set) { Err(err) } else { Ok(()) } } #[tarpc::server] impl Banks for BanksServer { async fn send_transaction_with_context(self, _: Context, transaction: Transaction) { let blockhash = &transaction.message.recent_blockhash; let last_valid_block_height = self .bank_forks .read() .unwrap() .root_bank() .get_blockhash_last_valid_block_height(blockhash) .unwrap(); let signature = transaction.signatures.get(0).cloned().unwrap_or_default(); let info = TransactionInfo::new( signature, serialize(&transaction).unwrap(), last_valid_block_height, None, None, ); self.transaction_sender.send(info).unwrap(); } async fn get_fees_with_commitment_and_context( self, _: Context, commitment: CommitmentLevel, ) -> (FeeCalculator, Hash, u64) { let bank = self.bank(commitment); let blockhash = bank.last_blockhash(); let lamports_per_signature = bank.get_lamports_per_signature(); let last_valid_block_height = bank .get_blockhash_last_valid_block_height(&blockhash) .unwrap(); ( FeeCalculator::new(lamports_per_signature), blockhash, last_valid_block_height, ) } async fn get_transaction_status_with_context( self, _: Context, signature: Signature, ) -> Option<TransactionStatus> { let bank = self.bank(CommitmentLevel::Processed); let (slot, status) = bank.get_signature_status_slot(&signature)?; let r_block_commitment_cache 
= self.block_commitment_cache.read().unwrap(); let optimistically_confirmed_bank = self.bank(CommitmentLevel::Confirmed); let optimistically_confirmed = optimistically_confirmed_bank.get_signature_status_slot(&signature); let confirmations = if r_block_commitment_cache.root() >= slot && r_block_commitment_cache.highest_confirmed_root() >= slot { None } else { r_block_commitment_cache .get_confirmation_count(slot) .or(Some(0)) }; Some(TransactionStatus { slot, confirmations, err: status.err(), confirmation_status: if confirmations.is_none() { Some(TransactionConfirmationStatus::Finalized) } else if optimistically_confirmed.is_some() { Some(TransactionConfirmationStatus::Confirmed) } else { Some(TransactionConfirmationStatus::Processed) }, }) } async fn get_slot_with_context(self, _: Context, commitment: CommitmentLevel) -> Slot { self.slot(commitment) } async fn get_block_height_with_context(self, _: Context, commitment: CommitmentLevel) -> u64 { self.bank(commitment).block_height() } async fn process_transaction_with_preflight_and_commitment_and_context( self, ctx: Context, transaction: Transaction, commitment: CommitmentLevel, ) -> BanksTransactionResultWithSimulation { let sanitized_transaction = match SanitizedTransaction::try_from_legacy_transaction(transaction.clone()) { Err(err) => { return BanksTransactionResultWithSimulation { result: Some(Err(err)), simulation_details: None, }; } Ok(tx) => tx, }; if let TransactionSimulationResult { result: Err(err), logs, post_simulation_accounts: _, units_consumed, } = self .bank(commitment) .simulate_transaction_unchecked(sanitized_transaction) { return BanksTransactionResultWithSimulation { result: Some(Err(err)), simulation_details: Some(TransactionSimulationDetails { logs, units_consumed, }), }; } BanksTransactionResultWithSimulation { result: self .process_transaction_with_commitment_and_context(ctx, transaction, commitment) .await, simulation_details: None, } } async fn 
process_transaction_with_commitment_and_context( self, _: Context, transaction: Transaction, commitment: CommitmentLevel, ) -> Option<transaction::Result<()>> { if let Err(err) = verify_transaction(&transaction, &self.bank(commitment).feature_set) { return Some(Err(err)); } let blockhash = &transaction.message.recent_blockhash; let last_valid_block_height = self .bank(commitment) .get_blockhash_last_valid_block_height(blockhash) .unwrap(); let signature = transaction.signatures.get(0).cloned().unwrap_or_default(); let info = TransactionInfo::new( signature, serialize(&transaction).unwrap(), last_valid_block_height, None, None, ); self.transaction_sender.send(info).unwrap(); self.poll_signature_status(&signature, blockhash, last_valid_block_height, commitment) .await } async fn get_account_with_commitment_and_context( self, _: Context, address: Pubkey, commitment: CommitmentLevel, ) -> Option<Account> { let bank = self.bank(commitment); bank.get_account(&address).map(Account::from) } async fn get_latest_blockhash_with_context(self, _: Context) -> Hash { let bank = self.bank(CommitmentLevel::default()); bank.last_blockhash() } async fn get_latest_blockhash_with_commitment_and_context( self, _: Context, commitment: CommitmentLevel, ) -> Option<(Hash, u64)> { let bank = self.bank(commitment); let blockhash = bank.last_blockhash(); let last_valid_block_height = bank.get_blockhash_last_valid_block_height(&blockhash)?; Some((blockhash, last_valid_block_height)) } async fn get_fee_for_message_with_commitment_and_context( self, _: Context, commitment: CommitmentLevel, message: Message, ) -> Option<u64> { let bank = self.bank(commitment); let sanitized_message = SanitizedMessage::try_from(message).ok()?; bank.get_fee_for_message(&sanitized_message) } } pub async fn start_local_server( bank_forks: Arc<RwLock<BankForks>>, block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>, poll_signature_status_sleep_duration: Duration, ) -> UnboundedChannel<Response<BanksResponse>, 
ClientMessage<BanksRequest>> { let banks_server = BanksServer::new_loopback( bank_forks, block_commitment_cache, poll_signature_status_sleep_duration, ); let (client_transport, server_transport) = transport::channel::unbounded(); let server = server::BaseChannel::with_defaults(server_transport).execute(banks_server.serve()); tokio::spawn(server); client_transport } pub async fn start_tcp_server( listen_addr: SocketAddr, tpu_addr: SocketAddr, bank_forks: Arc<RwLock<BankForks>>, block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>, ) -> io::Result<()> { // Note: These settings are copied straight from the tarpc example. let server = tcp::listen(listen_addr, Bincode::default) .await? // Ignore accept errors. .filter_map(|r| future::ready(r.ok())) .map(server::BaseChannel::with_defaults) // Limit channels to 1 per IP. .max_channels_per_key(1, |t| { t.as_ref() .peer_addr() .map(|x| x.ip()) .unwrap_or_else(|_| Ipv4Addr::new(0, 0, 0, 0).into()) }) // serve is generated by the service attribute. It takes as input any type implementing // the generated Banks trait. .map(move |chan| { let (sender, receiver) = unbounded(); SendTransactionService::new::<NullTpuInfo>( tpu_addr, &bank_forks, None, receiver, 5_000, 0, ); let server = BanksServer::new( bank_forks.clone(), block_commitment_cache.clone(), sender, Duration::from_millis(200), ); chan.execute(server.serve()) }) // Max 10 channels. .buffer_unordered(10) .for_each(|_| async {}); server.await; Ok(()) }
34.372315
100
0.608249
f8af381839f7003b94e3213642c41e93d4f2b3a0
1,414
/// Yew component rendering the Material Design "download done" icon
/// (a check mark above a horizontal bar) as an inline SVG.
pub struct IconDownloadDone {
    // Shared icon properties (class, size, fill, color, stroke settings).
    // NOTE(review): fields are read via `.unwrap_or(..)`, so they are
    // presumably `Option`s of `Copy` types — confirm against `crate::Props`.
    props: crate::Props,
}

impl yew::Component for IconDownloadDone {
    type Properties = crate::Props;
    // Stateless component: it never receives messages.
    type Message = ();

    fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self {
        Self { props }
    }

    // Always re-render on update (no internal state to compare).
    fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender {
        true
    }

    // Never re-render on property change.
    // NOTE(review): returning `false` here means new props are ignored
    // after first render — confirm this is intended.
    fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender {
        false
    }

    fn view(&self) -> yew::prelude::Html {
        // Outer <svg> applies the caller-configurable styling with
        // feather-icon-style defaults (24px, no fill, currentColor stroke);
        // the inner <svg> carries the fixed Material icon path data.
        yew::prelude::html! {
            <svg
                class=self.props.class.unwrap_or("")
                width=self.props.size.unwrap_or(24).to_string()
                height=self.props.size.unwrap_or(24).to_string()
                viewBox="0 0 24 24"
                fill=self.props.fill.unwrap_or("none")
                stroke=self.props.color.unwrap_or("currentColor")
                stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
                stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
                stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
            >
                <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M5 18h14v2H5v-2zm4.6-2.7L5 10.7l2-1.9 2.6 2.6L17 4l2 2-9.4 9.3z"/></svg>
            </svg>
        }
    }
}
30.73913
215
0.571429
fcbc5a41c87dd91a297364a6ac632d6c2febad5e
1,356
//Copyright 2021-2023 WHTCORPS INC

// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file File except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

use std::cell::Cell;
use std::rc::Rc;

/// A simple shared counter ("Petri").
///
/// Cloning an `RcPetri` produces a handle to the *same* underlying
/// counter: every clone observes and advances one shared sequence.
/// Backed by `Rc<Cell<usize>>`, so it is single-threaded by construction
/// (`Rc` is not `Send`/`Sync`).
#[derive(Clone)]
pub struct RcPetri {
    // Shared mutable slot; `Rc` makes clones aliases of one counter.
    c: Rc<Cell<usize>>,
}

impl RcPetri {
    /// Create a counter whose first `next()` call returns `value`.
    pub fn with_initial(value: usize) -> Self {
        RcPetri { c: Rc::new(Cell::new(value)) }
    }

    /// Create a counter starting at zero.
    pub fn new() -> Self {
        Self::with_initial(0)
    }

    /// Return the next value in the sequence, advancing the shared state.
    ///
    /// ```
    /// use einsteindb_core::petri::RcPetri;
    ///
    /// let c = RcPetri::with_initial(3);
    /// assert_eq!(c.next(), 3);
    /// assert_eq!(c.next(), 4);
    /// let d = c.clone();
    /// assert_eq!(d.next(), 5);
    /// assert_eq!(c.next(), 6);
    /// ```
    pub fn next(&self) -> usize {
        // `Cell::replace` stores the incremented value and hands back the
        // previous one, making this a single post-increment.
        let current = self.c.get();
        self.c.replace(current + 1)
    }
}

// A no-argument `new()` should be mirrored by `Default` (clippy's
// `new_without_default`), so `RcPetri` composes with derived `Default`
// on containing structs.
impl Default for RcPetri {
    fn default() -> Self {
        Self::new()
    }
}
30.133333
85
0.619469
e25856ddf01f6db661cdde1962790a77cb9b4e1b
9,089
use std::collections::VecDeque; use std::sync::Arc; use std::time::{Duration, Instant}; use anyhow::{Context, Result}; use futures::future::Either; use rdkafka::error::{KafkaError, RDKafkaErrorCode}; use rdkafka::producer::{DeliveryFuture, FutureProducer, FutureRecord}; use tiny_adnl::utils::*; use tokio::sync::Mutex; use crate::config::*; pub struct KafkaProducer { config: KafkaProducerConfig, batch_flush_threshold: Duration, producer: FutureProducer, batches: FxDashMap<i32, Arc<Batch>>, fixed_partitions: bool, } pub enum Partitions<T> { Fixed(T), Any, } impl Partitions<std::iter::Empty<i32>> { pub fn any() -> Self { Self::Any } } impl KafkaProducer { pub fn new( config: KafkaProducerConfig, partitions: Partitions<impl Iterator<Item = i32>>, ) -> Result<Self> { let mut client_config = rdkafka::config::ClientConfig::new(); client_config.set("bootstrap.servers", &config.brokers); if let Some(message_timeout_ms) = config.message_timeout_ms { client_config.set("message.timeout.ms", message_timeout_ms.to_string()); } if let Some(message_max_size) = config.message_max_size { client_config.set("message.max.bytes", message_max_size.to_string()); } #[cfg(feature = "sasl")] if let Some(SecurityConfig::Sasl(sasl)) = &config.security_config { client_config .set("security.protocol", &sasl.security_protocol) .set("ssl.ca.location", &sasl.ssl_ca_location) .set("sasl.mechanism", &sasl.sasl_mechanism) .set("sasl.username", &sasl.sasl_username) .set("sasl.password", &sasl.sasl_password); } let producer = client_config.create()?; let batch_flush_threshold = Duration::from_millis(config.batch_flush_threshold_ms); let (batches, fixed_partitions) = match partitions { Partitions::Fixed(partitions) => ( partitions .map(|partition| (partition, Default::default())) .collect(), true, ), Partitions::Any => (Default::default(), false), }; Ok(Self { config, batch_flush_threshold, producer, batches, fixed_partitions, }) } pub async fn write( &self, partition: i32, key: Vec<u8>, value: Vec<u8>, 
timestamp: Option<i64>, ) -> Result<()> { let batch = if self.fixed_partitions { self.batches .get(&partition) .context("Partition not found")? .clone() } else { self.batches.entry(partition).or_default().clone() }; let mut records = batch.records.lock().await; // Check if batch is big enough to check if records.len() > self.config.batch_flush_threshold_size { let now = Instant::now(); let mut batch_to_retry: Option<Vec<(Vec<u8>, Vec<u8>)>> = None; // Check pending records while let Some(item) = records.front() { // Break if successfully reached recent records if now.saturating_duration_since(item.created_at) < self.batch_flush_threshold { break; } // Pop the oldest item let item = match records.pop_front() { Some(item) => item, None => break, }; // Check if it was delivered if let Err((e, _)) = item.delivery_future.await.with_context(|| { format!( "Delivery future cancelled for tx {}", hex::encode(&item.key) ) })? { log::error!( "Batch item delivery error tx {}: {:?}. Retrying full batch", hex::encode(&item.key), e ); } else { // Continue to next pending record on successful delivery continue; } // Create batch to retry batch_to_retry = Some( futures::future::join_all( // Include first failed item std::iter::once(Either::Left(futures::future::ready(( item.key, item.value, )))) .chain( // Wait all subsequent records and add them despite result std::mem::take(&mut *records).into_iter().map(|item| { Either::Right(async move { item.delivery_future.await.ok(); (item.key, item.value) }) }), ), ) .await, ); } // Write batch if let Some(batch_to_retry) = batch_to_retry { log::error!( "FOUND BATCH TO RETRY: {} items in partition {}", batch_to_retry.len(), partition ); let batch_len = batch_to_retry.len(); // Send all items sequentially for (mut key, mut value) in batch_to_retry { // Repeat as many times loop { let now = chrono::Utc::now().timestamp(); // Send single record let record = self.send_record(partition, key, value, Some(now)).await; // Wait until it is delivered 
match record.delivery_future.await.with_context(|| { format!( "Delivery future cancelled for tx {}", hex::encode(&record.key) ) })? { // Move to the next item on successful delivery Ok(_) => break, // Log error and retry on failure Err((e, _)) => log::error!( "Batch item delivery error tx {}: {:?}. Retrying full batch", hex::encode(&record.key), e ), } // Update key and value key = record.key; value = record.value; } } // Done log::info!("Retried batch of {} elements", batch_len); } } // Append record to the batch records.push_back(self.send_record(partition, key, value, timestamp).await); Ok(()) } async fn send_record( &self, partition: i32, key: Vec<u8>, value: Vec<u8>, timestamp: Option<i64>, ) -> PendingRecord { const HEADER_NAME: &str = "raw_block_timestamp"; let header_value = timestamp.unwrap_or_default().to_be_bytes(); let headers = rdkafka::message::OwnedHeaders::new().add(HEADER_NAME, &header_value); let interval = Duration::from_millis(self.config.attempt_interval_ms); let mut record = FutureRecord::to(&self.config.topic) .partition(partition) .key(&key) .payload(&value) .headers(headers.clone()); loop { match self.producer.send_result(record) { Ok(delivery_future) => { break PendingRecord { key, value, created_at: Instant::now(), delivery_future, } } Err((e, sent_record)) if e == KafkaError::MessageProduction(RDKafkaErrorCode::QueueFull) => { record = sent_record; tokio::time::sleep(Duration::from_millis(100)).await; } Err((e, sent_record)) => { record = sent_record; log::warn!( "Failed to send message to kafka topic {}: {:?}", self.config.topic, e ); tokio::time::sleep(interval).await; } }; } } } #[derive(Default)] struct Batch { records: Mutex<VecDeque<PendingRecord>>, } struct PendingRecord { key: Vec<u8>, value: Vec<u8>, created_at: Instant, delivery_future: DeliveryFuture, }
33.662963
96
0.463967
623b7ce22ff55987315c2cd2d896d31d9c7fdc39
12,799
/* * OpenAPI Petstore * * This is a sample server Petstore server. For this sample, you can use the api key `special-key` to test the authorization filters. * * The version of the OpenAPI document: 1.0.0 * * Generated by: https://openapi-generator.tech */ use reqwest; use crate::apis::ResponseContent; use super::{Error, configuration}; /// struct for typed errors of method `create_user` #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum CreateUserError { DefaultResponse(), UnknownValue(serde_json::Value), } /// struct for typed errors of method `create_users_with_array_input` #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum CreateUsersWithArrayInputError { DefaultResponse(), UnknownValue(serde_json::Value), } /// struct for typed errors of method `create_users_with_list_input` #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum CreateUsersWithListInputError { DefaultResponse(), UnknownValue(serde_json::Value), } /// struct for typed errors of method `delete_user` #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum DeleteUserError { Status400(), Status404(), UnknownValue(serde_json::Value), } /// struct for typed errors of method `get_user_by_name` #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum GetUserByNameError { Status400(), Status404(), UnknownValue(serde_json::Value), } /// struct for typed errors of method `login_user` #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum LoginUserError { Status400(), UnknownValue(serde_json::Value), } /// struct for typed errors of method `logout_user` #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum LogoutUserError { DefaultResponse(), UnknownValue(serde_json::Value), } /// struct for typed errors of method `update_user` #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum UpdateUserError { Status400(), Status404(), 
UnknownValue(serde_json::Value), } /// This can only be done by the logged in user. pub fn create_user(configuration: &configuration::Configuration, body: crate::models::User) -> Result<(), Error<CreateUserError>> { let local_var_client = &configuration.client; let local_var_uri_str = format!("{}/user", configuration.base_path); let mut local_var_req_builder = local_var_client.request(reqwest::Method::POST, local_var_uri_str.as_str()); if let Some(ref local_var_user_agent) = configuration.user_agent { local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone()); } local_var_req_builder = local_var_req_builder.json(&body); let local_var_req = local_var_req_builder.build()?; let mut local_var_resp = local_var_client.execute(local_var_req)?; let local_var_status = local_var_resp.status(); let local_var_content = local_var_resp.text()?; if !local_var_status.is_client_error() && !local_var_status.is_server_error() { Ok(()) } else { let local_var_entity: Option<CreateUserError> = serde_json::from_str(&local_var_content).ok(); let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity }; Err(Error::ResponseError(local_var_error)) } } pub fn create_users_with_array_input(configuration: &configuration::Configuration, body: Vec<crate::models::User>) -> Result<(), Error<CreateUsersWithArrayInputError>> { let local_var_client = &configuration.client; let local_var_uri_str = format!("{}/user/createWithArray", configuration.base_path); let mut local_var_req_builder = local_var_client.request(reqwest::Method::POST, local_var_uri_str.as_str()); if let Some(ref local_var_user_agent) = configuration.user_agent { local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone()); } local_var_req_builder = local_var_req_builder.json(&body); let local_var_req = local_var_req_builder.build()?; let mut local_var_resp = 
local_var_client.execute(local_var_req)?; let local_var_status = local_var_resp.status(); let local_var_content = local_var_resp.text()?; if !local_var_status.is_client_error() && !local_var_status.is_server_error() { Ok(()) } else { let local_var_entity: Option<CreateUsersWithArrayInputError> = serde_json::from_str(&local_var_content).ok(); let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity }; Err(Error::ResponseError(local_var_error)) } } pub fn create_users_with_list_input(configuration: &configuration::Configuration, body: Vec<crate::models::User>) -> Result<(), Error<CreateUsersWithListInputError>> { let local_var_client = &configuration.client; let local_var_uri_str = format!("{}/user/createWithList", configuration.base_path); let mut local_var_req_builder = local_var_client.request(reqwest::Method::POST, local_var_uri_str.as_str()); if let Some(ref local_var_user_agent) = configuration.user_agent { local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone()); } local_var_req_builder = local_var_req_builder.json(&body); let local_var_req = local_var_req_builder.build()?; let mut local_var_resp = local_var_client.execute(local_var_req)?; let local_var_status = local_var_resp.status(); let local_var_content = local_var_resp.text()?; if !local_var_status.is_client_error() && !local_var_status.is_server_error() { Ok(()) } else { let local_var_entity: Option<CreateUsersWithListInputError> = serde_json::from_str(&local_var_content).ok(); let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity }; Err(Error::ResponseError(local_var_error)) } } /// This can only be done by the logged in user. 
pub fn delete_user(configuration: &configuration::Configuration, username: &str) -> Result<(), Error<DeleteUserError>> { let local_var_client = &configuration.client; let local_var_uri_str = format!("{}/user/{username}", configuration.base_path, username=crate::apis::urlencode(username)); let mut local_var_req_builder = local_var_client.request(reqwest::Method::DELETE, local_var_uri_str.as_str()); if let Some(ref local_var_user_agent) = configuration.user_agent { local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone()); } let local_var_req = local_var_req_builder.build()?; let mut local_var_resp = local_var_client.execute(local_var_req)?; let local_var_status = local_var_resp.status(); let local_var_content = local_var_resp.text()?; if !local_var_status.is_client_error() && !local_var_status.is_server_error() { Ok(()) } else { let local_var_entity: Option<DeleteUserError> = serde_json::from_str(&local_var_content).ok(); let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity }; Err(Error::ResponseError(local_var_error)) } } pub fn get_user_by_name(configuration: &configuration::Configuration, username: &str) -> Result<crate::models::User, Error<GetUserByNameError>> { let local_var_client = &configuration.client; let local_var_uri_str = format!("{}/user/{username}", configuration.base_path, username=crate::apis::urlencode(username)); let mut local_var_req_builder = local_var_client.request(reqwest::Method::GET, local_var_uri_str.as_str()); if let Some(ref local_var_user_agent) = configuration.user_agent { local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone()); } let local_var_req = local_var_req_builder.build()?; let mut local_var_resp = local_var_client.execute(local_var_req)?; let local_var_status = local_var_resp.status(); let local_var_content = local_var_resp.text()?; if 
!local_var_status.is_client_error() && !local_var_status.is_server_error() { serde_json::from_str(&local_var_content).map_err(Error::from) } else { let local_var_entity: Option<GetUserByNameError> = serde_json::from_str(&local_var_content).ok(); let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity }; Err(Error::ResponseError(local_var_error)) } } pub fn login_user(configuration: &configuration::Configuration, username: &str, password: &str) -> Result<String, Error<LoginUserError>> { let local_var_client = &configuration.client; let local_var_uri_str = format!("{}/user/login", configuration.base_path); let mut local_var_req_builder = local_var_client.request(reqwest::Method::GET, local_var_uri_str.as_str()); local_var_req_builder = local_var_req_builder.query(&[("username", &username.to_string())]); local_var_req_builder = local_var_req_builder.query(&[("password", &password.to_string())]); if let Some(ref local_var_user_agent) = configuration.user_agent { local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone()); } let local_var_req = local_var_req_builder.build()?; let mut local_var_resp = local_var_client.execute(local_var_req)?; let local_var_status = local_var_resp.status(); let local_var_content = local_var_resp.text()?; if !local_var_status.is_client_error() && !local_var_status.is_server_error() { serde_json::from_str(&local_var_content).map_err(Error::from) } else { let local_var_entity: Option<LoginUserError> = serde_json::from_str(&local_var_content).ok(); let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity }; Err(Error::ResponseError(local_var_error)) } } pub fn logout_user(configuration: &configuration::Configuration, ) -> Result<(), Error<LogoutUserError>> { let local_var_client = &configuration.client; let local_var_uri_str = format!("{}/user/logout", 
configuration.base_path); let mut local_var_req_builder = local_var_client.request(reqwest::Method::GET, local_var_uri_str.as_str()); if let Some(ref local_var_user_agent) = configuration.user_agent { local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone()); } let local_var_req = local_var_req_builder.build()?; let mut local_var_resp = local_var_client.execute(local_var_req)?; let local_var_status = local_var_resp.status(); let local_var_content = local_var_resp.text()?; if !local_var_status.is_client_error() && !local_var_status.is_server_error() { Ok(()) } else { let local_var_entity: Option<LogoutUserError> = serde_json::from_str(&local_var_content).ok(); let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity }; Err(Error::ResponseError(local_var_error)) } } /// This can only be done by the logged in user. pub fn update_user(configuration: &configuration::Configuration, username: &str, body: crate::models::User) -> Result<(), Error<UpdateUserError>> { let local_var_client = &configuration.client; let local_var_uri_str = format!("{}/user/{username}", configuration.base_path, username=crate::apis::urlencode(username)); let mut local_var_req_builder = local_var_client.request(reqwest::Method::PUT, local_var_uri_str.as_str()); if let Some(ref local_var_user_agent) = configuration.user_agent { local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone()); } local_var_req_builder = local_var_req_builder.json(&body); let local_var_req = local_var_req_builder.build()?; let mut local_var_resp = local_var_client.execute(local_var_req)?; let local_var_status = local_var_resp.status(); let local_var_content = local_var_resp.text()?; if !local_var_status.is_client_error() && !local_var_status.is_server_error() { Ok(()) } else { let local_var_entity: Option<UpdateUserError> = 
serde_json::from_str(&local_var_content).ok(); let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity }; Err(Error::ResponseError(local_var_error)) } }
42.240924
169
0.740214
28b17885dc29fa9b2ca367abd79b0828cc1affbb
4,726
// //! Copyright 2020 Alibaba Group Holding Limited. //! //! Licensed under the Apache License, Version 2.0 (the "License"); //! you may not use this file except in compliance with the License. //! You may obtain a copy of the License at //! //! http://www.apache.org/licenses/LICENSE-2.0 //! //! Unless required by applicable law or agreed to in writing, software //! distributed under the License is distributed on an "AS IS" BASIS, //! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //! See the License for the specific language governing permissions and //! limitations under the License. use crate::v2::api::{VertexId, LabelId, PropertyId, EdgeId, Records, SerialId, SnapshotId}; use crate::v2::api::types::{Vertex, Edge, EdgeRelation}; use crate::v2::api::condition::Condition; use crate::v2::GraphResult; /// Snapshot of a graph partition. All the interfaces should be thread-safe pub trait PartitionSnapshot { type V: Vertex; type E: Edge; /// Returns the vertex entity of given `vertex_id`, properties are filtered /// by the `property_ids` optionally. /// /// If `label_id` is [`None`], all vertex labels will be searched and the /// first match vertex will be returned. fn get_vertex( &self, vertex_id: VertexId, label_id: Option<LabelId>, property_ids: Option<&Vec<PropertyId>>, ) -> GraphResult<Option<Self::V>>; /// Returns the edge entity of given `edge_id`, properties are filtered /// by the `property_ids` optionally. /// /// If `edge_relation` is [`None`], all edge relations will be searched and /// the first match edge will be returned. fn get_edge( &self, edge_id: EdgeId, edge_relation: Option<&EdgeRelation>, property_ids: Option<&Vec<PropertyId>>, ) -> GraphResult<Option<Self::E>>; /// Returns all vertices, filtered by `label_id` and `condition` /// optionally. /// /// Properties of the vertices are filtered by the `property_ids` /// optionally. 
fn scan_vertex( &self, label_id: Option<LabelId>, condition: Option<&Condition>, property_ids: Option<&Vec<PropertyId>>, ) -> GraphResult<Records<Self::V>>; /// Returns all edges, filtered by `label_id` and `condition` /// optionally. /// /// Properties of the edges are filtered by the `property_ids` optionally. fn scan_edge( &self, label_id: Option<LabelId>, condition: Option<&Condition>, property_ids: Option<&Vec<PropertyId>>, ) -> GraphResult<Records<Self::E>>; /// Returns out edges of vertex `vertex_id`, filtered by `label_id` and /// `condition` optionally. /// /// Properties of the edges are filtered by the `property_ids` optionally. fn get_out_edges( &self, vertex_id: VertexId, label_id: Option<LabelId>, condition: Option<&Condition>, property_ids: Option<&Vec<PropertyId>>, ) -> GraphResult<Records<Self::E>>; /// Returns in edges of vertex `vertex_id`, filtered by `label_id` and /// `condition` optionally. /// /// Properties of the edges are filtered by the `property_ids` optionally. fn get_in_edges( &self, vertex_id: VertexId, label_id: Option<LabelId>, condition: Option<&Condition>, property_ids: Option<&Vec<PropertyId>>, ) -> GraphResult<Records<Self::E>>; /// Returns the out-degree of vertex `vertex_id` in `edge_relation` fn get_out_degree( &self, vertex_id: VertexId, edge_relation: &EdgeRelation, ) -> GraphResult<usize>; /// Returns the in-degree of vertex `vertex_id` in `edge_relation` fn get_in_degree( &self, vertex_id: VertexId, edge_relation: &EdgeRelation, ) -> GraphResult<usize>; /// Returns the `k`th out edge of vertex `vertex_id` in `edge_relation`. /// /// Properties of the edge are filtered by the `property_ids` optionally. fn get_kth_out_edge( &self, vertex_id: VertexId, edge_relation: &EdgeRelation, k: SerialId, property_ids: Option<&Vec<PropertyId>>, ) -> GraphResult<Option<Self::E>>; /// Returns the `k`th in edge of vertex `vertex_id` in `edge_relation`. /// /// Properties of the edge are filtered by the `property_ids` optionally. 
fn get_kth_in_edge( &self, vertex_id: VertexId, edge_relation: &EdgeRelation, k: SerialId, property_ids: Option<&Vec<PropertyId>>, ) -> GraphResult<Option<Self::E>>; /// Returns the id of the snapshot fn get_snapshot_id(&self) -> SnapshotId; }
34.75
91
0.643673
bbceadd1544ffe26e1bf1238e9993ab452770ec8
152
extern crate cracken;

/// Binary entry point: delegate all work to the library runner.
/// On failure, report the error on stderr and terminate with exit code 2.
fn main() {
    match cracken::runner::run(None) {
        Ok(_) => {}
        Err(e) => {
            eprintln!("{}", &e);
            std::process::exit(2);
        }
    }
}
16.888889
48
0.506579
acd198d8a0a8c8907207c871244d8c20b5925eff
4,227
use crate::{Module, Config}; use frame_system as system; use sp_core::H256; use frame_support::{impl_outer_origin, impl_outer_event, parameter_types, weights::Weight}; use sp_runtime::{ Perbill, ModuleId, testing::Header, traits::{BlakeTwo256, IdentityLookup}, }; impl_outer_origin! { pub enum Origin for Test {} } mod moloch_v2 { pub use crate::Event; } impl_outer_event! { pub enum Event for Test { system<T>, pallet_balances<T>, moloch_v2<T>, } } // Configure a mock runtime to test the pallet. #[derive(Clone, Eq, PartialEq)] pub struct Test; parameter_types! { pub const BlockHashCount: u64 = 250; pub const MaximumBlockWeight: Weight = 1024; pub const MaximumBlockLength: u32 = 2 * 1024; pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75); } parameter_types! { // for testing, set max to 10**8 pub const MolochV2ModuleId: ModuleId = ModuleId(*b"py/moloc"); // HARD-CODED LIMITS // These numbers are quite arbitrary; they are small enough to avoid overflows when doing calculations // with periods or shares, yet big enough to not limit reasonable use cases. 
pub const MaxVotingPeriodLength: u128 = 100_000_000; // maximum length of voting period pub const MaxGracePeriodLength: u128 = 100_000_000; // maximum length of grace period pub const MaxDilutionBound: u128 = 100_000_000; // maximum dilution bound pub const MaxShares: u128 = 100_000_000; // maximum number of shares that can be minted } impl system::Trait for Test { type BaseCallFilter = (); type Origin = Origin; type Call = (); type Index = u64; type BlockNumber = u64; type Hash = H256; type Hashing = BlakeTwo256; type AccountId = u64; type Lookup = IdentityLookup<Self::AccountId>; type Header = Header; type Event = Event; type BlockHashCount = BlockHashCount; type MaximumBlockWeight = MaximumBlockWeight; type DbWeight = (); type BlockExecutionWeight = (); type ExtrinsicBaseWeight = (); type MaximumExtrinsicWeight = MaximumBlockWeight; type MaximumBlockLength = MaximumBlockLength; type AvailableBlockRatio = AvailableBlockRatio; type Version = (); type PalletInfo = (); type AccountData = pallet_balances::AccountData<u64>; type OnNewAccount = (); type OnKilledAccount = (); type SystemWeightInfo = (); } parameter_types! { pub const ExistentialDeposit: u64 = 1; } impl pallet_balances::Trait for Test { type MaxLocks = (); type Balance = u64; type Event = Event; type DustRemoval = (); type ExistentialDeposit = ExistentialDeposit; type AccountStore = System; type WeightInfo = (); } impl pallet_timestamp::Trait for Test { type Moment = u64; type OnTimestampSet = (); type MinimumPeriod = (); type WeightInfo = (); } impl Config for Test { type ModuleId = MolochV2ModuleId; // The Balances pallet implements the ReservableCurrency trait. // https://substrate.dev/rustdocs/v2.0.0/pallet_balances/index.html#implementations-2 type Currency = pallet_balances::Module<Test>; // No action is taken when deposits are forfeited. 
type Slashed = (); type Event = Event; type AdminOrigin = frame_system::EnsureRoot<u64>; // maximum length of voting period type MaxVotingPeriodLength = MaxVotingPeriodLength; // maximum length of grace period type MaxGracePeriodLength = MaxGracePeriodLength; // maximum dilution bound type MaxDilutionBound = MaxDilutionBound; // maximum number of shares type MaxShares = MaxShares; } pub type System = frame_system::Module<Test>; pub type Balances = pallet_balances::Module<Test>; pub type MolochV2 = Module<Test>; pub type Timestamp = pallet_timestamp::Module<Test>; // Build genesis storage according to the mock runtime. pub fn new_test_ext() -> sp_io::TestExternalities { // system::GenesisConfig::default().build_storage::<Test>().unwrap().into() let mut t = system::GenesisConfig::default().build_storage::<Test>().unwrap(); pallet_balances::GenesisConfig::<Test>{ // Total issuance will be 1000 with internal account initialized at ED. balances: vec![(0, 3000), (1, 3000), (2, 3000), (3, 3000), (4, 3000), (5, 3000)], }.assimilate_storage(&mut t).unwrap(); system::GenesisConfig::default().assimilate_storage::<Test>(&mut t).unwrap(); t.into() }
30.410072
106
0.729832
0966060a260f547dc2bd96159394959f98fbc5ff
3,172
use std::cmp::min;

use colored::Colorize;
use stork_lib::{HighlightRange, Output};
use textwrap::termwidth;

/// Return `string` with the text inside each of `ranges` recolored yellow
/// for terminal display; text outside the ranges is copied verbatim.
///
/// Assumes `ranges` are sorted by `beginning` and non-overlapping — TODO
/// confirm against stork_lib; out-of-bounds ranges will panic on slicing.
fn highlight_string(string: &str, ranges: &[HighlightRange]) -> String {
    let mut highlighted = String::new();
    let mut last_end = 0;
    for range in ranges {
        highlighted.push_str(&string[last_end..range.beginning]);
        highlighted.push_str(&string[range.beginning..range.end].yellow().to_string());
        last_end = range.end;
    }
    highlighted.push_str(&string[last_end..]);
    highlighted
}

/// Render `results` as colored, wrapped text for the terminal: each entry's
/// title (bold green) and prefixed URL, followed by its excerpts with query
/// matches highlighted, and a trailing total-hit summary line.
pub fn pretty_print_search_results(results: &Output) -> String {
    let mut output = String::new();

    // Wrap excerpts to the terminal width, capped at 120 columns.
    let textwrap_options = textwrap::Options::new(min(120, termwidth()))
        .initial_indent(" - ")
        .subsequent_indent(" ");

    for result in results.results.iter() {
        output.push_str(&format!(
            "{}\n<{}{}>",
            result.entry.title.bold().green(),
            results.url_prefix,
            result.entry.url
        ));

        for excerpt in result.excerpts.iter() {
            output.push_str(&format!(
                "\n{}",
                textwrap::fill(
                    &highlight_string(&excerpt.text, &excerpt.highlight_ranges),
                    &textwrap_options
                )
            ));
        }

        output.push_str("\n\n");
    }

    output.push_str(&format!(
        "{} total results available",
        results.total_hit_count
    ));

    output
}

#[cfg(test)]
mod tests {
    use std::collections::HashMap;

    use super::*;
    use pretty_assertions::assert_eq;

    #[test]
    fn display_pretty_search_results_given_output() {
        let results = Output {
            results: vec![stork_lib::StorkResult {
                entry: stork_lib::Entry {
                    title: "Some Document Title".to_string(),
                    url: "https://example.com".to_string(),
                    fields: HashMap::new(),
                },
                score: 25,
                excerpts: vec![stork_lib::Excerpt {
                    text: "This is the excerpt of the text".to_string(),
                    highlight_ranges: vec![stork_lib::HighlightRange {
                        beginning: 0,
                        end: 1,
                    }],
                    internal_annotations: vec![stork_lib::InternalWordAnnotation::UrlSuffix(
                        "#25".to_string(),
                    )],
                    fields: HashMap::new(),
                    score: 12,
                }],
                title_highlight_ranges: vec![stork_lib::HighlightRange {
                    beginning: 0,
                    end: 5,
                }],
            }],
            total_hit_count: 21,
            url_prefix: "".to_string(),
        };

        assert_eq!(
            pretty_print_search_results(&results),
            format!(
                "{}{}{}{}",
                "Some Document Title".bold().green(),
                "\n<https://example.com>\n - ",
                "T".yellow(),
                "his is the excerpt of the text\n\n21 total results available".normal()
            )
        );
    }
}
30.5
92
0.497793
5033e4fe5fd51cf16147444d06b0ae6d837c597c
28,380
// LNP/BP Core Library implementing LNPBP specifications & standards // Written in 2020-2022 by // Dr. Maxim Orlovsky <[email protected]> // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to // the public domain worldwide. This software is distributed without // any warranty. // // You should have received a copy of the MIT License // along with this software. // If not, see <https://opensource.org/licenses/MIT>. use std::collections::BTreeMap; use bitcoin::blockdata::opcodes::all::*; use bitcoin::blockdata::script; use bitcoin::secp256k1::PublicKey; use bitcoin::{OutPoint, Transaction, TxIn, TxOut}; use lnp2p::legacy::{ChannelId, Messages}; use p2p::legacy::ChannelType; use wallet::hlc::{HashLock, HashPreimage}; use wallet::psbt; use wallet::psbt::{Psbt, PsbtVersion}; use wallet::scripts::{LockScript, PubkeyScript, WitnessScript}; use crate::channel::bolt::util::UpdateReq; use crate::channel::bolt::{BoltExt, ChannelState, Error, TxType}; use crate::channel::tx_graph::TxGraph; use crate::{ChannelExtension, Extension}; #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] #[derive(StrictEncode, StrictDecode)] #[cfg_attr( feature = "serde", derive(Serialize, Deserialize), serde(crate = "serde_crate") )] pub struct HtlcKnown { pub amount: u64, pub preimage: HashPreimage, pub id: u64, pub cltv_expiry: u32, } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] #[derive(StrictEncode, StrictDecode)] #[cfg_attr( feature = "serde", derive(Serialize, Deserialize), serde(crate = "serde_crate") )] pub struct HtlcSecret { pub amount: u64, pub hashlock: HashLock, pub id: u64, pub cltv_expiry: u32, } #[derive(Getters, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] #[derive(StrictEncode, StrictDecode)] pub struct Htlc { /// Set if the feature `option_anchors_zero_fee_htlc_tx` was negotiated via /// `channel_type`. 
Indicates that HTLC transactions will use zero fees and /// will be pushed through an anchor transaction. anchors_zero_fee_htlc_tx: bool, // Sets of HTLC informations offered_htlcs: BTreeMap<u64, HtlcSecret>, received_htlcs: BTreeMap<u64, HtlcSecret>, resolved_htlcs: BTreeMap<u64, HtlcKnown>, // Commitment round specific information to_self_delay: u16, local_revocation_basepoint: PublicKey, remote_revocation_basepoint: PublicKey, local_basepoint: PublicKey, remote_basepoint: PublicKey, local_delayed_basepoint: PublicKey, // Channel specific information channel_id: ChannelId, /// indicates the smallest value HTLC this node will accept. htlc_minimum_msat: u64, max_htlc_value_in_flight_msat: u64, max_accepted_htlcs: u16, next_recieved_htlc_id: u64, next_offered_htlc_id: u64, } impl Default for Htlc { fn default() -> Self { Htlc { anchors_zero_fee_htlc_tx: false, offered_htlcs: empty!(), received_htlcs: empty!(), resolved_htlcs: empty!(), to_self_delay: 0, local_revocation_basepoint: dumb_pubkey!(), remote_revocation_basepoint: dumb_pubkey!(), local_basepoint: dumb_pubkey!(), remote_basepoint: dumb_pubkey!(), local_delayed_basepoint: dumb_pubkey!(), channel_id: Default::default(), htlc_minimum_msat: 0, max_htlc_value_in_flight_msat: 0, max_accepted_htlcs: 0, next_recieved_htlc_id: 0, next_offered_htlc_id: 0, } } } impl Htlc { pub fn offer_htlc( &mut self, amount_msat: u64, payment_hash: HashLock, cltv_expiry: u32, ) -> u64 { let htlc_id = self.next_offered_htlc_id; self.next_offered_htlc_id += 1; self.offered_htlcs.insert(htlc_id, HtlcSecret { amount: amount_msat, hashlock: payment_hash, id: htlc_id, cltv_expiry, }); htlc_id } } impl Extension<BoltExt> for Htlc { fn identity(&self) -> BoltExt { BoltExt::Htlc } fn update_from_local(&mut self, _message: &()) -> Result<(), Error> { // Nothing to do here so far Ok(()) } fn state_change( &mut self, request: &UpdateReq, message: &mut Messages, ) -> Result<(), Error> { match (request, message) { ( UpdateReq::PayBolt(_), 
Messages::UpdateAddHtlc(update_add_htlc), ) => { let htlc_id = self.offer_htlc( update_add_htlc.amount_msat, update_add_htlc.payment_hash, update_add_htlc.cltv_expiry, ); update_add_htlc.htlc_id = htlc_id; } (UpdateReq::PayBolt(_), _) => unreachable!( "state change request must match provided LN P2P message" ), } Ok(()) } fn update_from_peer(&mut self, message: &Messages) -> Result<(), Error> { match message { Messages::OpenChannel(open_channel) => { self.anchors_zero_fee_htlc_tx = open_channel .channel_type .map(ChannelType::has_anchors_zero_fee_htlc_tx) .unwrap_or_default(); self.htlc_minimum_msat = open_channel.htlc_minimum_msat; self.max_accepted_htlcs = open_channel.max_accepted_htlcs; self.max_htlc_value_in_flight_msat = open_channel.max_htlc_value_in_flight_msat; self.remote_basepoint = open_channel.htlc_basepoint; self.remote_revocation_basepoint = open_channel.revocation_basepoint; self.local_delayed_basepoint = open_channel.delayed_payment_basepoint; self.to_self_delay = open_channel.to_self_delay; } Messages::AcceptChannel(accept_channel) => { self.anchors_zero_fee_htlc_tx = accept_channel .channel_type .map(ChannelType::has_anchors_zero_fee_htlc_tx) .unwrap_or_default(); self.htlc_minimum_msat = accept_channel.htlc_minimum_msat; self.max_accepted_htlcs = accept_channel.max_accepted_htlcs; self.max_htlc_value_in_flight_msat = accept_channel.max_htlc_value_in_flight_msat; self.remote_basepoint = accept_channel.htlc_basepoint; self.remote_revocation_basepoint = accept_channel.revocation_basepoint; self.local_delayed_basepoint = accept_channel.delayed_payment_basepoint; self.to_self_delay = accept_channel.to_self_delay; } Messages::UpdateAddHtlc(message) => { // TODO: Filter messages by channel_id at channel level with // special API if message.channel_id == self.channel_id { // Checks // 1. sending node should afford current fee rate after // adding this htlc to its local // commitment including anchor outputs // if opt in. 
if message.amount_msat == 0 || message.amount_msat < self.htlc_minimum_msat { return Err(Error::Htlc( "amount_msat has to be greater than 0".to_string(), )); } else if self.received_htlcs.len() >= self.max_accepted_htlcs as usize { return Err(Error::Htlc( "max no. of HTLC limit exceeded".to_string(), )); } else if message.cltv_expiry > 500000000 { return Err(Error::Htlc( "cltv_expiry limit exceeded".to_string(), )); } else if message.amount_msat.leading_zeros() < 32 { return Err(Error::Htlc( "Leading zeros not satisfied for Bitcoin network" .to_string(), )); } else if message.htlc_id <= self.next_recieved_htlc_id { return Err(Error::Htlc( "HTLC id violation occurred".to_string(), )); // TODO handle reconnection } else { let htlc = HtlcSecret { amount: message.amount_msat, hashlock: message.payment_hash, id: message.htlc_id, cltv_expiry: message.cltv_expiry, }; self.received_htlcs.insert(htlc.id, htlc); self.next_recieved_htlc_id += 1; } } else { return Err(Error::Htlc( "Missmatched channel_id, bad remote node".to_string(), )); } } Messages::UpdateFulfillHtlc(message) => { if message.channel_id == self.channel_id { // Get the corresponding offered htlc let offered_htlc = self.received_htlcs.get(&message.htlc_id).ok_or_else( || Error::Htlc("HTLC id didn't match".to_string()), )?; // Check for correct hash preimage in the message if offered_htlc.hashlock == HashLock::from(message.payment_preimage) { self.offered_htlcs.remove(&message.htlc_id); let resolved_htlc = HtlcKnown { amount: offered_htlc.amount, preimage: message.payment_preimage, id: message.htlc_id, cltv_expiry: offered_htlc.cltv_expiry, }; self.resolved_htlcs .insert(message.htlc_id, resolved_htlc); } } else { return Err(Error::Htlc( "Missmatched channel_id, bad remote node".to_string(), )); } } Messages::UpdateFailHtlc(message) => { if message.channel_id == self.channel_id { self.offered_htlcs.remove(&message.htlc_id); // TODO the failure reason should be handled here } } 
Messages::UpdateFailMalformedHtlc(_) => {} Messages::CommitmentSigned(_) => {} Messages::RevokeAndAck(_) => {} Messages::ChannelReestablish(_) => {} _ => {} } Ok(()) } fn load_state(&mut self, state: &ChannelState) { self.anchors_zero_fee_htlc_tx = state .common_params .channel_type .has_anchors_zero_fee_htlc_tx(); self.offered_htlcs = state.offered_htlcs.clone(); self.received_htlcs = state.received_htlcs.clone(); self.resolved_htlcs = state.resolved_htlcs.clone(); self.to_self_delay = state.remote_params.to_self_delay; self.local_revocation_basepoint = state.local_keys.revocation_basepoint.key; self.remote_revocation_basepoint = state.remote_keys.revocation_basepoint; self.local_basepoint = state.local_keys.payment_basepoint.key; self.remote_basepoint = state.remote_keys.payment_basepoint; self.local_delayed_basepoint = state.local_keys.delayed_payment_basepoint.key; self.channel_id = state.active_channel_id.as_slice32().into(); self.htlc_minimum_msat = state.remote_params.htlc_minimum_msat; self.max_htlc_value_in_flight_msat = state.remote_params.max_htlc_value_in_flight_msat; self.max_accepted_htlcs = state.remote_params.max_accepted_htlcs; self.next_recieved_htlc_id = state.last_recieved_htlc_id; self.next_offered_htlc_id = state.last_offered_htlc_id; } fn store_state(&self, state: &mut ChannelState) { state.offered_htlcs = self.offered_htlcs.clone(); state.received_htlcs = self.received_htlcs.clone(); state.resolved_htlcs = self.resolved_htlcs.clone(); state.last_recieved_htlc_id = self.next_recieved_htlc_id; state.last_offered_htlc_id = self.next_offered_htlc_id; } } impl ChannelExtension<BoltExt> for Htlc { #[inline] fn new() -> Box<dyn ChannelExtension<BoltExt>> where Self: Sized, { Box::new(Htlc::default()) } fn build_graph( &self, tx_graph: &mut TxGraph, _as_remote_node: bool, ) -> Result<(), Error> { // Process offered HTLCs for (index, offered) in self.offered_htlcs.iter() { let htlc_output = ScriptGenerators::ln_offered_htlc( offered.amount, 
self.remote_revocation_basepoint, self.local_basepoint, self.remote_basepoint, offered.hashlock, ); tx_graph.cmt_outs.push(htlc_output); // Should htlc outputs be inside graph.cmt? let htlc_tx = Psbt::ln_htlc( offered.amount, // TODO: do a two-staged graph generation process OutPoint::default(), offered.cltv_expiry, self.remote_revocation_basepoint, self.local_delayed_basepoint, self.to_self_delay, ); // Last index of transaction in graph let last_index = tx_graph.last_index(TxType::HtlcTimeout) + 1; tx_graph.insert_tx( TxType::HtlcTimeout, last_index as u64 + index, htlc_tx, ); } // Process received HTLCs for (index, recieved) in self.received_htlcs.iter() { let htlc_output = ScriptGenerators::ln_received_htlc( recieved.amount, self.remote_revocation_basepoint, self.local_basepoint, self.remote_basepoint, recieved.cltv_expiry, recieved.hashlock, ); tx_graph.cmt_outs.push(htlc_output); let htlc_tx = Psbt::ln_htlc( recieved.amount, // TODO: do a two-staged graph generation process OutPoint::default(), recieved.cltv_expiry, self.remote_revocation_basepoint, self.local_delayed_basepoint, self.to_self_delay, ); // Figure out the last index of transaction in graph let last_index = tx_graph.last_index(TxType::HtlcSuccess) + 1; tx_graph.insert_tx( TxType::HtlcSuccess, last_index as u64 + index, htlc_tx, ); } Ok(()) } } pub trait ScriptGenerators { fn ln_offered_htlc( amount: u64, revocationpubkey: PublicKey, local_htlcpubkey: PublicKey, remote_htlcpubkey: PublicKey, payment_hash: HashLock, ) -> Self; fn ln_received_htlc( amount: u64, revocationpubkey: PublicKey, local_htlcpubkey: PublicKey, remote_htlcpubkey: PublicKey, cltv_expiry: u32, payment_hash: HashLock, ) -> Self; fn ln_htlc_output( amount: u64, revocationpubkey: PublicKey, local_delayedpubkey: PublicKey, to_self_delay: u16, ) -> Self; } impl ScriptGenerators for LockScript { fn ln_offered_htlc( _: u64, revocationpubkey: PublicKey, local_htlcpubkey: PublicKey, remote_htlcpubkey: PublicKey, payment_hash: HashLock, 
) -> Self { script::Builder::new() .push_opcode(OP_DUP) .push_opcode(OP_HASH160) .push_slice( &bitcoin::PublicKey::new(revocationpubkey).pubkey_hash(), ) .push_opcode(OP_EQUAL) .push_opcode(OP_IF) .push_opcode(OP_CHECKSIG) .push_opcode(OP_ELSE) .push_key(&bitcoin::PublicKey::new(remote_htlcpubkey)) .push_opcode(OP_SWAP) .push_opcode(OP_SIZE) .push_int(32) .push_opcode(OP_EQUAL) .push_opcode(OP_NOTIF) .push_opcode(OP_DROP) .push_int(2) .push_opcode(OP_SWAP) .push_key(&bitcoin::PublicKey::new(local_htlcpubkey)) .push_int(2) .push_opcode(OP_CHECKMULTISIG) .push_opcode(OP_ELSE) .push_opcode(OP_HASH160) .push_slice(payment_hash.as_ref()) .push_opcode(OP_EQUALVERIFY) .push_opcode(OP_CHECKSIG) .push_opcode(OP_ENDIF) .push_opcode(OP_ENDIF) .into_script() .into() } fn ln_received_htlc( _: u64, revocationpubkey: PublicKey, local_htlcpubkey: PublicKey, remote_htlcpubkey: PublicKey, cltv_expiry: u32, payment_hash: HashLock, ) -> Self { script::Builder::new() .push_opcode(OP_DUP) .push_opcode(OP_HASH160) .push_slice( &bitcoin::PublicKey::new(revocationpubkey).pubkey_hash(), ) .push_opcode(OP_EQUAL) .push_opcode(OP_IF) .push_opcode(OP_CHECKSIG) .push_opcode(OP_ELSE) .push_key(&bitcoin::PublicKey::new(remote_htlcpubkey)) .push_opcode(OP_SWAP) .push_opcode(OP_SIZE) .push_int(32) .push_opcode(OP_EQUAL) .push_opcode(OP_IF) .push_opcode(OP_HASH160) .push_slice(payment_hash.as_ref()) .push_opcode(OP_EQUALVERIFY) .push_int(2) .push_opcode(OP_SWAP) .push_key(&bitcoin::PublicKey::new(local_htlcpubkey)) .push_int(2) .push_opcode(OP_CHECKMULTISIG) .push_opcode(OP_ELSE) .push_opcode(OP_DROP) .push_int(cltv_expiry as i64) .push_opcode(OP_CLTV) .push_opcode(OP_DROP) .push_opcode(OP_CHECKSIG) .push_opcode(OP_ENDIF) .push_opcode(OP_ENDIF) .into_script() .into() } fn ln_htlc_output( _: u64, revocationpubkey: PublicKey, local_delayedpubkey: PublicKey, to_self_delay: u16, ) -> Self { script::Builder::new() .push_opcode(OP_IF) .push_key(&bitcoin::PublicKey::new(revocationpubkey)) 
.push_opcode(OP_ELSE) .push_int(to_self_delay as i64) .push_opcode(OP_CSV) .push_opcode(OP_DROP) .push_key(&bitcoin::PublicKey::new(local_delayedpubkey)) .push_opcode(OP_ENDIF) .push_opcode(OP_CHECKSIG) .into_script() .into() } } impl ScriptGenerators for WitnessScript { #[inline] fn ln_offered_htlc( amount: u64, revocationpubkey: PublicKey, local_htlcpubkey: PublicKey, remote_htlcpubkey: PublicKey, payment_hash: HashLock, ) -> Self { LockScript::ln_offered_htlc( amount, revocationpubkey, local_htlcpubkey, remote_htlcpubkey, payment_hash, ) .into() } #[inline] fn ln_received_htlc( amount: u64, revocationpubkey: PublicKey, local_htlcpubkey: PublicKey, remote_htlcpubkey: PublicKey, cltv_expiry: u32, payment_hash: HashLock, ) -> Self { LockScript::ln_received_htlc( amount, revocationpubkey, local_htlcpubkey, remote_htlcpubkey, cltv_expiry, payment_hash, ) .into() } #[inline] fn ln_htlc_output( amount: u64, revocationpubkey: PublicKey, local_delayedpubkey: PublicKey, to_self_delay: u16, ) -> Self { LockScript::ln_htlc_output( amount, revocationpubkey, local_delayedpubkey, to_self_delay, ) .into() } } impl ScriptGenerators for PubkeyScript { #[inline] fn ln_offered_htlc( amount: u64, revocationpubkey: PublicKey, local_htlcpubkey: PublicKey, remote_htlcpubkey: PublicKey, payment_hash: HashLock, ) -> Self { WitnessScript::ln_offered_htlc( amount, revocationpubkey, local_htlcpubkey, remote_htlcpubkey, payment_hash, ) .to_p2wsh() } #[inline] fn ln_received_htlc( amount: u64, revocationpubkey: PublicKey, local_htlcpubkey: PublicKey, remote_htlcpubkey: PublicKey, cltv_expiry: u32, payment_hash: HashLock, ) -> Self { WitnessScript::ln_received_htlc( amount, revocationpubkey, local_htlcpubkey, remote_htlcpubkey, cltv_expiry, payment_hash, ) .to_p2wsh() } #[inline] fn ln_htlc_output( amount: u64, revocationpubkey: PublicKey, local_delayedpubkey: PublicKey, to_self_delay: u16, ) -> Self { WitnessScript::ln_htlc_output( amount, revocationpubkey, local_delayedpubkey, to_self_delay, 
) .to_p2wsh() } } impl ScriptGenerators for TxOut { #[inline] fn ln_offered_htlc( amount: u64, revocationpubkey: PublicKey, local_htlcpubkey: PublicKey, remote_htlcpubkey: PublicKey, payment_hash: HashLock, ) -> Self { TxOut { value: amount, script_pubkey: PubkeyScript::ln_offered_htlc( amount, revocationpubkey, local_htlcpubkey, remote_htlcpubkey, payment_hash, ) .into(), } } #[inline] fn ln_received_htlc( amount: u64, revocationpubkey: PublicKey, local_htlcpubkey: PublicKey, remote_htlcpubkey: PublicKey, cltv_expiry: u32, payment_hash: HashLock, ) -> Self { TxOut { value: amount, script_pubkey: PubkeyScript::ln_received_htlc( amount, revocationpubkey, local_htlcpubkey, remote_htlcpubkey, cltv_expiry, payment_hash, ) .into(), } } #[inline] fn ln_htlc_output( amount: u64, revocationpubkey: PublicKey, local_delayedpubkey: PublicKey, to_self_delay: u16, ) -> Self { TxOut { value: amount, script_pubkey: PubkeyScript::ln_htlc_output( amount, revocationpubkey, local_delayedpubkey, to_self_delay, ) .into(), } } } impl ScriptGenerators for psbt::Output { #[inline] fn ln_offered_htlc( amount: u64, revocationpubkey: PublicKey, local_htlcpubkey: PublicKey, remote_htlcpubkey: PublicKey, payment_hash: HashLock, ) -> Self { let witness_script = WitnessScript::ln_offered_htlc( amount, revocationpubkey, local_htlcpubkey, remote_htlcpubkey, payment_hash, ) .into(); let txout = TxOut::ln_offered_htlc( amount, revocationpubkey, local_htlcpubkey, remote_htlcpubkey, payment_hash, ); let output = bitcoin::psbt::Output { witness_script: Some(witness_script), ..Default::default() }; psbt::Output::with(0, output, txout) } #[inline] fn ln_received_htlc( amount: u64, revocationpubkey: PublicKey, local_htlcpubkey: PublicKey, remote_htlcpubkey: PublicKey, cltv_expiry: u32, payment_hash: HashLock, ) -> Self { let witness_script = WitnessScript::ln_received_htlc( amount, revocationpubkey, local_htlcpubkey, remote_htlcpubkey, cltv_expiry, payment_hash, ) .into(); let txout = 
TxOut::ln_received_htlc( amount, revocationpubkey, local_htlcpubkey, remote_htlcpubkey, cltv_expiry, payment_hash, ); let output = bitcoin::psbt::Output { witness_script: Some(witness_script), ..Default::default() }; psbt::Output::with(0, output, txout) } #[inline] fn ln_htlc_output( amount: u64, revocationpubkey: PublicKey, local_delayedpubkey: PublicKey, to_self_delay: u16, ) -> Self { let witness_script = WitnessScript::ln_htlc_output( amount, revocationpubkey, local_delayedpubkey, to_self_delay, ) .into(); let txout = TxOut::ln_htlc_output( amount, revocationpubkey, local_delayedpubkey, to_self_delay, ); let output = bitcoin::psbt::Output { witness_script: Some(witness_script), ..Default::default() }; psbt::Output::with(0, output, txout) } } pub trait TxGenerators { fn ln_htlc( amount: u64, outpoint: OutPoint, cltv_expiry: u32, revocationpubkey: PublicKey, local_delayedpubkey: PublicKey, to_self_delay: u16, ) -> Self; } impl TxGenerators for Transaction { /// NB: For HTLC Success transaction always set `cltv_expiry` parameter /// to zero! 
fn ln_htlc( amount: u64, outpoint: OutPoint, cltv_expiry: u32, revocationpubkey: PublicKey, local_delayedpubkey: PublicKey, to_self_delay: u16, ) -> Self { let txout = TxOut::ln_htlc_output( amount, revocationpubkey, local_delayedpubkey, to_self_delay, ); Transaction { version: 2, lock_time: cltv_expiry, input: vec![TxIn { previous_output: outpoint, script_sig: none!(), sequence: 0, witness: empty!(), }], output: vec![txout], } } } impl TxGenerators for Psbt { fn ln_htlc( amount: u64, outpoint: OutPoint, cltv_expiry: u32, revocationpubkey: PublicKey, local_delayedpubkey: PublicKey, to_self_delay: u16, ) -> Self { let output = psbt::Output::ln_htlc_output( amount, revocationpubkey, local_delayedpubkey, to_self_delay, ); let mut psbt = Psbt::with( Transaction::ln_htlc( amount, outpoint, cltv_expiry, revocationpubkey, local_delayedpubkey, to_self_delay, ), PsbtVersion::V0, ) .expect("Tx has empty sigs so PSBT creation does not fail"); psbt.outputs[0] = output; psbt } }
31.92351
92
0.550282
e60bb38babd9722645fe3cde2a0f40d892449cb0
6,726
//! Helper functions and an extension trait for Ethereum 2 ENRs. pub use discv5::enr::{self, CombinedKey, EnrBuilder}; use super::enr_ext::CombinedKeyExt; use super::ENR_FILENAME; use crate::types::{Enr, EnrBitfield}; use crate::NetworkConfig; use libp2p::core::identity::Keypair; use slog::{debug, warn}; use ssz::{Decode, Encode}; use ssz_types::BitVector; use std::fs::File; use std::io::prelude::*; use std::path::Path; use std::str::FromStr; use types::{EnrForkId, EthSpec}; /// The ENR field specifying the fork id. pub const ETH2_ENR_KEY: &str = "eth2"; /// The ENR field specifying the subnet bitfield. pub const BITFIELD_ENR_KEY: &str = "attnets"; /// Extension trait for ENR's within Eth2. pub trait Eth2Enr { /// The subnet bitfield associated with the ENR. fn bitfield<TSpec: EthSpec>(&self) -> Result<EnrBitfield<TSpec>, &'static str>; fn eth2(&self) -> Result<EnrForkId, &'static str>; } impl Eth2Enr for Enr { fn bitfield<TSpec: EthSpec>(&self) -> Result<EnrBitfield<TSpec>, &'static str> { let bitfield_bytes = self .get(BITFIELD_ENR_KEY) .ok_or_else(|| "ENR bitfield non-existent")?; BitVector::<TSpec::SubnetBitfieldLength>::from_ssz_bytes(bitfield_bytes) .map_err(|_| "Could not decode the ENR SSZ bitfield") } fn eth2(&self) -> Result<EnrForkId, &'static str> { let eth2_bytes = self .get(ETH2_ENR_KEY) .ok_or_else(|| "ENR has no eth2 field")?; EnrForkId::from_ssz_bytes(eth2_bytes).map_err(|_| "Could not decode EnrForkId") } } /// Loads an ENR from file if it exists and matches the current NodeId and sequence number. If none /// exists, generates a new one. /// /// If an ENR exists, with the same NodeId, this function checks to see if the loaded ENR from /// disk is suitable to use, otherwise we increment our newly generated ENR's sequence number. pub fn build_or_load_enr<T: EthSpec>( local_key: Keypair, config: &NetworkConfig, enr_fork_id: EnrForkId, log: &slog::Logger, ) -> Result<Enr, String> { // Build the local ENR. 
// Note: Discovery should update the ENR record's IP to the external IP as seen by the // majority of our peers, if the CLI doesn't expressly forbid it. let enr_key = CombinedKey::from_libp2p(&local_key)?; let mut local_enr = build_enr::<T>(&enr_key, config, enr_fork_id)?; let enr_f = config.network_dir.join(ENR_FILENAME); if let Ok(mut enr_file) = File::open(enr_f.clone()) { let mut enr_string = String::new(); match enr_file.read_to_string(&mut enr_string) { Err(_) => debug!(log, "Could not read ENR from file"), Ok(_) => { match Enr::from_str(&enr_string) { Ok(disk_enr) => { // if the same node id, then we may need to update our sequence number if local_enr.node_id() == disk_enr.node_id() { if compare_enr(&local_enr, &disk_enr) { debug!(log, "ENR loaded from disk"; "file" => format!("{:?}", enr_f)); // the stored ENR has the same configuration, use it return Ok(disk_enr); } // same node id, different configuration - update the sequence number let new_seq_no = disk_enr.seq().checked_add(1).ok_or_else(|| "ENR sequence number on file is too large. Remove it to generate a new NodeId")?; local_enr.set_seq(new_seq_no, &enr_key).map_err(|e| { format!("Could not update ENR sequence number: {:?}", e) })?; debug!(log, "ENR sequence number increased"; "seq" => new_seq_no); } } Err(e) => { warn!(log, "ENR from file could not be decoded"; "error" => format!("{:?}", e)); } } } } } save_enr_to_disk(&config.network_dir, &local_enr, log); Ok(local_enr) } /// Builds a lighthouse ENR given a `NetworkConfig`. 
pub fn build_enr<T: EthSpec>( enr_key: &CombinedKey, config: &NetworkConfig, enr_fork_id: EnrForkId, ) -> Result<Enr, String> { let mut builder = EnrBuilder::new("v4"); if let Some(enr_address) = config.enr_address { builder.ip(enr_address); } if let Some(udp_port) = config.enr_udp_port { builder.udp(udp_port); } // we always give it our listening tcp port // TODO: Add uPnP support to map udp and tcp ports let tcp_port = config.enr_tcp_port.unwrap_or_else(|| config.libp2p_port); builder.tcp(tcp_port); // set the `eth2` field on our ENR builder.add_value(ETH2_ENR_KEY.into(), enr_fork_id.as_ssz_bytes()); // set the "attnets" field on our ENR let bitfield = BitVector::<T::SubnetBitfieldLength>::new(); builder.add_value(BITFIELD_ENR_KEY.into(), bitfield.as_ssz_bytes()); builder .tcp(config.libp2p_port) .build(enr_key) .map_err(|e| format!("Could not build Local ENR: {:?}", e)) } /// Defines the conditions under which we use the locally built ENR or the one stored on disk. /// If this function returns true, we use the `disk_enr`. fn compare_enr(local_enr: &Enr, disk_enr: &Enr) -> bool { // take preference over disk_enr address if one is not specified (local_enr.ip().is_none() || local_enr.ip() == disk_enr.ip()) // tcp ports must match && local_enr.tcp() == disk_enr.tcp() // must match on the same fork && local_enr.get(ETH2_ENR_KEY) == disk_enr.get(ETH2_ENR_KEY) // take preference over disk udp port if one is not specified && (local_enr.udp().is_none() || local_enr.udp() == disk_enr.udp()) // we need the BITFIELD_ENR_KEY key to match, otherwise we use a new ENR. 
This will likely only // be true for non-validating nodes && local_enr.get(BITFIELD_ENR_KEY) == disk_enr.get(BITFIELD_ENR_KEY) } /// Saves an ENR to disk pub fn save_enr_to_disk(dir: &Path, enr: &Enr, log: &slog::Logger) { let _ = std::fs::create_dir_all(dir); match File::create(dir.join(Path::new(ENR_FILENAME))) .and_then(|mut f| f.write_all(&enr.to_base64().as_bytes())) { Ok(_) => { debug!(log, "ENR written to disk"); } Err(e) => { warn!( log, "Could not write ENR to file"; "file" => format!("{:?}{:?}",dir, ENR_FILENAME), "error" => format!("{}", e) ); } } }
39.564706
170
0.598573
ffecd6dfda9b2ab0044a269b2264162bfb8ace84
3,803
use crate::error::Result; use crate::xsd::annotation::Annotation; use crate::xsd::attributes::{add_attributes_from_xml, Attributes}; use crate::xsd::constants::{ANNOTATION, ATTRIBUTE_GROUP, NAME, REF}; use crate::xsd::id::{Id, Lineage, RootNodeType}; use crate::xsd::{name_attribute, ref_attribute, Xsd}; #[derive(Clone, Debug)] pub enum AttributeGroup { Def(AttributeGroupDef), Ref(AttributeGroupRef), } #[derive(Clone, Debug)] pub struct AttributeGroupDef { pub id: Id, pub name: String, pub annotation: Option<Annotation>, pub attributes: Attributes, } #[derive(Clone, Debug)] pub struct AttributeGroupRef { pub id: Id, pub annotation: Option<Annotation>, pub ref_: String, } impl AttributeGroup { pub fn id(&self) -> &Id { match self { AttributeGroup::Def(x) => &x.id, AttributeGroup::Ref(x) => &x.id, } } pub fn documentation(&self) -> String { if let AttributeGroup::Def(def) = self { def.documentation() } else { "".to_owned() } } pub fn from_xml(node: &exile::Element, lineage: Lineage, xsd: &Xsd) -> Result<AttributeGroup> { check!(ATTRIBUTE_GROUP, node, xsd)?; let (id, lineage) = Id::make(lineage, node)?; if let Some(_ref_) = node.attributes.map().get(REF) { Ok(AttributeGroup::Ref(AttributeGroupRef::from_xml( node, lineage, xsd, )?)) } else { Ok(AttributeGroup::Def(AttributeGroupDef::from_xml( node, lineage, xsd, )?)) } } pub fn is_ref(&self) -> bool { match self { AttributeGroup::Def(_) => false, AttributeGroup::Ref(_) => true, } } pub fn is_def(&self) -> bool { match self { AttributeGroup::Def(_) => true, AttributeGroup::Ref(_) => false, } } } impl AttributeGroupDef { pub fn documentation(&self) -> String { if let Some(a) = &self.annotation { a.documentation() } else { "".to_owned() } } pub fn from_xml(node: &exile::Element, lineage: Lineage, xsd: &Xsd) -> Result<Self> { let name = name_attribute(node)?; let id = lineage.parent().unwrap(); Ok(Self { id: id.clone(), name, annotation: Self::parse_annotation(node, lineage.clone(), xsd)?, attributes: 
add_attributes_from_xml(node, lineage.clone(), xsd)?, }) } fn parse_annotation( node: &exile::Element, lineage: Lineage, xsd: &Xsd, ) -> Result<Option<Annotation>> { for inner in node.children() { if inner.name.as_str() == ANNOTATION { return Ok(Some(Annotation::from_xml(inner, lineage, xsd)?)); } } Ok(None) } } impl AttributeGroupRef { pub fn documentation(&self) -> String { if let Some(a) = &self.annotation { a.documentation() } else { "".to_owned() } } pub fn from_xml(node: &exile::Element, lineage: Lineage, xsd: &Xsd) -> Result<Self> { let ref_ = ref_attribute(node)?; let id = lineage.parent().unwrap(); Ok(Self { id: id.clone(), annotation: Self::parse_annotation(node, lineage.clone(), xsd)?, ref_, }) } fn parse_annotation( node: &exile::Element, lineage: Lineage, xsd: &Xsd, ) -> Result<Option<Annotation>> { for inner in node.children() { if inner.name.as_str() == ANNOTATION { return Ok(Some(Annotation::from_xml(inner, lineage, xsd)?)); } } Ok(None) } }
27.164286
99
0.541941
9bb7572a180061a84c426d71968823412b1b2b11
12,067
// Copyright 2017 Lyndon Brown
//
// This file is part of the PulseAudio Rust language linking library.
//
// Licensed under the MIT license or the Apache license (version 2.0), at your option. You may not
// copy, modify, or distribute this file except in compliance with said license. You can find copies
// of these licenses either in the LICENSE-MIT and LICENSE-APACHE files, or alternatively at
// <http://opensource.org/licenses/MIT> and <http://www.apache.org/licenses/LICENSE-2.0>
// respectively.
//
// Portions of documentation are copied from the LGPL 2.1+ licensed PulseAudio C headers on a
// fair-use basis, as discussed in the overall project readme (available in the git repository).

//! Audio streams for input, output and sample upload.

use std::os::raw::{c_char, c_void};
use num_derive::{FromPrimitive, ToPrimitive};
use crate::sample::{pa_sample_spec, pa_usec_t};
use crate::def::{pa_buffer_attr, pa_timing_info, pa_free_cb_t};
use crate::proplist::{pa_proplist, pa_update_mode_t};
use crate::{context::pa_context, channelmap::pa_channel_map, format::pa_format_info};
use crate::{volume::pa_cvolume, operation::pa_operation};

/// An opaque stream for playback or recording.
// Zero-sized private field makes this type opaque and unconstructible from Rust;
// it is only ever handled behind raw pointers returned by the C library.
#[repr(C)] pub struct pa_stream { _private: [u8; 0] }

/// The state of a stream, as reported by [`pa_stream_get_state`].
#[repr(C)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[derive(FromPrimitive, ToPrimitive)]
pub enum pa_stream_state_t {
    Unconnected,
    Creating,
    Ready,
    Failed,
    Terminated,
}

// C-style aliases mirroring the PA_STREAM_* constant names of the C API.
pub const PA_STREAM_UNCONNECTED: pa_stream_state_t = pa_stream_state_t::Unconnected;
pub const PA_STREAM_CREATING: pa_stream_state_t = pa_stream_state_t::Creating;
pub const PA_STREAM_READY: pa_stream_state_t = pa_stream_state_t::Ready;
pub const PA_STREAM_FAILED: pa_stream_state_t = pa_stream_state_t::Failed;
pub const PA_STREAM_TERMINATED: pa_stream_state_t = pa_stream_state_t::Terminated;

/// Checks if the passed state is one of the connected states (returns `true` if so).
#[inline(always)]
pub fn pa_stream_is_good(state: pa_stream_state_t) -> bool {
    state == pa_stream_state_t::Creating ||
    state == pa_stream_state_t::Ready
}

/// Stream direction.
#[repr(C)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[derive(FromPrimitive, ToPrimitive)]
pub enum pa_stream_direction_t {
    /// Invalid.
    Invalid,
    /// Playback.
    Playback,
    /// Record.
    Record,
    /// Upload.
    Upload,
}

// C-style aliases mirroring the PA_STREAM_* constant names of the C API.
pub const PA_STREAM_NODIRECTION: pa_stream_direction_t = pa_stream_direction_t::Invalid;
pub const PA_STREAM_PLAYBACK: pa_stream_direction_t = pa_stream_direction_t::Playback;
pub const PA_STREAM_RECORD: pa_stream_direction_t = pa_stream_direction_t::Record;
pub const PA_STREAM_UPLOAD: pa_stream_direction_t = pa_stream_direction_t::Upload;

// Flags are a plain integer bitfield rather than an enum, since they are OR-ed together.
pub type pa_stream_flags_t = u32;

pub use self::flags::*;

/// Some special flags for stream connections.
pub mod flags {
    use super::pa_stream_flags_t;

    pub const PA_STREAM_NOFLAGS: pa_stream_flags_t = 0;
    pub const PA_STREAM_START_CORKED: pa_stream_flags_t = 1 << 0;
    pub const PA_STREAM_INTERPOLATE_TIMING: pa_stream_flags_t = 1 << 1;
    pub const PA_STREAM_NOT_MONOTONIC: pa_stream_flags_t = 1 << 2;
    pub const PA_STREAM_AUTO_TIMING_UPDATE: pa_stream_flags_t = 1 << 3;
    pub const PA_STREAM_NO_REMAP_CHANNELS: pa_stream_flags_t = 1 << 4;
    pub const PA_STREAM_NO_REMIX_CHANNELS: pa_stream_flags_t = 1 << 5;
    pub const PA_STREAM_FIX_FORMAT: pa_stream_flags_t = 1 << 6;
    pub const PA_STREAM_FIX_RATE: pa_stream_flags_t = 1 << 7;
    pub const PA_STREAM_FIX_CHANNELS: pa_stream_flags_t = 1 << 8;
    pub const PA_STREAM_DONT_MOVE: pa_stream_flags_t = 1 << 9;
    pub const PA_STREAM_VARIABLE_RATE: pa_stream_flags_t = 1 << 10;
    pub const PA_STREAM_PEAK_DETECT: pa_stream_flags_t = 1 << 11;
    pub const PA_STREAM_START_MUTED: pa_stream_flags_t = 1 << 12;
    pub const PA_STREAM_ADJUST_LATENCY: pa_stream_flags_t = 1 << 13;
    pub const PA_STREAM_EARLY_REQUESTS: pa_stream_flags_t = 1 << 14;
    pub const PA_STREAM_DONT_INHIBIT_AUTO_SUSPEND: pa_stream_flags_t = 1 << 15;
    pub const PA_STREAM_START_UNMUTED: pa_stream_flags_t = 1 << 16;
    pub const PA_STREAM_FAIL_ON_SUSPEND: pa_stream_flags_t = 1 << 17;
    pub const PA_STREAM_RELATIVE_VOLUME: pa_stream_flags_t = 1 << 18;
    pub const PA_STREAM_PASSTHROUGH: pa_stream_flags_t = 1 << 19;
}

/// Seek mode.
#[repr(C)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[derive(FromPrimitive, ToPrimitive)]
pub enum pa_seek_mode_t {
    /// Seek relatively to the write index.
    Relative = 0,
    /// Seek relatively to the start of the buffer queue.
    Absolute = 1,
    /// Seek relatively to the read index.
    RelativeOnRead = 2,
    /// Seek relatively to the current end of the buffer queue.
    RelativeEnd = 3,
}

// C-style aliases mirroring the PA_SEEK_* constant names of the C API.
pub const PA_SEEK_RELATIVE: pa_seek_mode_t = pa_seek_mode_t::Relative;
pub const PA_SEEK_ABSOLUTE: pa_seek_mode_t = pa_seek_mode_t::Absolute;
pub const PA_SEEK_RELATIVE_ON_READ: pa_seek_mode_t = pa_seek_mode_t::RelativeOnRead;
pub const PA_SEEK_RELATIVE_END: pa_seek_mode_t = pa_seek_mode_t::RelativeEnd;

// Well-known stream event names delivered via the event callback.
pub const PA_STREAM_EVENT_REQUEST_CORK: &str = "request-cork";
pub const PA_STREAM_EVENT_REQUEST_UNCORK: &str = "request-uncork";
pub const PA_STREAM_EVENT_FORMAT_LOST: &str = "format-lost";

/// Callback prototype reporting success/failure of an operation on a stream.
pub type pa_stream_success_cb_t = Option<extern "C" fn(s: *mut pa_stream, success: i32, userdata: *mut c_void)>;

/// Callback prototype for read/write requests; `nbytes` is the byte count involved.
pub type pa_stream_request_cb_t = Option<extern "C" fn(p: *mut pa_stream, nbytes: usize, userdata: *mut c_void)>;

/// Generic notification callback prototype.
pub type pa_stream_notify_cb_t = Option<extern "C" fn(p: *mut pa_stream, userdata: *mut c_void)>;

/// Callback prototype for named stream events with a property list payload.
pub type pa_stream_event_cb_t = Option<extern "C" fn(p: *mut pa_stream, name: *const c_char, pl: *mut pa_proplist, userdata: *mut c_void)>;

// Raw FFI declarations; signatures must match the libpulse C headers exactly.
#[link(name="pulse")]
extern "C" {
    pub fn pa_stream_connect_upload(s: *mut pa_stream, length: usize) -> i32;
    pub fn pa_stream_finish_upload(s: *mut pa_stream) -> i32;
    pub fn pa_stream_new(c: *mut pa_context, name: *const c_char, ss: *const pa_sample_spec, map: *const pa_channel_map) -> *mut pa_stream;
    pub fn pa_stream_new_with_proplist(c: *mut pa_context, name: *const c_char, ss: *const pa_sample_spec, map: *const pa_channel_map, p: *mut pa_proplist) -> *mut pa_stream;
    pub fn pa_stream_new_extended(c: *mut pa_context, name: *const c_char, formats: *const *const pa_format_info, n_formats: u32, p: *mut pa_proplist) -> *mut pa_stream;
    pub fn pa_stream_unref(s: *mut pa_stream);
    pub fn pa_stream_ref(s: *mut pa_stream) -> *mut pa_stream;
    pub fn pa_stream_get_state(s: *const pa_stream) -> pa_stream_state_t;
    pub fn pa_stream_get_context(s: *const pa_stream) -> *mut pa_context;
    pub fn pa_stream_get_index(s: *const pa_stream) -> u32;
    pub fn pa_stream_get_device_index(s: *const pa_stream) -> u32;
    pub fn pa_stream_get_device_name(s: *const pa_stream) -> *const c_char;
    pub fn pa_stream_is_suspended(s: *const pa_stream) -> i32;
    pub fn pa_stream_is_corked(s: *const pa_stream) -> i32;
    pub fn pa_stream_connect_playback(s: *mut pa_stream, dev: *const c_char, attr: *const pa_buffer_attr, flags: pa_stream_flags_t, volume: *const pa_cvolume, sync_stream: *mut pa_stream) -> i32;
    pub fn pa_stream_connect_record(s: *mut pa_stream, dev: *const c_char, attr: *const pa_buffer_attr, flags: pa_stream_flags_t) -> i32;
    pub fn pa_stream_disconnect(s: *mut pa_stream) -> i32;
    pub fn pa_stream_begin_write(s: *mut pa_stream, data: *mut *mut c_void, nbytes: *mut usize) -> i32;
    pub fn pa_stream_cancel_write(s: *mut pa_stream) -> i32;
    pub fn pa_stream_write(s: *mut pa_stream, data: *const c_void, nbytes: usize, free_cb: pa_free_cb_t, offset: i64, seek: pa_seek_mode_t) -> i32;
    // Only available in PulseAudio >= 6, hence the feature gate.
    #[cfg(any(doc, feature = "pa_v6"))]
    #[cfg_attr(docsrs, doc(cfg(feature = "pa_v6")))]
    pub fn pa_stream_write_ext_free(s: *mut pa_stream, data: *const c_void, nbytes: usize, free_cb: pa_free_cb_t, free_cb_data: *mut c_void, offset: i64, seek: pa_seek_mode_t) -> i32;
    pub fn pa_stream_peek(s: *mut pa_stream, data: *mut *const c_void, nbytes: *mut usize) -> i32;
    pub fn pa_stream_drop(s: *mut pa_stream) -> i32;
    pub fn pa_stream_writable_size(s: *const pa_stream) -> usize;
    pub fn pa_stream_readable_size(s: *const pa_stream) -> usize;
    pub fn pa_stream_drain(s: *mut pa_stream, cb: pa_stream_success_cb_t, userdata: *mut c_void) -> *mut pa_operation;
    pub fn pa_stream_update_timing_info(s: *mut pa_stream, cb: pa_stream_success_cb_t, userdata: *mut c_void) -> *mut pa_operation;
    pub fn pa_stream_set_state_callback(s: *mut pa_stream, cb: pa_stream_notify_cb_t, userdata: *mut c_void);
    pub fn pa_stream_set_write_callback(s: *mut pa_stream, cb: pa_stream_request_cb_t, userdata: *mut c_void);
    pub fn pa_stream_set_read_callback(s: *mut pa_stream, cb: pa_stream_request_cb_t, userdata: *mut c_void);
    pub fn pa_stream_set_overflow_callback(s: *mut pa_stream, cb: pa_stream_notify_cb_t, userdata: *mut c_void);
    pub fn pa_stream_get_underflow_index(s: *const pa_stream) -> i64;
    pub fn pa_stream_set_underflow_callback(s: *mut pa_stream, cb: pa_stream_notify_cb_t, userdata: *mut c_void);
    pub fn pa_stream_set_started_callback(s: *mut pa_stream, cb: pa_stream_notify_cb_t, userdata: *mut c_void);
    pub fn pa_stream_set_latency_update_callback(s: *mut pa_stream, cb: pa_stream_notify_cb_t, userdata: *mut c_void);
    pub fn pa_stream_set_moved_callback(s: *mut pa_stream, cb: pa_stream_notify_cb_t, userdata: *mut c_void);
    pub fn pa_stream_set_suspended_callback(s: *mut pa_stream, cb: pa_stream_notify_cb_t, userdata: *mut c_void);
    pub fn pa_stream_set_event_callback(s: *mut pa_stream, cb: pa_stream_event_cb_t, userdata: *mut c_void);
    pub fn pa_stream_set_buffer_attr_callback(s: *mut pa_stream, cb: pa_stream_notify_cb_t, userdata: *mut c_void);
    pub fn pa_stream_cork(s: *mut pa_stream, b: i32, cb: pa_stream_success_cb_t, userdata: *mut c_void) -> *mut pa_operation;
    pub fn pa_stream_flush(s: *mut pa_stream, cb: pa_stream_success_cb_t, userdata: *mut c_void) -> *mut pa_operation;
    pub fn pa_stream_prebuf(s: *mut pa_stream, cb: pa_stream_success_cb_t, userdata: *mut c_void) -> *mut pa_operation;
    pub fn pa_stream_trigger(s: *mut pa_stream, cb: pa_stream_success_cb_t, userdata: *mut c_void) -> *mut pa_operation;
    pub fn pa_stream_set_name(s: *mut pa_stream, name: *const c_char, cb: pa_stream_success_cb_t, userdata: *mut c_void) -> *mut pa_operation;
    pub fn pa_stream_get_time(s: *mut pa_stream, r_usec: *mut pa_usec_t) -> i32;
    pub fn pa_stream_get_latency(s: *mut pa_stream, r_usec: *mut pa_usec_t, negative: *mut i32) -> i32;
    pub fn pa_stream_get_timing_info(s: *mut pa_stream) -> *const pa_timing_info;
    pub fn pa_stream_get_sample_spec(s: *mut pa_stream) -> *const pa_sample_spec;
    pub fn pa_stream_get_channel_map(s: *mut pa_stream) -> *const pa_channel_map;
    pub fn pa_stream_get_format_info(s: *const pa_stream) -> *mut pa_format_info;
    pub fn pa_stream_get_buffer_attr(s: *mut pa_stream) -> *const pa_buffer_attr;
    pub fn pa_stream_set_buffer_attr(s: *mut pa_stream, attr: *const pa_buffer_attr, cb: pa_stream_success_cb_t, userdata: *mut c_void) -> *mut pa_operation;
    pub fn pa_stream_update_sample_rate(s: *mut pa_stream, rate: u32, cb: pa_stream_success_cb_t, userdata: *mut c_void) -> *mut pa_operation;
    pub fn pa_stream_proplist_update(s: *mut pa_stream, mode: pa_update_mode_t, p: *mut pa_proplist, cb: pa_stream_success_cb_t, userdata: *mut c_void) -> *mut pa_operation;
    pub fn pa_stream_proplist_remove(s: *mut pa_stream, keys: *const *const c_char, cb: pa_stream_success_cb_t, userdata: *mut c_void) -> *mut pa_operation;
    pub fn pa_stream_set_monitor_stream(s: *mut pa_stream, sink_input_idx: u32) -> i32;
    pub fn pa_stream_get_monitor_stream(s: *const pa_stream) -> u32;
}
61.253807
195
0.730422
64c14b14fd2ca055f22e51a0dd7cd2007783d9ca
7,591
use crate::password::v2::{Password, PasswordStore}; use crate::rclio::{CliInputOutput, OutputType}; /// Used to indicate lists should have a number, ie: 23 Google [email protected] pub const WITH_NUMBERS: bool = true; /// Used to indicate lists should not have a number, ie: Google [email protected] pub const WITHOUT_NUMBERS: bool = false; fn get_list_of_passwords(passwords: &Vec<&Password>, with_numbers: bool) -> Vec<String> { // Find the app name column length let longest_app_name = passwords.iter().fold(0, |acc, p| { if p.name.len() > acc { p.name.len() } else { acc } }); // Find the username column length let longest_username = passwords.iter().fold(0, |acc, p| { if p.username.len() > acc { p.username.len() } else { acc } }); // Find the number column length let i_width = ((passwords.len() as f64).log10() + 1 as f64).floor() as usize; let mut list = Vec::new(); for (i, p) in passwords.iter().enumerate() { let s = match with_numbers { WITH_NUMBERS => format!( "{:i_width$} {:app_name_width$} {:username_width$}", i + 1, p.name, p.username, i_width = i_width, app_name_width = longest_app_name, username_width = longest_username, ), WITHOUT_NUMBERS => format!( "{:app_name_width$} {:username_width$}", p.name, p.username, app_name_width = longest_app_name, username_width = longest_username, ), }; list.push(s); } list } pub fn print_list_of_passwords( passwords: &Vec<&Password>, with_numbers: bool, io: &mut impl CliInputOutput, ) { let list = get_list_of_passwords(passwords, with_numbers); for s in list { io.info(s, OutputType::Standard); } } fn request_password_index_from_stdin( passwords: &Vec<&Password>, prompt: &str, io: &mut impl CliInputOutput, ) -> usize { assert!(!passwords.is_empty()); // Read the index from the command line and convert to a number loop { if passwords.len() > 1 { io.info(prompt, OutputType::Standard); io.write( format!("Type a number from 1 to {}: ", passwords.len()), OutputType::Standard, ); } else if passwords.len() == 1 { io.write( "If this 
is the password you mean, type \"1\" and hit ENTER: ", OutputType::Standard, ); } match io.read_line() { Ok(line) => { match line.trim().parse::<usize>() { Ok(index) => { if index == 0 || index > passwords.len() { io.write( format!( "I need a number between 1 and {}. Let's try again:", passwords.len() ), OutputType::Standard, ); continue; } return index - 1; } Err(err) => { io.write( format!("This isn't a valid number (reason: {}). Let's try again (1 to {}): ", err, passwords.len()), OutputType::Standard, ); continue; } }; } Err(err) => { io.write( format!( "I couldn't read that (reason: {}). Let's try again (1 to {}): ", err, passwords.len() ), OutputType::Standard, ); } } } } fn choose_password_in_list( passwords: &Vec<&Password>, with_numbers: bool, prompt: &str, io: &mut impl CliInputOutput, ) -> usize { print_list_of_passwords(passwords, with_numbers, io); io.nl(OutputType::Standard); request_password_index_from_stdin(passwords, prompt, io) } pub fn search_and_choose_password<'a>( store: &'a PasswordStore, query: &str, with_numbers: bool, prompt: &str, io: &mut impl CliInputOutput, ) -> Option<&'a Password> { let passwords = store.search_passwords(query); if passwords.len() == 0 { io.error( format!("Woops, I can't find any passwords for \"{}\".", query), OutputType::Error, ); return None; } if let Some(&password) = passwords .iter() .find(|p| p.name.to_lowercase() == query.to_lowercase()) { return Some(&password); } let index = choose_password_in_list(&passwords, with_numbers, prompt, io); Some(passwords[index]) } #[cfg(test)] mod test { use super::get_list_of_passwords; use crate::list::{WITHOUT_NUMBERS, WITH_NUMBERS}; use crate::password::v2::Password; use crate::rutil::safe_string::SafeString; // Creates a list of at least two passwords, and more if specified fn get_passwords(mut additional: i32) -> Vec<Password> { let google = Password::new( format!("google"), format!("short un"), SafeString::from_string(format!("xxxx")), ); let mut list = vec![ 
Password::new( format!("youtube.com"), format!("that long username"), SafeString::from_string(format!("xxxx")), ), google.clone(), ]; while additional > 0 { list.push(google.clone()); additional -= 1; } list } #[test] fn password_list_has_right_format_with_numbers() { // With 2 passwords (number width 1) let passwords = get_passwords(0); let list = get_list_of_passwords(&passwords.iter().collect(), WITH_NUMBERS); assert_eq!( list, &[ "1 youtube.com that long username", "2 google short un ", ] ); // Now with 10 passwords (number width 2) let passwords = get_passwords(8); let list = get_list_of_passwords(&passwords.iter().collect(), WITH_NUMBERS); assert_eq!( list, &[ " 1 youtube.com that long username", " 2 google short un ", " 3 google short un ", " 4 google short un ", " 5 google short un ", " 6 google short un ", " 7 google short un ", " 8 google short un ", " 9 google short un ", "10 google short un ", ] ); } #[test] fn password_list_has_right_format_without_numbers() { let passwords = get_passwords(0); let list = get_list_of_passwords(&passwords.iter().collect(), WITHOUT_NUMBERS); assert_eq!( list, &[ "youtube.com that long username", "google short un ", ] ); } }
30.123016
151
0.478198
e211bbfd4d4475c603a4939f7984b7357e7622bf
2,374
mod common;

use std::time::Duration;

use common::{Binary, LogSide, TestContext};

use anyhow::{Context, Result};

// Log prefix for messages emitted by the test itself (as opposed to the servers/clients).
const L: LogSide = LogSide::Test;

/// End-to-end check that a manufacturing client pointed at an already-obtained
/// device credential reports it as active instead of provisioning a new one.
///
/// Flow: start a manufacturing server, run the client once to obtain
/// `devicecredential.dc`, then run the client a second time with
/// `DEVICE_CREDENTIAL` pointing at that file and expect the
/// "Device credential already active" message.
#[tokio::test]
async fn test_device_credentials_already_active() -> Result<()> {
    let mut ctx = TestContext::new().context("Error building test context")?;

    // Manufacturing server configured with a filesystem-backed DIUN key.
    let mfg_server = ctx
        .start_test_server(
            Binary::ManufacturingServer,
            |cfg| {
                Ok(cfg.prepare_config_file(None, |cfg| {
                    cfg.insert("rendezvous_port", "1337");
                    cfg.insert("diun_key_type", "FileSystem");
                    Ok(())
                })?)
            },
            |_| Ok(()),
        )
        .context("Error creating manufacturing server")?;
    ctx.wait_until_servers_ready()
        .await
        .context("Error waiting for servers to start")?;

    // First run: provision a fresh device credential.
    let client_result = ctx
        .run_client(
            Binary::ManufacturingClient,
            Some(&mfg_server),
            |cfg| {
                cfg.env("DEVICE_CREDENTIAL_FILENAME", "devicecredential.dc")
                    .env("MANUFACTURING_INFO", "testdevice")
                    .env("DIUN_PUB_KEY_INSECURE", "true");
                Ok(())
            },
            Duration::from_secs(5),
        )
        .context("Error running manufacturing client")?;
    client_result
        .expect_success()
        .context("Manufacturing client failed")?;
    // DIUN_PUB_KEY_INSECURE=true should be reflected in the client's log output.
    client_result.expect_stderr_line("Trusting any certificate as root")?;

    let dc_path = client_result.client_path().join("devicecredential.dc");
    L.l(format!("Device Credential should be in {:?}", dc_path));

    // Second run: same setup, but now DEVICE_CREDENTIAL points at the file
    // produced by the first run, so the client should refuse to re-provision.
    let client_result = ctx
        .run_client(
            Binary::ManufacturingClient,
            Some(&mfg_server),
            |cfg| {
                cfg.env("DEVICE_CREDENTIAL_FILENAME", "devicecredential.dc")
                    .env("MANUFACTURING_INFO", "testdevice")
                    .env("DEVICE_CREDENTIAL", dc_path)
                    .env("DIUN_PUB_KEY_INSECURE", "true");
                Ok(())
            },
            Duration::from_secs(5),
        )
        .context("Error running manufacturing client")?;
    client_result
        .expect_success()
        .context("Manufacturing client failed")?;
    client_result.expect_stderr_line("Device credential already active")?;

    Ok(())
}
32.520548
77
0.557287
e2c9a048dee0e72e0668976c642fbb577207e84a
5,293
// SPDX-License-Identifier: Apache-2.0 // Copyright 2021 Keylime Authors use log::*; use std::convert::TryFrom; use std::fs::File; use std::io::prelude::*; use std::io::BufReader; use thiserror::Error; use tss_esapi::{ abstraction::pcr, handles::PcrHandle, interface_types::algorithm::HashingAlgorithm, structures::{Digest, DigestValues, PcrSelectionListBuilder, PcrSlot}, Context, Tcti, }; const IMA_ML: &str = "/sys/kernel/security/ima/ascii_runtime_measurements"; const START_HASH: &[u8; 20] = &[0u8; 20]; const FF_HASH: &[u8; 20] = &[0xffu8; 20]; #[derive(Error, Debug)] enum ImaEmulatorError { #[error("Invalid envvar")] VarError(#[from] std::env::VarError), #[error("TPM error")] TssEsapiError(#[from] tss_esapi::Error), #[error("Decoding error")] FromHexError(#[from] hex::FromHexError), #[error("I/O error")] IoError(#[from] std::io::Error), #[error("{0}")] Other(String), } type Result<T> = std::result::Result<T, ImaEmulatorError>; fn ml_extend( context: &mut Context, ml: &str, mut position: usize, search_hash: Option<&Digest>, ) -> Result<usize> { let f = File::open(ml)?; let mut reader = BufReader::new(f); for line in reader.by_ref().lines().skip(position) { let line = line?; if line.is_empty() { continue; } let tokens: Vec<&str> = line.splitn(5, ' ').collect(); if tokens.len() < 5 { error!("invalid measurement list file line: -{}-", line); } position += 1; let path = tokens[4]; let template_hash = hex::decode(tokens[1])?; let template_hash = if template_hash == START_HASH { Digest::try_from(&FF_HASH[..]) } else { Digest::try_from(template_hash) }?; match search_hash { None => { println!( "extending hash {} for {}", hex::encode(template_hash.value()), &path ); let mut vals = DigestValues::new(); vals.set(HashingAlgorithm::Sha1, template_hash); // TODO: Add support for other hash algorithms context.execute_with_nullauth_session(|ctx| { ctx.pcr_extend(PcrHandle::Pcr10, vals) })?; } Some(search_hash) => { let mut hasher = openssl::sha::Sha1::new(); 
hasher.update(START_HASH); hasher.update(&template_hash); let running_hash: Vec<u8> = hasher.finish().into(); let running_hash = Digest::try_from(running_hash)?; let mut vals = DigestValues::new(); vals.set(HashingAlgorithm::Sha1, running_hash.clone()); if running_hash == *search_hash { println!("Located last IMA file updated: {}", path); return Ok(position); } } } } if search_hash.is_some() { return Err(ImaEmulatorError::Other( "Unable to find current measurement list position, Resetting the TPM emulator may be neccesary".to_string())); } Ok(position) } fn main() -> std::result::Result<(), ImaEmulatorError> { let tcti = match Tcti::from_environment_variable() { Ok(tcti) => tcti, Err(_) => return Err(ImaEmulatorError::Other( "This stub requires TCTI environment variable set properly" .to_string(), )), }; let mut context = Context::new(tcti)?; if !tss_esapi::utils::get_tpm_vendor(&mut context)?.contains("SW") { return Err(ImaEmulatorError::Other( "This stub should only be used with a TPM emulator".to_string(), )); } // check if pcr is clean let pcr_list = PcrSelectionListBuilder::new() .with_selection(HashingAlgorithm::Sha1, &[PcrSlot::Slot10]) .build(); let pcr_data = context .execute_without_session(|ctx| pcr::read_all(ctx, pcr_list))?; let digest = pcr_data .pcr_bank(HashingAlgorithm::Sha1) .ok_or_else(|| { ImaEmulatorError::Other( "IMA slot does not have SHA-1 bank".to_string(), ) })? 
.get_digest(PcrSlot::Slot10) .ok_or_else(|| { ImaEmulatorError::Other( "could not read value from IMA PCR".to_string(), ) })?; let mut pos = 0; if digest.value() != START_HASH { log::warn!("IMA PCR is not empty, trying to find the last updated file in the measurement list..."); pos = ml_extend(&mut context, IMA_ML, 0, Some(digest))?; } println!("Monitoring {}", IMA_ML); loop { pos = ml_extend(&mut context, IMA_ML, pos, None)?; // FIXME: We could poll IMA_ML as in the python implementation, though // the file is not pollable: // https://github.com/torvalds/linux/blob/master/security/integrity/ima/ima_fs.c#L267 // Better idea might be to check the "runtime_measurements_count" file. let duration = std::time::Duration::from_millis(200); std::thread::sleep(duration); } }
31.885542
122
0.562819
90a130fc265f2e0aae76c0ee3e1f8f464c479903
37,258
use crate::builtins::pystr::{PyStr, PyStrRef}; use crate::builtins::pytype::{PyType, PyTypeRef}; use crate::builtins::singletons::{PyNone, PyNoneRef}; use crate::builtins::traceback::PyTracebackRef; use crate::builtins::tuple::{PyTuple, PyTupleRef}; use crate::common::lock::PyRwLock; use crate::function::FuncArgs; use crate::py_io::{self, Write}; use crate::sysmodule; use crate::types::create_type_with_slots; use crate::StaticType; use crate::VirtualMachine; use crate::{ IdProtocol, IntoPyObject, PyClassImpl, PyContext, PyIterable, PyObjectRef, PyRef, PyResult, PyValue, TryFromObject, TypeProtocol, }; use crossbeam_utils::atomic::AtomicCell; use itertools::Itertools; use std::collections::HashSet; use std::fmt; use std::fs::File; use std::io::{self, BufRead, BufReader}; #[pyclass(module = false, name = "BaseException")] pub struct PyBaseException { traceback: PyRwLock<Option<PyTracebackRef>>, cause: PyRwLock<Option<PyBaseExceptionRef>>, context: PyRwLock<Option<PyBaseExceptionRef>>, suppress_context: AtomicCell<bool>, args: PyRwLock<PyTupleRef>, } impl fmt::Debug for PyBaseException { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // TODO: implement more detailed, non-recursive Debug formatter f.write_str("PyBaseException") } } pub type PyBaseExceptionRef = PyRef<PyBaseException>; pub trait IntoPyException { fn into_pyexception(self, vm: &VirtualMachine) -> PyBaseExceptionRef; } impl PyValue for PyBaseException { fn class(vm: &VirtualMachine) -> &PyTypeRef { &vm.ctx.exceptions.base_exception_type } } #[pyimpl(flags(BASETYPE, HAS_DICT))] impl PyBaseException { pub(crate) fn new(args: Vec<PyObjectRef>, vm: &VirtualMachine) -> PyBaseException { PyBaseException { traceback: PyRwLock::new(None), cause: PyRwLock::new(None), context: PyRwLock::new(None), suppress_context: AtomicCell::new(false), args: PyRwLock::new(PyTupleRef::with_elements(args, &vm.ctx)), } } #[pyslot] pub(crate) fn tp_new( cls: PyTypeRef, args: FuncArgs, vm: &VirtualMachine, ) -> 
PyResult<PyRef<Self>> { PyBaseException::new(args.args, vm).into_ref_with_type(vm, cls) } #[pymethod(magic)] pub(crate) fn init(zelf: PyRef<Self>, args: FuncArgs, vm: &VirtualMachine) -> PyResult<()> { *zelf.args.write() = PyTupleRef::with_elements(args.args, &vm.ctx); Ok(()) } pub fn get_arg(&self, idx: usize) -> Option<PyObjectRef> { self.args.read().as_slice().get(idx).cloned() } #[pyproperty] pub fn args(&self) -> PyTupleRef { self.args.read().clone() } #[pyproperty(setter)] fn set_args(&self, args: PyIterable, vm: &VirtualMachine) -> PyResult<()> { let args = args.iter(vm)?.collect::<PyResult<Vec<_>>>()?; *self.args.write() = PyTupleRef::with_elements(args, &vm.ctx); Ok(()) } #[pyproperty(magic)] pub fn traceback(&self) -> Option<PyTracebackRef> { self.traceback.read().clone() } #[pyproperty(name = "__traceback__", setter)] pub fn set_traceback(&self, traceback: Option<PyTracebackRef>) { *self.traceback.write() = traceback; } #[pyproperty(magic)] pub fn cause(&self) -> Option<PyBaseExceptionRef> { self.cause.read().clone() } #[pyproperty(name = "__cause__", setter)] pub fn set_cause(&self, cause: Option<PyBaseExceptionRef>) { let mut c = self.cause.write(); self.set_suppress_context(true); *c = cause; } #[pyproperty(magic)] pub fn context(&self) -> Option<PyBaseExceptionRef> { self.context.read().clone() } #[pyproperty(name = "__context__", setter)] pub fn set_context(&self, context: Option<PyBaseExceptionRef>) { *self.context.write() = context; } #[pyproperty(name = "__suppress_context__")] fn get_suppress_context(&self) -> bool { self.suppress_context.load() } #[pyproperty(name = "__suppress_context__", setter)] fn set_suppress_context(&self, suppress_context: bool) { self.suppress_context.store(suppress_context); } #[pymethod] fn with_traceback(zelf: PyRef<Self>, tb: Option<PyTracebackRef>) -> PyResult { *zelf.traceback.write() = tb; Ok(zelf.as_object().clone()) } #[pymethod(magic)] fn str(&self, vm: &VirtualMachine) -> PyStrRef { let str_args = 
exception_args_as_string(vm, self.args(), true); match str_args.into_iter().exactly_one() { Err(i) if i.len() == 0 => PyStr::from("").into_ref(vm), Ok(s) => s, Err(i) => PyStr::from(format!("({})", i.format(", "))).into_ref(vm), } } #[pymethod(magic)] fn repr(zelf: PyRef<Self>, vm: &VirtualMachine) -> String { let repr_args = exception_args_as_string(vm, zelf.args(), false); let cls = zelf.class(); format!("{}({})", cls.name, repr_args.iter().format(", ")) } } pub fn chain<T>(e1: PyResult<()>, e2: PyResult<T>) -> PyResult<T> { match (e1, e2) { (Err(e1), Err(e)) => { e.set_context(Some(e1)); Err(e) } (Err(e), Ok(_)) | (Ok(()), Err(e)) => Err(e), (Ok(()), Ok(close_res)) => Ok(close_res), } } /// Print exception chain by calling sys.excepthook pub fn print_exception(vm: &VirtualMachine, exc: PyBaseExceptionRef) { let write_fallback = |exc, errstr| { if let Ok(stderr) = sysmodule::get_stderr(vm) { let mut stderr = py_io::PyWriter(stderr, vm); // if this fails stderr might be closed -- ignore it let _ = writeln!(stderr, "{}", errstr); let _ = write_exception(&mut stderr, vm, exc); } else { eprintln!("{}\nlost sys.stderr", errstr); let _ = write_exception(&mut py_io::IoWriter(io::stderr()), vm, exc); } }; if let Ok(excepthook) = vm.get_attribute(vm.sys_module.clone(), "excepthook") { let (exc_type, exc_val, exc_tb) = split(exc.clone(), vm); if let Err(eh_exc) = vm.invoke(&excepthook, vec![exc_type, exc_val, exc_tb]) { write_fallback(&eh_exc, "Error in sys.excepthook:"); write_fallback(&exc, "Original exception was:"); } } else { write_fallback(&exc, "missing sys.excepthook"); } } pub fn write_exception<W: Write>( output: &mut W, vm: &VirtualMachine, exc: &PyBaseExceptionRef, ) -> Result<(), W::Error> { let seen = &mut HashSet::<usize>::new(); write_exception_recursive(output, vm, exc, seen) } fn print_source_line<W: Write>( output: &mut W, filename: &str, lineno: usize, ) -> Result<(), W::Error> { // TODO: use io.open() method instead, when available, according to 
https://github.com/python/cpython/blob/master/Python/traceback.c#L393 // TODO: support different encodings let file = match File::open(filename) { Ok(file) => file, Err(_) => return Ok(()), }; let file = BufReader::new(file); for (i, line) in file.lines().enumerate() { if i + 1 == lineno { if let Ok(line) = line { // Indented with 4 spaces writeln!(output, " {}", line.trim_start())?; } return Ok(()); } } Ok(()) } /// Print exception occurrence location from traceback element fn write_traceback_entry<W: Write>( output: &mut W, tb_entry: &PyTracebackRef, ) -> Result<(), W::Error> { let filename = tb_entry.frame.code.source_path.as_str(); writeln!( output, r##" File "{}", line {}, in {}"##, filename, tb_entry.lineno, tb_entry.frame.code.obj_name )?; print_source_line(output, filename, tb_entry.lineno)?; Ok(()) } fn write_exception_recursive<W: Write>( output: &mut W, vm: &VirtualMachine, exc: &PyBaseExceptionRef, seen: &mut HashSet<usize>, ) -> Result<(), W::Error> { // This function should not be called directly, // use `wite_exception` as a public interface. // It is similar to `print_exception_recursive` from `CPython`. seen.insert(exc.as_object().get_id()); #[allow(clippy::manual_map)] if let Some((cause_or_context, msg)) = if let Some(cause) = exc.cause() { // This can be a special case: `raise e from e`, // we just ignore it and treat like `raise e` without any extra steps. Some(( cause, "\nThe above exception was the direct cause of the following exception:\n", )) } else if let Some(context) = exc.context() { // This can be a special case: // e = ValueError('e') // e.__context__ = e // In this case, we just ignore // `__context__` part from going into recursion. 
Some(( context, "\nDuring handling of the above exception, another exception occurred:\n", )) } else { None } { if !seen.contains(&cause_or_context.as_object().get_id()) { write_exception_recursive(output, vm, &cause_or_context, seen)?; writeln!(output, "{}", msg)?; } else { seen.insert(cause_or_context.as_object().get_id()); } } write_exception_inner(output, vm, exc) } /// Print exception with traceback pub fn write_exception_inner<W: Write>( output: &mut W, vm: &VirtualMachine, exc: &PyBaseExceptionRef, ) -> Result<(), W::Error> { if let Some(tb) = exc.traceback.read().clone() { writeln!(output, "Traceback (most recent call last):")?; for tb in tb.iter() { write_traceback_entry(output, &tb)?; } } let varargs = exc.args(); let args_repr = exception_args_as_string(vm, varargs, true); let exc_name = exc.class().name.clone(); match args_repr.len() { 0 => writeln!(output, "{}", exc_name), 1 => writeln!(output, "{}: {}", exc_name, args_repr[0]), _ => writeln!( output, "{}: ({})", exc_name, args_repr.into_iter().format(", ") ), } } fn exception_args_as_string( vm: &VirtualMachine, varargs: PyTupleRef, str_single: bool, ) -> Vec<PyStrRef> { let varargs = varargs.as_slice(); match varargs.len() { 0 => vec![], 1 => { let args0_repr = if str_single { vm.to_str(&varargs[0]) .unwrap_or_else(|_| PyStr::from("<element str() failed>").into_ref(vm)) } else { vm.to_repr(&varargs[0]) .unwrap_or_else(|_| PyStr::from("<element repr() failed>").into_ref(vm)) }; vec![args0_repr] } _ => varargs .iter() .map(|vararg| { vm.to_repr(vararg) .unwrap_or_else(|_| PyStr::from("<element repr() failed>").into_ref(vm)) }) .collect(), } } #[derive(Clone)] pub enum ExceptionCtor { Class(PyTypeRef), Instance(PyBaseExceptionRef), } impl TryFromObject for ExceptionCtor { fn try_from_object(vm: &VirtualMachine, obj: PyObjectRef) -> PyResult<Self> { obj.downcast::<PyType>() .and_then(|cls| { if cls.issubclass(&vm.ctx.exceptions.base_exception_type) { Ok(Self::Class(cls)) } else { Err(cls.into_object()) } 
}) .or_else(|obj| obj.downcast::<PyBaseException>().map(Self::Instance)) .map_err(|obj| { vm.new_type_error(format!( "exceptions must be classes or instances deriving from BaseException, not {}", obj.class().name )) }) } } pub fn invoke( cls: PyTypeRef, args: Vec<PyObjectRef>, vm: &VirtualMachine, ) -> PyResult<PyBaseExceptionRef> { // TODO: fast-path built-in exceptions by directly instantiating them? Is that really worth it? let res = vm.invoke(cls.as_object(), args)?; PyBaseExceptionRef::try_from_object(vm, res) } impl ExceptionCtor { pub fn instantiate(self, vm: &VirtualMachine) -> PyResult<PyBaseExceptionRef> { match self { Self::Class(cls) => invoke(cls, vec![], vm), Self::Instance(exc) => Ok(exc), } } pub fn instantiate_value( self, value: PyObjectRef, vm: &VirtualMachine, ) -> PyResult<PyBaseExceptionRef> { let exc_inst = value.clone().downcast::<PyBaseException>().ok(); match (self, exc_inst) { // both are instances; which would we choose? (Self::Instance(_exc_a), Some(_exc_b)) => { Err(vm .new_type_error("instance exception may not have a separate value".to_owned())) } // if the "type" is an instance and the value isn't, use the "type" (Self::Instance(exc), None) => Ok(exc), // if the value is an instance of the type, use the instance value (Self::Class(cls), Some(exc)) if exc.isinstance(&cls) => Ok(exc), // otherwise; construct an exception of the type using the value as args (Self::Class(cls), _) => { let args = match_class!(match value { PyNone => vec![], tup @ PyTuple => tup.as_slice().to_vec(), exc @ PyBaseException => exc.args().as_slice().to_vec(), obj => vec![obj], }); invoke(cls, args, vm) } } } } pub fn split( exc: PyBaseExceptionRef, vm: &VirtualMachine, ) -> (PyObjectRef, PyObjectRef, PyObjectRef) { let tb = exc.traceback().into_pyobject(vm); (exc.clone_class().into_object(), exc.into_object(), tb) } /// Similar to PyErr_NormalizeException in CPython pub fn normalize( exc_type: PyObjectRef, exc_val: PyObjectRef, exc_tb: PyObjectRef, vm: 
&VirtualMachine, ) -> PyResult<PyBaseExceptionRef> { let ctor = ExceptionCtor::try_from_object(vm, exc_type)?; let exc = ctor.instantiate_value(exc_val, vm)?; if let Some(tb) = Option::<PyTracebackRef>::try_from_object(vm, exc_tb)? { exc.set_traceback(Some(tb)); } Ok(exc) } #[derive(Debug, Clone)] pub struct ExceptionZoo { pub base_exception_type: PyTypeRef, pub system_exit: PyTypeRef, pub keyboard_interrupt: PyTypeRef, pub generator_exit: PyTypeRef, pub exception_type: PyTypeRef, pub stop_iteration: PyTypeRef, pub stop_async_iteration: PyTypeRef, pub arithmetic_error: PyTypeRef, pub floating_point_error: PyTypeRef, pub overflow_error: PyTypeRef, pub zero_division_error: PyTypeRef, pub assertion_error: PyTypeRef, pub attribute_error: PyTypeRef, pub buffer_error: PyTypeRef, pub eof_error: PyTypeRef, pub import_error: PyTypeRef, pub module_not_found_error: PyTypeRef, pub lookup_error: PyTypeRef, pub index_error: PyTypeRef, pub key_error: PyTypeRef, pub memory_error: PyTypeRef, pub name_error: PyTypeRef, pub unbound_local_error: PyTypeRef, pub os_error: PyTypeRef, pub blocking_io_error: PyTypeRef, pub child_process_error: PyTypeRef, pub connection_error: PyTypeRef, pub broken_pipe_error: PyTypeRef, pub connection_aborted_error: PyTypeRef, pub connection_refused_error: PyTypeRef, pub connection_reset_error: PyTypeRef, pub file_exists_error: PyTypeRef, pub file_not_found_error: PyTypeRef, pub interrupted_error: PyTypeRef, pub is_a_directory_error: PyTypeRef, pub not_a_directory_error: PyTypeRef, pub permission_error: PyTypeRef, pub process_lookup_error: PyTypeRef, pub timeout_error: PyTypeRef, pub reference_error: PyTypeRef, pub runtime_error: PyTypeRef, pub not_implemented_error: PyTypeRef, pub recursion_error: PyTypeRef, pub syntax_error: PyTypeRef, pub target_scope_error: PyTypeRef, pub indentation_error: PyTypeRef, pub tab_error: PyTypeRef, pub system_error: PyTypeRef, pub type_error: PyTypeRef, pub value_error: PyTypeRef, pub unicode_error: PyTypeRef, pub 
unicode_decode_error: PyTypeRef, pub unicode_encode_error: PyTypeRef, pub unicode_translate_error: PyTypeRef, #[cfg(feature = "jit")] pub jit_error: PyTypeRef, pub warning: PyTypeRef, pub deprecation_warning: PyTypeRef, pub pending_deprecation_warning: PyTypeRef, pub runtime_warning: PyTypeRef, pub syntax_warning: PyTypeRef, pub user_warning: PyTypeRef, pub future_warning: PyTypeRef, pub import_warning: PyTypeRef, pub unicode_warning: PyTypeRef, pub bytes_warning: PyTypeRef, pub resource_warning: PyTypeRef, } pub fn exception_slots() -> crate::slots::PyTypeSlots { let mut slots = PyBaseException::make_slots(); // make_slots produces it with a tp_name of BaseException, which is usually wrong slots.name.get_mut().take(); slots } pub fn create_exception_type(name: &str, base: &PyTypeRef) -> PyTypeRef { create_type_with_slots(name, PyType::static_type(), base, exception_slots()) } /// This macro serves a goal of generating multiple BaseException / Exception /// subtypes in a uniform and convenient manner. /// It looks like `SimpleExtendsException` in `CPython`. /// https://github.com/python/cpython/blob/main/Objects/exceptions.c /// /// We need `ctx` to be ready to add `properties` / `custom` constructors / slots / etc. /// So, we use `extend_class!` macro as the second step in exception type definition. macro_rules! extends_exception { ( $class_name: ident, $base_class: ident, $ctx_name: ident, $docs: tt ) => { #[pyexception($class_name, $base_class)] #[derive(Debug)] #[doc = $docs] struct $class_name {} // We need this to make extend mechanism work: impl PyValue for $class_name { fn class(vm: &VirtualMachine) -> &PyTypeRef { &vm.ctx.exceptions.$ctx_name } } #[pyimpl(flags(BASETYPE, HAS_DICT))] impl $class_name { #[pyslot] fn tp_new( cls: PyTypeRef, args: FuncArgs, vm: &VirtualMachine, ) -> PyResult<PyRef<PyBaseException>> { // We need this method, because of how `CPython` copies `init` // from `BaseException` in `SimpleExtendsException` macro. 
// See: `BaseException_new` PyBaseException::tp_new(cls, args, vm) } #[pymethod(magic)] fn init( zelf: PyRef<PyBaseException>, args: FuncArgs, vm: &VirtualMachine, ) -> PyResult<()> { // We need this method, because of how `CPython` copies `init` // from `BaseException` in `SimpleExtendsException` macro. // See: `(initproc)BaseException_init` $base_class::init(zelf, args, vm) } } }; } // Exception types that extend `BaseException`, // sorted the same way CPython does. extends_exception! { PyException, PyBaseException, exception_type, "Common base class for all non-exit exceptions." } extends_exception! { PyGeneratorExit, PyBaseException, generator_exit, "Request that a generator exit." } extends_exception! { PySystemExit, PyBaseException, system_exit, "Request to exit from the interpreter." } extends_exception! { PyKeyboardInterrupt, PyBaseException, keyboard_interrupt, "Program interrupted by user." } // Exception types that extend `Exception`, // sorted the same way CPython does. extends_exception! { PyTypeError, PyException, type_error, "Inappropriate argument type." } extends_exception! { PyStopAsyncIteration, PyException, stop_async_iteration, "Signal the end from iterator.__anext__()." } extends_exception! { PyStopIteration, PyException, stop_iteration, "Signal the end from iterator.__next__()." } extends_exception! { PyOSError, PyException, os_error, "Base class for I/O related errors." } macro_rules! 
extend_exception { ( $ctx:expr, $class:expr, { $($name:expr => $value:expr),* $(,)* }) => { $class.set_str_attr("__new__", $ctx.new_method("__new__", $class.clone(), PyBaseException::tp_new)); $class.set_str_attr("__init__", $ctx.new_method("__init__", $class.clone(), PyBaseException::init)); extend_class!($ctx, $class, { $( $name => $value, )* }); }; } impl ExceptionZoo { pub(crate) fn init() -> Self { // The same order as definitions: let base_exception_type = PyBaseException::init_bare_type().clone(); let exception_type = PyException::init_bare_type().clone(); let system_exit = PySystemExit::init_bare_type().clone(); let keyboard_interrupt = PyKeyboardInterrupt::init_bare_type().clone(); let generator_exit = PyGeneratorExit::init_bare_type().clone(); let stop_iteration = PyStopIteration::init_bare_type().clone(); let stop_async_iteration = PyStopAsyncIteration::init_bare_type().clone(); let arithmetic_error = create_exception_type("ArithmeticError", &exception_type); let floating_point_error = create_exception_type("FloatingPointError", &arithmetic_error); let overflow_error = create_exception_type("OverflowError", &arithmetic_error); let zero_division_error = create_exception_type("ZeroDivisionError", &arithmetic_error); let assertion_error = create_exception_type("AssertionError", &exception_type); let attribute_error = create_exception_type("AttributeError", &exception_type); let buffer_error = create_exception_type("BufferError", &exception_type); let eof_error = create_exception_type("EOFError", &exception_type); let import_error = create_exception_type("ImportError", &exception_type); let module_not_found_error = create_exception_type("ModuleNotFoundError", &import_error); let lookup_error = create_exception_type("LookupError", &exception_type); let index_error = create_exception_type("IndexError", &lookup_error); let key_error = create_exception_type("KeyError", &lookup_error); let memory_error = create_exception_type("MemoryError", &exception_type); let 
name_error = create_exception_type("NameError", &exception_type); let unbound_local_error = create_exception_type("UnboundLocalError", &name_error); // os errors let os_error = PyOSError::init_bare_type().clone(); let blocking_io_error = create_exception_type("BlockingIOError", &os_error); let child_process_error = create_exception_type("ChildProcessError", &os_error); let connection_error = create_exception_type("ConnectionError", &os_error); let connection_aborted_error = create_exception_type("ConnectionAbortedError", &connection_error); let connection_refused_error = create_exception_type("ConnectionRefusedError", &connection_error); let connection_reset_error = create_exception_type("ConnectionResetError", &connection_error); let file_exists_error = create_exception_type("FileExistsError", &os_error); let file_not_found_error = create_exception_type("FileNotFoundError", &os_error); let interrupted_error = create_exception_type("InterruptedError", &os_error); let is_a_directory_error = create_exception_type("IsADirectoryError", &os_error); let not_a_directory_error = create_exception_type("NotADirectoryError", &os_error); let broken_pipe_error = create_exception_type("BrokenPipeError", &connection_error); let permission_error = create_exception_type("PermissionError", &os_error); let process_lookup_error = create_exception_type("ProcessLookupError", &os_error); let timeout_error = create_exception_type("TimeoutError", &os_error); let reference_error = create_exception_type("ReferenceError", &exception_type); let runtime_error = create_exception_type("RuntimeError", &exception_type); let not_implemented_error = create_exception_type("NotImplementedError", &runtime_error); let recursion_error = create_exception_type("RecursionError", &runtime_error); let syntax_error = create_exception_type("SyntaxError", &exception_type); let indentation_error = create_exception_type("IndentationError", &syntax_error); let tab_error = create_exception_type("TabError", 
&indentation_error); let target_scope_error = create_exception_type("TargetScopeError", &syntax_error); let system_error = create_exception_type("SystemError", &exception_type); let type_error = PyTypeError::init_bare_type().clone(); let value_error = create_exception_type("ValueError", &exception_type); let unicode_error = create_exception_type("UnicodeError", &value_error); let unicode_decode_error = create_exception_type("UnicodeDecodeError", &unicode_error); let unicode_encode_error = create_exception_type("UnicodeEncodeError", &unicode_error); let unicode_translate_error = create_exception_type("UnicodeTranslateError", &unicode_error); #[cfg(feature = "jit")] let jit_error = create_exception_type("JitError", &exception_type); let warning = create_exception_type("Warning", &exception_type); let deprecation_warning = create_exception_type("DeprecationWarning", &warning); let pending_deprecation_warning = create_exception_type("PendingDeprecationWarning", &warning); let runtime_warning = create_exception_type("RuntimeWarning", &warning); let syntax_warning = create_exception_type("SyntaxWarning", &warning); let user_warning = create_exception_type("UserWarning", &warning); let future_warning = create_exception_type("FutureWarning", &warning); let import_warning = create_exception_type("ImportWarning", &warning); let unicode_warning = create_exception_type("UnicodeWarning", &warning); let bytes_warning = create_exception_type("BytesWarning", &warning); let resource_warning = create_exception_type("ResourceWarning", &warning); Self { base_exception_type, system_exit, keyboard_interrupt, generator_exit, exception_type, stop_iteration, stop_async_iteration, arithmetic_error, floating_point_error, overflow_error, zero_division_error, assertion_error, attribute_error, buffer_error, eof_error, import_error, module_not_found_error, lookup_error, index_error, key_error, memory_error, name_error, unbound_local_error, os_error, blocking_io_error, child_process_error, 
connection_error, broken_pipe_error, connection_aborted_error, connection_refused_error, connection_reset_error, file_exists_error, file_not_found_error, interrupted_error, is_a_directory_error, not_a_directory_error, permission_error, process_lookup_error, timeout_error, reference_error, runtime_error, not_implemented_error, recursion_error, syntax_error, target_scope_error, indentation_error, tab_error, system_error, type_error, value_error, unicode_error, unicode_decode_error, unicode_encode_error, unicode_translate_error, #[cfg(feature = "jit")] jit_error, warning, deprecation_warning, pending_deprecation_warning, runtime_warning, syntax_warning, user_warning, future_warning, import_warning, unicode_warning, bytes_warning, resource_warning, } } pub fn extend(ctx: &PyContext) { let excs = &ctx.exceptions; PyBaseException::extend_class(ctx, &excs.base_exception_type); PyException::extend_class(ctx, &excs.exception_type); PyGeneratorExit::extend_class(ctx, &excs.generator_exit); PySystemExit::extend_class(ctx, &excs.system_exit); extend_class!(ctx, &excs.system_exit, { "code" => ctx.new_readonly_getset("code", excs.system_exit.clone(), system_exit_code), }); PyKeyboardInterrupt::extend_class(ctx, &excs.keyboard_interrupt); PyTypeError::extend_class(ctx, &excs.type_error); PyStopAsyncIteration::extend_class(ctx, &excs.stop_async_iteration); PyStopIteration::extend_class(ctx, &excs.stop_iteration); extend_class!(ctx, &excs.stop_iteration, { "value" => ctx.new_readonly_getset("value", excs.stop_iteration.clone(), make_arg_getter(0)), }); extend_exception!(ctx, &excs.arithmetic_error, {}); extend_class!(ctx, &excs.syntax_error, { "msg" => ctx.new_readonly_getset("msg", excs.syntax_error.clone(), make_arg_getter(0)), // TODO: members "filename" => ctx.none(), "lineno" => ctx.none(), "offset" => ctx.none(), "text" => ctx.none(), }); extend_exception!(ctx, &excs.import_error, { "__init__" => ctx.new_method("__init__", excs.import_error.clone(), import_error_init), "msg" 
=> ctx.new_readonly_getset("msg", excs.import_error.clone(), make_arg_getter(0)), }); extend_class!(ctx, &excs.key_error, { "__str__" => ctx.new_method("__str__", excs.key_error.clone(), key_error_str), }); PyOSError::extend_class(ctx, &excs.os_error); let errno_getter = ctx.new_readonly_getset("errno", excs.os_error.clone(), |exc: PyBaseExceptionRef| { let args = exc.args(); let args = args.as_slice(); args.get(0).filter(|_| args.len() > 1).cloned() }); #[cfg(windows)] extend_class!(ctx, &excs.os_error, { // TODO: this isn't really accurate "winerror" => errno_getter.clone(), }); extend_class!(ctx, &excs.os_error, { "errno" => errno_getter, "strerror" => ctx.new_readonly_getset("strerror", excs.os_error.clone(), make_arg_getter(1)), }); extend_class!(ctx, &excs.unicode_decode_error, { "encoding" => ctx.new_readonly_getset("encoding", excs.unicode_decode_error.clone(), make_arg_getter(0)), "object" => ctx.new_readonly_getset("object", excs.unicode_decode_error.clone(), make_arg_getter(1)), "start" => ctx.new_readonly_getset("start", excs.unicode_decode_error.clone(), make_arg_getter(2)), "end" => ctx.new_readonly_getset("end", excs.unicode_decode_error.clone(), make_arg_getter(3)), "reason" => ctx.new_readonly_getset("reason", excs.unicode_decode_error.clone(), make_arg_getter(4)), }); extend_class!(ctx, &excs.unicode_encode_error, { "encoding" => ctx.new_readonly_getset("encoding", excs.unicode_encode_error.clone(), make_arg_getter(0)), "object" => ctx.new_readonly_getset("object", excs.unicode_encode_error.clone(), make_arg_getter(1)), "start" => ctx.new_readonly_getset("start", excs.unicode_encode_error.clone(), make_arg_getter(2), ), "end" => ctx.new_readonly_getset("end", excs.unicode_encode_error.clone(), make_arg_getter(3)), "reason" => ctx.new_readonly_getset("reason", excs.unicode_encode_error.clone(), make_arg_getter(4)), }); extend_class!(ctx, &excs.unicode_translate_error, { "encoding" => ctx.new_readonly_getset("encoding", 
excs.unicode_translate_error.clone(), none_getter), "object" => ctx.new_readonly_getset("object", excs.unicode_translate_error.clone(), make_arg_getter(0)), "start" => ctx.new_readonly_getset("start", excs.unicode_translate_error.clone(), make_arg_getter(1)), "end" => ctx.new_readonly_getset("end", excs.unicode_translate_error.clone(), make_arg_getter(2)), "reason" => ctx.new_readonly_getset("reason", excs.unicode_translate_error.clone(), make_arg_getter(3)), }); } } fn import_error_init(exc_self: PyObjectRef, args: FuncArgs, vm: &VirtualMachine) -> PyResult<()> { vm.set_attr( &exc_self, "name", vm.unwrap_or_none(args.kwargs.get("name").cloned()), )?; vm.set_attr( &exc_self, "path", vm.unwrap_or_none(args.kwargs.get("path").cloned()), )?; Ok(()) } fn none_getter(_obj: PyObjectRef, vm: &VirtualMachine) -> PyNoneRef { vm.ctx.none.clone() } fn make_arg_getter(idx: usize) -> impl Fn(PyBaseExceptionRef) -> Option<PyObjectRef> { move |exc| exc.get_arg(idx) } fn key_error_str(exc: PyBaseExceptionRef, vm: &VirtualMachine) -> PyStrRef { let args = exc.args(); if args.as_slice().len() == 1 { exception_args_as_string(vm, args, false) .into_iter() .exactly_one() .unwrap() } else { exc.str(vm) } } fn system_exit_code(exc: PyBaseExceptionRef) -> Option<PyObjectRef> { exc.args.read().as_slice().first().map(|code| { match_class!(match code { ref tup @ PyTuple => match tup.as_slice() { [x] => x.clone(), _ => code.clone(), }, other => other.clone(), }) }) } pub struct SerializeException<'s> { vm: &'s VirtualMachine, exc: &'s PyBaseExceptionRef, } impl<'s> SerializeException<'s> { pub fn new(vm: &'s VirtualMachine, exc: &'s PyBaseExceptionRef) -> Self { SerializeException { vm, exc } } } impl serde::Serialize for SerializeException<'_> { fn serialize<S: serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { use serde::ser::*; let mut struc = s.serialize_struct("PyBaseException", 7)?; struc.serialize_field("exc_type", &self.exc.class().name)?; let tbs = { struct 
Tracebacks(PyTracebackRef); impl serde::Serialize for Tracebacks { fn serialize<S: serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut s = s.serialize_seq(None)?; for tb in self.0.iter() { s.serialize_element(&*tb)?; } s.end() } } self.exc.traceback().map(Tracebacks) }; struc.serialize_field("traceback", &tbs)?; struc.serialize_field( "cause", &self.exc.cause().as_ref().map(|e| Self::new(self.vm, e)), )?; struc.serialize_field( "context", &self.exc.context().as_ref().map(|e| Self::new(self.vm, e)), )?; struc.serialize_field("suppress_context", &self.exc.get_suppress_context())?; let args = { struct Args<'vm>(&'vm VirtualMachine, PyTupleRef); impl serde::Serialize for Args<'_> { fn serialize<S: serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { s.collect_seq( self.1 .as_slice() .iter() .map(|arg| crate::py_serde::PyObjectSerializer::new(self.0, arg)), ) } } Args(self.vm, self.exc.args()) }; struc.serialize_field("args", &args)?; let rendered = { let mut rendered = String::new(); write_exception(&mut rendered, self.vm, self.exc).map_err(S::Error::custom)?; rendered }; struc.serialize_field("rendered", &rendered)?; struc.end() } } pub(crate) fn cstring_error(vm: &VirtualMachine) -> PyBaseExceptionRef { vm.new_value_error("embedded null character".to_owned()) } impl IntoPyException for std::ffi::NulError { fn into_pyexception(self, vm: &VirtualMachine) -> PyBaseExceptionRef { cstring_error(vm) } } #[cfg(windows)] impl<C: widestring::UChar> IntoPyException for widestring::NulError<C> { fn into_pyexception(self, vm: &VirtualMachine) -> PyBaseExceptionRef { cstring_error(vm) } }
36.243191
141
0.617639
115aef1424072a86b3020395c47df3ddf1c4359c
2,025
use crate::GameState; use bevy::prelude::*; use bevy_asset_loader::{AssetCollection, AssetLoader}; use bevy_ecs_ldtk::LdtkAsset; pub struct LoadingPlugin; impl Plugin for LoadingPlugin { fn build(&self, app: &mut App) { AssetLoader::new(GameState::Loading) .with_collection::<UIAssets>() .with_collection::<GameAssets>() .continue_to_state(GameState::MainMenu) .build(app); app.add_startup_system(hot_reload); } } #[derive(AssetCollection)] pub struct UIAssets { #[asset(path = "goudy-bookletter-1911.otf")] pub font: Handle<Font>, #[asset(path = "logo.png")] pub logo: Handle<Image>, #[asset(path = "character.png")] pub character: Handle<Image>, // Badges #[asset(path = "badge-skin-light.png")] pub badge_skin_light: Handle<Image>, #[asset(path = "badge-skin-medium.png")] pub badge_skin_medium: Handle<Image>, #[asset(path = "badge-skin-dark.png")] pub badge_skin_dark: Handle<Image>, #[asset(path = "badge-male.png")] pub badge_male: Handle<Image>, #[asset(path = "badge-female.png")] pub badge_female: Handle<Image>, #[asset(path = "badge-rich.png")] pub badge_rich: Handle<Image>, #[asset(path = "badge-smart.png")] pub badge_smart: Handle<Image>, #[asset(path = "badge-strong.png")] pub badge_strong: Handle<Image>, } #[derive(AssetCollection)] pub struct GameAssets { #[asset(path = "fortuna.ldtk")] pub map: Handle<LdtkAsset>, #[asset(texture_atlas(tile_size_x = 48.0, tile_size_y = 48.0, columns = 35, rows = 1))] #[asset(path = "player.png")] pub player_atlas: Handle<TextureAtlas>, #[asset(texture_atlas(tile_size_x = 64.0, tile_size_y = 32.0, columns = 37, rows = 1))] #[asset(path = "vfx.png")] pub vfx_atlas: Handle<TextureAtlas>, #[asset(path = "jump.ogg")] pub jump_sound: Handle<AudioSource>, } fn hot_reload(_asset_server: Res<AssetServer>) { // asset_server.watch_for_changes().unwrap(); }
31.640625
91
0.647407
f8ba22483421a4fce53f6406db0bef3a004ab434
12,261
use crate::bitboards::bitboards::constants::*; use crate::bitboards::bitboards::*; use crate::bitboards::magic_constants::*; use crate::board_representation::game_state::{ file_of, rank_of, BLACK, CASTLE_ALL, CASTLE_ALL_BLACK, CASTLE_ALL_WHITE, CASTLE_BLACK_KS, CASTLE_BLACK_QS, CASTLE_WHITE_KS, CASTLE_WHITE_QS, WHITE, }; use crate::move_generation::magic::Magic; use crate::move_generation::movegen::{bishop_attack, rook_attack}; use std::fmt::Display; pub mod bitboards; //Code for generating bitboards:: pub(crate) fn arr_2d_to_string<T: Display>(arr: &[[T; 64]; 64], name: &str) -> String { let mut res_str: String = String::new(); res_str.push_str(&format!("#[rustfmt::skip]\npub const {} : [[{};64];64]= [", name, std::any::type_name::<T>(),)); for a in arr.iter() { res_str.push_str("["); for i in a.iter() { res_str.push_str(&format!("{}{}, ", *i, std::any::type_name::<T>())); } res_str.push_str("], "); } res_str.push_str("];"); res_str } pub(crate) fn side_arr_to_string<T: Display>(arr: &[[T; 64]; 2], name: &str) -> String { let mut res_str: String = String::new(); res_str.push_str(&format!("#[rustfmt::skip]\npub const {} : [[{};64];2] = [", name, std::any::type_name::<T>())); for side in 0..2 { res_str.push_str("["); for i in arr[side].iter() { res_str.push_str(&format!("{}{}, ", *i, std::any::type_name::<T>())); } res_str.push_str("],"); } res_str.push_str("];"); res_str } pub(crate) fn arr_to_string<T: Display>(arr: &[T], name: &str) -> String { let mut res_str: String = String::new(); res_str.push_str(&format!("#[rustfmt::skip]\npub const {} : [{};{}] = [", name, std::any::type_name::<T>(), arr.len())); for i in arr { res_str.push_str(&format!("{}{}, ", *i, std::any::type_name::<T>())); } res_str.push_str("];"); res_str } pub(crate) fn magic_arr_to_string(arr: &[Magic], name: &str) -> String { let mut res_str = String::new(); res_str.push_str(&format!("#[rustfmt::skip]\npub const {}: [Magic;{}] = [", name, arr.len())); for i in arr { 
res_str.push_str(&format!("{}, ", *i)); } res_str.push_str("];"); res_str } pub fn print_magics() { let mut res = Vec::with_capacity(0); let mut previous_offset = 0; for sq in 0..64 { let mask = OCCUPANCY_MASKS_ROOK[sq]; res.push(Magic { occupancy_mask: mask, shift: mask.count_ones() as usize, magic: MAGICS_ROOK[sq], offset: previous_offset, }); previous_offset += 1 << OCCUPANCY_MASKS_ROOK[sq].count_ones(); } println!("{}", magic_arr_to_string(&res, "MAGIC_ROOK")); println!("Offset: {}", previous_offset); let mut res = Vec::with_capacity(0); for sq in 0..64 { let mask = OCCUPANCY_MASKS_BISHOP[sq]; res.push(Magic { occupancy_mask: mask, shift: mask.count_ones() as usize, magic: MAGICS_BISHOP[sq], offset: previous_offset, }); previous_offset += 1 << OCCUPANCY_MASKS_BISHOP[sq].count_ones(); } println!("{}", magic_arr_to_string(&res, "MAGIC_BISHOP")); println!("Offset: {}", previous_offset); } pub fn print_castle_permisssion() { let mut res = [CASTLE_ALL; 64]; res[square::E1] &= !CASTLE_ALL_WHITE; res[square::A1] &= !CASTLE_WHITE_QS; res[square::H1] &= !CASTLE_WHITE_KS; res[square::E8] &= !CASTLE_ALL_BLACK; res[square::A8] &= !CASTLE_BLACK_QS; res[square::H8] &= !CASTLE_BLACK_KS; println!("{}", arr_to_string(&res, "CASTLE_PERMISSION")); } pub const fn occupancy_mask_rook(square: usize) -> u64 { ((RANKS[rank_of(square)] & !(FILES[0] | FILES[7])) | (FILES[file_of(square)] & !(RANKS[0] | RANKS[7]))) & not_square(square) } pub fn print_rook_occupancy_masks() { let mut res = [0u64; 64]; for sq in 0..64 { res[sq] = occupancy_mask_rook(sq); } println!("{}", arr_to_string(&res, "OCCUPANCY_MASKS_ROOK")); } pub fn occupancy_mask_bishops(square: usize) -> u64 { let mut res = 0u64; let rk = rank_of(square) as isize; let fl = file_of(square) as isize; let dirs: [(isize, isize); 4] = [(1, 1), (-1, -1), (1, -1), (-1, 1)]; for dir in dirs.iter() { let (file_i, rank_i) = dir; let mut rn = rk + rank_i; let mut fnn = fl + file_i; while rn >= 1 && rn <= 6 && fnn >= 1 && fnn <= 6 { res |= 
1u64 << (rn * 8 + fnn); rn += rank_i; fnn += file_i; } } res } pub fn print_bishop_occupancy_masks() { let mut res = [0u64; 64]; for sq in 0..64 { res[sq] = occupancy_mask_bishops(sq); } println!("{}", arr_to_string(&res, "OCCUPANCY_MASKS_BISHOP")) } pub fn print_bishop_rays() { let mut res = [[0u64; 64]; 64]; for king_sq in 0..64 { for bishop_sq in 0..64 { res[king_sq][bishop_sq] = get_bishop_ray_slow(FREEFIELD_BISHOP_ATTACKS[king_sq], king_sq, bishop_sq); } } println!("{}", arr_2d_to_string(&res, "BISHOP_RAYS")) } //Gets the ray of one bishop into a specific direction pub fn get_bishop_ray_slow(bishop_attack_in_all_directions: u64, target_square: usize, bishop_square: usize) -> u64 { let diff = target_square as isize - bishop_square as isize; let target_rank = rank_of(target_square); let target_file = file_of(target_square); let bishop_rank = rank_of(bishop_square); let bishop_file = file_of(bishop_square); if diff > 0 { if diff % 9 == 0 { FILES_LESS_THAN[target_file] & FILES_GREATER_THAN[bishop_file] & RANKS_LESS_THAN[target_rank] & RANKS_GREATER_THAN[bishop_rank] & bishop_attack_in_all_directions } else { FILES_GREATER_THAN[target_file] & FILES_LESS_THAN[bishop_file] & RANKS_LESS_THAN[target_rank] & RANKS_GREATER_THAN[bishop_rank] & bishop_attack_in_all_directions } } else if diff % -9 == 0 { FILES_GREATER_THAN[target_file] & FILES_LESS_THAN[bishop_file] & RANKS_GREATER_THAN[target_rank] & RANKS_LESS_THAN[bishop_rank] & bishop_attack_in_all_directions } else { FILES_LESS_THAN[target_file] & FILES_GREATER_THAN[bishop_file] & RANKS_GREATER_THAN[target_rank] & RANKS_LESS_THAN[bishop_rank] & bishop_attack_in_all_directions } } pub fn print_rook_rays() { let mut res = [[0u64; 64]; 64]; for king_sq in 0..64 { for rook_sq in 0..64 { res[king_sq][rook_sq] = get_rook_ray_slow(FREEFIELD_ROOK_ATTACKS[king_sq], king_sq, rook_sq); } } println!("{}", arr_2d_to_string(&res, "ROOK_RAYS")); } //Gets the ray of one rook into a specific direction pub fn 
get_rook_ray_slow(rook_attacks_in_all_directions: u64, target_square: usize, rook_square: usize) -> u64 { let diff = target_square as isize - rook_square as isize; let target_rank = rank_of(target_square); let target_file = file_of(target_square); let rook_rank = rank_of(rook_square); let rook_file = file_of(rook_square); if diff > 0 { //Same vertical if target_rank == rook_rank { FILES_LESS_THAN[target_file] & FILES_GREATER_THAN[rook_file] & rook_attacks_in_all_directions } else { RANKS_LESS_THAN[target_rank] & RANKS_GREATER_THAN[rook_rank] & rook_attacks_in_all_directions } } else if target_rank == rook_rank { FILES_GREATER_THAN[target_file] & FILES_LESS_THAN[rook_file] & rook_attacks_in_all_directions } else { RANKS_GREATER_THAN[target_rank] & RANKS_LESS_THAN[rook_rank] & rook_attacks_in_all_directions } } pub fn print_king_zone() { let mut res = [0u64; 64]; for king_sq in 0..64 { let zone = 1u64 << king_sq | KING_ATTACKS[king_sq]; res[king_sq] = zone | north_one(zone) | south_one(zone); if file_of(king_sq) == 0 { res[king_sq] |= east_one(res[king_sq]); } else if file_of(king_sq) == 7 { res[king_sq] |= west_one(res[king_sq]); } } println!("{}", arr_to_string(&res, "KING_ZONE")) } pub fn print_freefield_rook_attacks() { let mut res = [0u64; 64]; for (sq, item) in res.iter_mut().enumerate() { *item = rook_attack(sq, 0u64); } println!("{}", arr_to_string(&res, "FREEFIELD_ROOK_ATTACKS")) } pub fn print_freefield_bishop_attacks() { let mut res = [0u64; 64]; for (sq, item) in res.iter_mut().enumerate() { *item = bishop_attack(sq, 0u64); } println!("{}", arr_to_string(&res, "FREEFIELD_BISHOP_ATTACKS")) } pub fn print_shielding_pawns() { let mut res = [[0u64; 64]; 2]; for sq in 0..64 { let king = 1u64 << sq; let shield = king << 8 | north_west_one(king) | north_east_one(king); res[WHITE][sq] = shield | shield << 8; let shield = king >> 8 | south_west_one(king) | south_east_one(king); res[BLACK][sq] = shield | shield >> 8; } for rank in 0..8 { for side in 0..2 { 
res[side][8 * rank] = res[side][8 * rank + 1]; res[side][8 * rank + 7] = res[side][8 * rank + 6]; } } println!("{}", side_arr_to_string(&res, "SHIELDING_PAWNS")); } pub fn print_diagonally_adjacent() { let mut res = [0u64; 64]; for (sq, item) in res.iter_mut().enumerate() { let board = 1u64 << sq; *item = north_east_one(board) | north_west_one(board) | south_east_one(board) | south_west_one(board); } println!("{}", arr_to_string(&res, "DIAGONALLY_ADJACENT")) } pub fn print_files_less_than() { let mut res = [0u64; 8]; for (files, item) in res.iter_mut().enumerate() { for files_less_than in 0..files { *item |= FILES[files_less_than]; } } println!("{}", arr_to_string(&res, "FILES_LESS_THAN")) } pub fn print_ranks_less_than() { let mut res = [0u64; 8]; for (ranks, item) in res.iter_mut().enumerate() { for ranks_less_than in 0..ranks { *item |= RANKS[ranks_less_than]; } } println!("{}", arr_to_string(&res, "RANKS_LESS_THAN")) } pub fn print_files_greater_than() { let mut res = [0u64; 8]; for files in 0..8 { res[files] = !FILES_LESS_THAN[files] & !FILES[files]; } println!("{}", arr_to_string(&res, "FILES_GREATER_THAN")) } pub fn print_ranks_greater_than() { let mut res = [0u64; 8]; for ranks in 0..8 { res[ranks] = !RANKS_LESS_THAN[ranks] & !RANKS[ranks]; } println!("{}", arr_to_string(&res, "RANKS_GREATER_THAN")) } fn king_attack(mut king_board: u64) -> u64 { let mut attacks = east_one(king_board) | west_one(king_board); king_board |= attacks; attacks |= south_one(king_board) | north_one(king_board); attacks } pub fn print_king_attacks() { let mut res = [0u64; 64]; for (square, item) in res.iter_mut().enumerate() { *item = king_attack(1u64 << square); } println!("{}", arr_to_string(&res, "KING_ATTACKS")) } fn knight_attack(knight: u64) -> u64 { let mut attacks; let mut east = east_one(knight); let mut west = west_one(knight); attacks = (east | west) << 16; attacks |= (east | west) >> 16; east = east_one(east); west = west_one(west); attacks |= (east | west) << 8; attacks 
|= (east | west) >> 8; attacks } pub fn print_knight_attacks() { let mut res = [0u64; 64]; for (square, item) in res.iter_mut().enumerate() { *item = knight_attack(1u64 << square); } println!("{}", arr_to_string(&res, "KNIGHT_ATTACKS")); } pub fn print_ranks() { let mut res = [0u64; 8]; for rank in 0..8 { if rank == 0 { res[0] = 1u64 | 1u64 << 1 | 1u64 << 2 | 1u64 << 3 | 1u64 << 4 | 1u64 << 5 | 1u64 << 6 | 1u64 << 7; } else { res[rank] = res[rank - 1] << 8; } } println!("{}", arr_to_string(&res, "RANKS")); } pub fn print_file() { let mut res = [0u64; 8]; for file in 0..8 { if file == 0 { res[0] = 1u64 | 1u64 << 8 | 1u64 << 16 | 1u64 << 24 | 1u64 << 32 | 1u64 << 40 | 1u64 << 48 | 1u64 << 56; } else { res[file] = res[file - 1] << 1; } } println!("{}", arr_to_string(&res, "FILES")) }
36.491071
173
0.601664
29b5a652f4139462499503aad3f0671a7ffaa5f7
1,881
#[doc = "Register `ACTIVE0` reader"] pub struct R(crate::R<ACTIVE0_SPEC>); impl core::ops::Deref for R { type Target = crate::R<ACTIVE0_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::convert::From<crate::R<ACTIVE0_SPEC>> for R { fn from(reader: crate::R<ACTIVE0_SPEC>) -> Self { R(reader) } } #[doc = "Field `ACT` reader - Active flag for DMA channel n. Bit n corresponds to DMA channel n. The number of bits = number of DMA channels in this device. Other bits are reserved. 0 = not active. 1 = active."] pub struct ACT_R(crate::FieldReader<u32, u32>); impl ACT_R { pub(crate) fn new(bits: u32) -> Self { ACT_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for ACT_R { type Target = crate::FieldReader<u32, u32>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl R { #[doc = "Bits 0:31 - Active flag for DMA channel n. Bit n corresponds to DMA channel n. The number of bits = number of DMA channels in this device. Other bits are reserved. 0 = not active. 1 = active."] #[inline(always)] pub fn act(&self) -> ACT_R { ACT_R::new((self.bits & 0xffff_ffff) as u32) } } #[doc = "Channel Active status for all DMA channels.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [active0](index.html) module"] pub struct ACTIVE0_SPEC; impl crate::RegisterSpec for ACTIVE0_SPEC { type Ux = u32; } #[doc = "`read()` method returns [active0::R](R) reader structure"] impl crate::Readable for ACTIVE0_SPEC { type Reader = R; } #[doc = "`reset()` method sets ACTIVE0 to value 0"] impl crate::Resettable for ACTIVE0_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
36.173077
253
0.644338
71f4caa804cb5c3ea932d63dde22ec8a6a0ad94b
2,263
#![warn(clippy::all, clippy::pedantic, clippy::nursery, clippy::cargo)] #![warn(missing_docs)] #![doc = include_str!("../README.md")] // Tell the github workflow check to not format the generated rust program bril_grammar.rs #[doc(hidden)] #[rustfmt::skip] pub mod bril_grammar; #[doc(hidden)] pub mod cli; use bril_rs::{AbstractProgram, Position}; #[doc(hidden)] #[derive(Clone)] pub struct Lines { use_pos: bool, new_lines: Vec<usize>, } impl Lines { fn new(input: &str, use_pos: bool) -> Self { Self { use_pos, new_lines: input .as_bytes() .iter() .enumerate() .filter_map(|(idx, b)| if *b == b'\n' { Some(idx) } else { None }) .collect(), } } fn get_position(&self, index: usize) -> Option<Position> { if self.use_pos { Some(self.new_lines.iter().enumerate().fold( Position { col: 1, row: 0 }, |current, (line_num, idx)| { if *idx < index { Position { row: (line_num + 2) as u64, col: (index - idx) as u64, } } else { current } }, )) } else { None } } } /// The entrance point to the bril2json parser. It takes an ```input```:[`std::io::Read`] which should be the Bril text file. You can control whether it includes source code positions with ```use_pos```. /// # Panics /// Will panic if the input is not well-formed Bril text pub fn parse_abstract_program_from_read<R: std::io::Read>( mut input: R, use_pos: bool, ) -> AbstractProgram { let mut buffer = String::new(); input.read_to_string(&mut buffer).unwrap(); let parser = bril_grammar::AbstractProgramParser::new(); parser .parse(&Lines::new(&buffer, use_pos), &buffer) .unwrap() } #[must_use] /// A wrapper around [`parse_abstract_program_from_read`] which assumes [`std::io::Stdin`] pub fn parse_abstract_program(use_pos: bool) -> AbstractProgram { parse_abstract_program_from_read(std::io::stdin(), use_pos) }
30.581081
203
0.543526
db26fbb811ad4300411036f61da9c15398d4ff35
11,423
use crate::commands::WholeStreamCommand; use crate::prelude::*; use filesize::file_real_size_fast; use glob::*; use indexmap::map::IndexMap; use nu_errors::ShellError; use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value}; use nu_source::Tagged; use std::path::PathBuf; use std::sync::atomic::Ordering; const NAME: &str = "du"; const GLOB_PARAMS: MatchOptions = MatchOptions { case_sensitive: true, require_literal_separator: true, require_literal_leading_dot: false, }; pub struct Du; #[derive(Deserialize, Clone)] pub struct DuArgs { path: Option<Tagged<PathBuf>>, all: bool, deref: bool, exclude: Option<Tagged<String>>, #[serde(rename = "max-depth")] max_depth: Option<Tagged<u64>>, #[serde(rename = "min-size")] min_size: Option<Tagged<u64>>, } #[async_trait] impl WholeStreamCommand for Du { fn name(&self) -> &str { NAME } fn signature(&self) -> Signature { Signature::build(NAME) .optional("path", SyntaxShape::Pattern, "starting directory") .switch( "all", "Output file sizes as well as directory sizes", Some('a'), ) .switch( "deref", "Dereference symlinks to their targets for size", Some('r'), ) .named( "exclude", SyntaxShape::Pattern, "Exclude these file names", Some('x'), ) .named( "max-depth", SyntaxShape::Int, "Directory recursion limit", Some('d'), ) .named( "min-size", SyntaxShape::Int, "Exclude files below this size", Some('m'), ) } fn usage(&self) -> &str { "Find disk usage sizes of specified items" } async fn run( &self, args: CommandArgs, registry: &CommandRegistry, ) -> Result<OutputStream, ShellError> { du(args, registry) } fn examples(&self) -> Vec<Example> { vec![Example { description: "Disk usage of the current directory", example: "du", result: None, }] } } fn du(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> { let registry = registry.clone(); let tag = args.call_info.name_tag.clone(); let ctrl_c = args.ctrl_c.clone(); let ctrl_c_copy = ctrl_c.clone(); let stream = async_stream! 
{ let (args, mut input): (DuArgs, _) = args.process(&registry).await?; let exclude = args.exclude.map_or(Ok(None), move |x| { Pattern::new(&x.item) .map(Option::Some) .map_err(|e| ShellError::labeled_error(e.msg, "glob error", x.tag.clone())) })?; let include_files = args.all; let paths = match args.path { Some(p) => { let p = p.item.to_str().expect("Why isn't this encoded properly?"); glob::glob_with(p, GLOB_PARAMS) } None => glob::glob_with("*", GLOB_PARAMS), } .map_err(|e| ShellError::labeled_error(e.msg, "glob error", tag.clone()))? .filter(move |p| { if include_files { true } else { match p { Ok(f) if f.is_dir() => true, Err(e) if e.path().is_dir() => true, _ => false, } } }) .map(|v| v.map_err(glob_err_into)); let all = args.all; let deref = args.deref; let max_depth = args.max_depth.map(|f| f.item); let min_size = args.min_size.map(|f| f.item); let params = DirBuilder { tag: tag.clone(), min: min_size, deref, exclude, all, }; let mut inp = futures::stream::iter(paths).interruptible(ctrl_c.clone()); while let Some(path) = inp.next().await { match path { Ok(p) => { if p.is_dir() { yield Ok(ReturnSuccess::Value( DirInfo::new(p, &params, max_depth, ctrl_c.clone()).into(), )) } else { for v in FileInfo::new(p, deref, tag.clone()).into_iter() { yield Ok(ReturnSuccess::Value(v.into())); } } } Err(e) => yield Err(e), } } }; Ok(stream.interruptible(ctrl_c_copy).to_output_stream()) } pub struct DirBuilder { tag: Tag, min: Option<u64>, deref: bool, exclude: Option<Pattern>, all: bool, } impl DirBuilder { pub fn new( tag: Tag, min: Option<u64>, deref: bool, exclude: Option<Pattern>, all: bool, ) -> DirBuilder { DirBuilder { tag, min, deref, exclude, all, } } } pub struct DirInfo { dirs: Vec<DirInfo>, files: Vec<FileInfo>, errors: Vec<ShellError>, size: u64, blocks: u64, path: PathBuf, tag: Tag, } struct FileInfo { path: PathBuf, size: u64, blocks: Option<u64>, tag: Tag, } impl FileInfo { fn new(path: impl Into<PathBuf>, deref: bool, tag: Tag) -> Result<Self, ShellError> 
{ let path = path.into(); let m = if deref { std::fs::metadata(&path) } else { std::fs::symlink_metadata(&path) }; match m { Ok(d) => { let block_size = file_real_size_fast(&path, &d).ok(); Ok(FileInfo { path, blocks: block_size, size: d.len(), tag, }) } Err(e) => Err(e.into()), } } } impl DirInfo { pub fn new( path: impl Into<PathBuf>, params: &DirBuilder, depth: Option<u64>, ctrl_c: Arc<AtomicBool>, ) -> Self { let path = path.into(); let mut s = Self { dirs: Vec::new(), errors: Vec::new(), files: Vec::new(), size: 0, blocks: 0, tag: params.tag.clone(), path, }; match std::fs::read_dir(&s.path) { Ok(d) => { for f in d { if ctrl_c.load(Ordering::SeqCst) { break; } match f { Ok(i) => match i.file_type() { Ok(t) if t.is_dir() => { s = s.add_dir(i.path(), depth, &params, ctrl_c.clone()) } Ok(_t) => s = s.add_file(i.path(), &params), Err(e) => s = s.add_error(e.into()), }, Err(e) => s = s.add_error(e.into()), } } } Err(e) => s = s.add_error(e.into()), } s } fn add_dir( mut self, path: impl Into<PathBuf>, mut depth: Option<u64>, params: &DirBuilder, ctrl_c: Arc<AtomicBool>, ) -> Self { if let Some(current) = depth { if let Some(new) = current.checked_sub(1) { depth = Some(new); } else { return self; } } let d = DirInfo::new(path, &params, depth, ctrl_c); self.size += d.size; self.blocks += d.blocks; self.dirs.push(d); self } fn add_file(mut self, f: impl Into<PathBuf>, params: &DirBuilder) -> Self { let f = f.into(); let include = params .exclude .as_ref() .map_or(true, |x| !x.matches_path(&f)); if include { match FileInfo::new(f, params.deref, self.tag.clone()) { Ok(file) => { let inc = params.min.map_or(true, |s| file.size >= s); if inc { self.size += file.size; self.blocks += file.blocks.unwrap_or(0); if params.all { self.files.push(file); } } } Err(e) => self = self.add_error(e), } } self } fn add_error(mut self, e: ShellError) -> Self { self.errors.push(e); self } pub fn get_size(&self) -> u64 { self.size } } fn glob_err_into(e: GlobError) -> ShellError { let e = 
e.into_error(); ShellError::from(e) } fn value_from_vec<V>(vec: Vec<V>, tag: &Tag) -> Value where V: Into<Value>, { if vec.is_empty() { UntaggedValue::nothing() } else { let values = vec.into_iter().map(Into::into).collect::<Vec<Value>>(); UntaggedValue::Table(values) } .retag(tag) } impl From<DirInfo> for Value { fn from(d: DirInfo) -> Self { let mut r: IndexMap<String, Value> = IndexMap::new(); r.insert( "path".to_string(), UntaggedValue::path(d.path).retag(&d.tag), ); r.insert( "apparent".to_string(), UntaggedValue::bytes(d.size).retag(&d.tag), ); r.insert( "physical".to_string(), UntaggedValue::bytes(d.blocks).retag(&d.tag), ); r.insert("directories".to_string(), value_from_vec(d.dirs, &d.tag)); r.insert("files".to_string(), value_from_vec(d.files, &d.tag)); if !d.errors.is_empty() { let v = UntaggedValue::Table( d.errors .into_iter() .map(move |e| UntaggedValue::Error(e).into_untagged_value()) .collect::<Vec<Value>>(), ) .retag(&d.tag); r.insert("errors".to_string(), v); } Value { value: UntaggedValue::row(r), tag: d.tag, } } } impl From<FileInfo> for Value { fn from(f: FileInfo) -> Self { let mut r: IndexMap<String, Value> = IndexMap::new(); r.insert( "path".to_string(), UntaggedValue::path(f.path).retag(&f.tag), ); r.insert( "apparent".to_string(), UntaggedValue::bytes(f.size).retag(&f.tag), ); let b = f .blocks .map(UntaggedValue::bytes) .unwrap_or_else(UntaggedValue::nothing) .retag(&f.tag); r.insert("physical".to_string(), b); r.insert( "directories".to_string(), UntaggedValue::nothing().retag(&f.tag), ); r.insert("files".to_string(), UntaggedValue::nothing().retag(&f.tag)); UntaggedValue::row(r).retag(&f.tag) } } #[cfg(test)] mod tests { use super::Du; #[test] fn examples_work_as_expected() { use crate::examples::test as test_examples; test_examples(Du {}) } }
26.199541
91
0.461525
1c1f9d0efedd020c202cca64a321e9ebf3b2ebe6
931
#![no_main] use libfuzzer_sys::*; use wasmparser::{Validator, WasmFeatures}; fuzz_target!(|data: &[u8]| { let mut validator = Validator::new(); let byte1 = match data.get(0) { Some(byte) => byte, None => return, }; let byte2 = match data.get(1) { Some(byte) => byte, None => return, }; validator.wasm_features(WasmFeatures { reference_types: (byte1 & 0b0000_0001) != 0, multi_value: (byte1 & 0b0000_0010) != 0, threads: (byte1 & 0b0000_0100) != 0, simd: (byte1 & 0b0000_1000) != 0, module_linking: (byte1 & 0b0001_0000) != 0, tail_call: (byte1 & 0b0010_0000) != 0, bulk_memory: (byte1 & 0b0100_0000) != 0, deterministic_only: (byte1 & 0b1000_0000) != 0, multi_memory: (byte2 & 0b0000_0001) != 0, memory64: (byte2 & 0b0000_0010) != 0, }); drop(validator.validate_all(&data[2..])); });
30.032258
55
0.56928
5b6147c72230d53b943586280b3a6d457d103886
34,490
use rustc_ast as ast; use rustc_ast::visit::{self, AssocCtxt, FnCtxt, FnKind, Visitor}; use rustc_ast::{AssocConstraint, AssocConstraintKind, NodeId}; use rustc_ast::{PatKind, RangeEnd, VariantData}; use rustc_errors::struct_span_err; use rustc_feature::{AttributeGate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP}; use rustc_feature::{Features, GateIssue}; use rustc_session::parse::{feature_err, feature_err_issue}; use rustc_session::Session; use rustc_span::source_map::Spanned; use rustc_span::symbol::sym; use rustc_span::Span; use tracing::debug; macro_rules! gate_feature_fn { ($visitor: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr, $help: expr) => {{ let (visitor, has_feature, span, name, explain, help) = (&*$visitor, $has_feature, $span, $name, $explain, $help); let has_feature: bool = has_feature(visitor.features); debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature); if !has_feature && !span.allows_unstable($name) { feature_err_issue(&visitor.sess.parse_sess, name, span, GateIssue::Language, explain) .help(help) .emit(); } }}; ($visitor: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr) => {{ let (visitor, has_feature, span, name, explain) = (&*$visitor, $has_feature, $span, $name, $explain); let has_feature: bool = has_feature(visitor.features); debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature); if !has_feature && !span.allows_unstable($name) { feature_err_issue(&visitor.sess.parse_sess, name, span, GateIssue::Language, explain) .emit(); } }}; } macro_rules! 
gate_feature_post { ($visitor: expr, $feature: ident, $span: expr, $explain: expr, $help: expr) => { gate_feature_fn!($visitor, |x: &Features| x.$feature, $span, sym::$feature, $explain, $help) }; ($visitor: expr, $feature: ident, $span: expr, $explain: expr) => { gate_feature_fn!($visitor, |x: &Features| x.$feature, $span, sym::$feature, $explain) }; } pub fn check_attribute(attr: &ast::Attribute, sess: &Session, features: &Features) { PostExpansionVisitor { sess, features }.visit_attribute(attr) } struct PostExpansionVisitor<'a> { sess: &'a Session, // `sess` contains a `Features`, but this might not be that one. features: &'a Features, } impl<'a> PostExpansionVisitor<'a> { fn check_abi(&self, abi: ast::StrLit) { let ast::StrLit { symbol_unescaped, span, .. } = abi; match symbol_unescaped.as_str() { // Stable "Rust" | "C" | "cdecl" | "stdcall" | "fastcall" | "aapcs" | "win64" | "sysv64" | "system" => {} "rust-intrinsic" => { gate_feature_post!(&self, intrinsics, span, "intrinsics are subject to change"); } "platform-intrinsic" => { gate_feature_post!( &self, platform_intrinsics, span, "platform intrinsics are experimental and possibly buggy" ); } "vectorcall" => { gate_feature_post!( &self, abi_vectorcall, span, "vectorcall is experimental and subject to change" ); } "thiscall" => { gate_feature_post!( &self, abi_thiscall, span, "thiscall is experimental and subject to change" ); } "rust-call" => { gate_feature_post!( &self, unboxed_closures, span, "rust-call ABI is subject to change" ); } "ptx-kernel" => { gate_feature_post!( &self, abi_ptx, span, "PTX ABIs are experimental and subject to change" ); } "unadjusted" => { gate_feature_post!( &self, abi_unadjusted, span, "unadjusted ABI is an implementation detail and perma-unstable" ); } "msp430-interrupt" => { gate_feature_post!( &self, abi_msp430_interrupt, span, "msp430-interrupt ABI is experimental and subject to change" ); } "x86-interrupt" => { gate_feature_post!( &self, abi_x86_interrupt, span, 
"x86-interrupt ABI is experimental and subject to change" ); } "amdgpu-kernel" => { gate_feature_post!( &self, abi_amdgpu_kernel, span, "amdgpu-kernel ABI is experimental and subject to change" ); } "avr-interrupt" | "avr-non-blocking-interrupt" => { gate_feature_post!( &self, abi_avr_interrupt, span, "avr-interrupt and avr-non-blocking-interrupt ABIs are experimental and subject to change" ); } "efiapi" => { gate_feature_post!( &self, abi_efiapi, span, "efiapi ABI is experimental and subject to change" ); } "C-cmse-nonsecure-call" => { gate_feature_post!( &self, abi_c_cmse_nonsecure_call, span, "C-cmse-nonsecure-call ABI is experimental and subject to change" ); } "C-unwind" => { gate_feature_post!( &self, c_unwind, span, "C-unwind ABI is experimental and subject to change" ); } "stdcall-unwind" => { gate_feature_post!( &self, c_unwind, span, "stdcall-unwind ABI is experimental and subject to change" ); } "system-unwind" => { gate_feature_post!( &self, c_unwind, span, "system-unwind ABI is experimental and subject to change" ); } "thiscall-unwind" => { gate_feature_post!( &self, c_unwind, span, "thiscall-unwind ABI is experimental and subject to change" ); } "cdecl-unwind" => { gate_feature_post!( &self, c_unwind, span, "cdecl-unwind ABI is experimental and subject to change" ); } "fastcall-unwind" => { gate_feature_post!( &self, c_unwind, span, "fastcall-unwind ABI is experimental and subject to change" ); } "vectorcall-unwind" => { gate_feature_post!( &self, c_unwind, span, "vectorcall-unwind ABI is experimental and subject to change" ); } "aapcs-unwind" => { gate_feature_post!( &self, c_unwind, span, "aapcs-unwind ABI is experimental and subject to change" ); } "win64-unwind" => { gate_feature_post!( &self, c_unwind, span, "win64-unwind ABI is experimental and subject to change" ); } "sysv64-unwind" => { gate_feature_post!( &self, c_unwind, span, "sysv64-unwind ABI is experimental and subject to change" ); } "wasm" => { gate_feature_post!( &self, wasm_abi, 
span, "wasm ABI is experimental and subject to change" ); } abi => { self.sess.parse_sess.span_diagnostic.delay_span_bug( span, &format!("unrecognized ABI not caught in lowering: {}", abi), ); } } } fn check_extern(&self, ext: ast::Extern) { if let ast::Extern::Explicit(abi) = ext { self.check_abi(abi); } } fn maybe_report_invalid_custom_discriminants(&self, variants: &[ast::Variant]) { let has_fields = variants.iter().any(|variant| match variant.data { VariantData::Tuple(..) | VariantData::Struct(..) => true, VariantData::Unit(..) => false, }); let discriminant_spans = variants .iter() .filter(|variant| match variant.data { VariantData::Tuple(..) | VariantData::Struct(..) => false, VariantData::Unit(..) => true, }) .filter_map(|variant| variant.disr_expr.as_ref().map(|c| c.value.span)) .collect::<Vec<_>>(); if !discriminant_spans.is_empty() && has_fields { let mut err = feature_err( &self.sess.parse_sess, sym::arbitrary_enum_discriminant, discriminant_spans.clone(), "custom discriminant values are not allowed in enums with tuple or struct variants", ); for sp in discriminant_spans { err.span_label(sp, "disallowed custom discriminant"); } for variant in variants.iter() { match &variant.data { VariantData::Struct(..) => { err.span_label(variant.span, "struct variant defined here"); } VariantData::Tuple(..) => { err.span_label(variant.span, "tuple variant defined here"); } VariantData::Unit(..) => {} } } err.emit(); } } fn check_gat(&self, generics: &ast::Generics, span: Span) { if !generics.params.is_empty() { gate_feature_post!( &self, generic_associated_types, span, "generic associated types are unstable" ); } if !generics.where_clause.predicates.is_empty() { gate_feature_post!( &self, generic_associated_types, span, "where clauses on associated types are unstable" ); } } /// Feature gate `impl Trait` inside `type Alias = $type_expr;`. 
fn check_impl_trait(&self, ty: &ast::Ty) { struct ImplTraitVisitor<'a> { vis: &'a PostExpansionVisitor<'a>, } impl Visitor<'_> for ImplTraitVisitor<'_> { fn visit_ty(&mut self, ty: &ast::Ty) { if let ast::TyKind::ImplTrait(..) = ty.kind { gate_feature_post!( &self.vis, type_alias_impl_trait, ty.span, "`impl Trait` in type aliases is unstable" ); } visit::walk_ty(self, ty); } } ImplTraitVisitor { vis: self }.visit_ty(ty); } } impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { fn visit_attribute(&mut self, attr: &ast::Attribute) { let attr_info = attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name)); // Check feature gates for built-in attributes. if let Some(BuiltinAttribute { gate: AttributeGate::Gated(_, name, descr, has_feature), .. }) = attr_info { gate_feature_fn!(self, has_feature, attr.span, *name, descr); } // Check unstable flavors of the `#[doc]` attribute. if attr.has_name(sym::doc) { for nested_meta in attr.meta_item_list().unwrap_or_default() { macro_rules! gate_doc { ($($name:ident => $feature:ident)*) => { $(if nested_meta.has_name(sym::$name) { let msg = concat!("`#[doc(", stringify!($name), ")]` is experimental"); gate_feature_post!(self, $feature, attr.span, msg); })* }} gate_doc!( cfg => doc_cfg cfg_hide => doc_cfg_hide masked => doc_masked notable_trait => doc_notable_trait ); if nested_meta.has_name(sym::keyword) { let msg = "`#[doc(keyword)]` is meant for internal use only"; gate_feature_post!(self, rustdoc_internals, attr.span, msg); } } } // Check for unstable modifiers on `#[link(..)]` attribute if attr.has_name(sym::link) { for nested_meta in attr.meta_item_list().unwrap_or_default() { if nested_meta.has_name(sym::modifiers) { gate_feature_post!( self, native_link_modifiers, nested_meta.span(), "native link modifiers are experimental" ); if let Some(modifiers) = nested_meta.value_str() { for modifier in modifiers.as_str().split(',') { if let Some(modifier) = modifier.strip_prefix(&['+', '-']) { macro_rules! 
gate_modifier { ($($name:literal => $feature:ident)*) => { $(if modifier == $name { let msg = concat!("`#[link(modifiers=\"", $name, "\")]` is unstable"); gate_feature_post!( self, $feature, nested_meta.name_value_literal_span().unwrap(), msg ); })* }} gate_modifier!( "bundle" => native_link_modifiers_bundle "verbatim" => native_link_modifiers_verbatim "whole-archive" => native_link_modifiers_whole_archive "as-needed" => native_link_modifiers_as_needed ); } } } } } } // Emit errors for non-staged-api crates. if !self.features.staged_api { if attr.has_name(sym::rustc_deprecated) || attr.has_name(sym::unstable) || attr.has_name(sym::stable) || attr.has_name(sym::rustc_const_unstable) || attr.has_name(sym::rustc_const_stable) { struct_span_err!( self.sess, attr.span, E0734, "stability attributes may not be used outside of the standard library", ) .emit(); } } } fn visit_item(&mut self, i: &'a ast::Item) { match i.kind { ast::ItemKind::ForeignMod(ref foreign_module) => { if let Some(abi) = foreign_module.abi { self.check_abi(abi); } } ast::ItemKind::Fn(..) => { if self.sess.contains_name(&i.attrs, sym::start) { gate_feature_post!( &self, start, i.span, "`#[start]` functions are experimental \ and their signature may change \ over time" ); } } ast::ItemKind::Struct(..) => { for attr in self.sess.filter_by_name(&i.attrs, sym::repr) { for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.has_name(sym::simd) { gate_feature_post!( &self, repr_simd, attr.span, "SIMD types are experimental and possibly buggy" ); } } } } ast::ItemKind::Enum(ast::EnumDef { ref variants, .. }, ..) 
=> { for variant in variants { match (&variant.data, &variant.disr_expr) { (ast::VariantData::Unit(..), _) => {} (_, Some(disr_expr)) => gate_feature_post!( &self, arbitrary_enum_discriminant, disr_expr.value.span, "discriminants on non-unit variants are experimental" ), _ => {} } } let has_feature = self.features.arbitrary_enum_discriminant; if !has_feature && !i.span.allows_unstable(sym::arbitrary_enum_discriminant) { self.maybe_report_invalid_custom_discriminants(&variants); } } ast::ItemKind::Impl(box ast::Impl { polarity, defaultness, ref of_trait, .. }) => { if let ast::ImplPolarity::Negative(span) = polarity { gate_feature_post!( &self, negative_impls, span.to(of_trait.as_ref().map_or(span, |t| t.path.span)), "negative trait bounds are not yet fully implemented; \ use marker types for now" ); } if let ast::Defaultness::Default(_) = defaultness { gate_feature_post!(&self, specialization, i.span, "specialization is unstable"); } } ast::ItemKind::Trait(box ast::Trait { is_auto: ast::IsAuto::Yes, .. }) => { gate_feature_post!( &self, auto_traits, i.span, "auto traits are experimental and possibly buggy" ); } ast::ItemKind::TraitAlias(..) => { gate_feature_post!(&self, trait_alias, i.span, "trait aliases are experimental"); } ast::ItemKind::MacroDef(ast::MacroDef { macro_rules: false, .. }) => { let msg = "`macro` is experimental"; gate_feature_post!(&self, decl_macro, i.span, msg); } ast::ItemKind::TyAlias(box ast::TyAlias { ty: Some(ref ty), .. }) => { self.check_impl_trait(&ty) } _ => {} } visit::walk_item(self, i); } fn visit_foreign_item(&mut self, i: &'a ast::ForeignItem) { match i.kind { ast::ForeignItemKind::Fn(..) | ast::ForeignItemKind::Static(..) 
=> { let link_name = self.sess.first_attr_value_str_by_name(&i.attrs, sym::link_name); let links_to_llvm = link_name.map_or(false, |val| val.as_str().starts_with("llvm.")); if links_to_llvm { gate_feature_post!( &self, link_llvm_intrinsics, i.span, "linking to LLVM intrinsics is experimental" ); } } ast::ForeignItemKind::TyAlias(..) => { gate_feature_post!(&self, extern_types, i.span, "extern types are experimental"); } ast::ForeignItemKind::MacCall(..) => {} } visit::walk_foreign_item(self, i) } fn visit_ty(&mut self, ty: &'a ast::Ty) { match ty.kind { ast::TyKind::BareFn(ref bare_fn_ty) => { self.check_extern(bare_fn_ty.ext); } ast::TyKind::Never => { gate_feature_post!(&self, never_type, ty.span, "the `!` type is experimental"); } _ => {} } visit::walk_ty(self, ty) } fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FnRetTy) { if let ast::FnRetTy::Ty(ref output_ty) = *ret_ty { if let ast::TyKind::Never = output_ty.kind { // Do nothing. } else { self.visit_ty(output_ty) } } } fn visit_expr(&mut self, e: &'a ast::Expr) { match e.kind { ast::ExprKind::Box(_) => { gate_feature_post!( &self, box_syntax, e.span, "box expression syntax is experimental; you can call `Box::new` instead" ); } ast::ExprKind::Type(..) => { // To avoid noise about type ascription in common syntax errors, only emit if it // is the *only* error. 
if self.sess.parse_sess.span_diagnostic.err_count() == 0 { gate_feature_post!( &self, type_ascription, e.span, "type ascription is experimental" ); } } ast::ExprKind::TryBlock(_) => { gate_feature_post!(&self, try_blocks, e.span, "`try` expression is experimental"); } ast::ExprKind::Block(_, Some(label)) => { gate_feature_post!( &self, label_break_value, label.ident.span, "labels on blocks are unstable" ); } _ => {} } visit::walk_expr(self, e) } fn visit_pat(&mut self, pattern: &'a ast::Pat) { match &pattern.kind { PatKind::Slice(pats) => { for pat in pats { let inner_pat = match &pat.kind { PatKind::Ident(.., Some(pat)) => pat, _ => pat, }; if let PatKind::Range(Some(_), None, Spanned { .. }) = inner_pat.kind { gate_feature_post!( &self, half_open_range_patterns, pat.span, "`X..` patterns in slices are experimental" ); } } } PatKind::Box(..) => { gate_feature_post!( &self, box_patterns, pattern.span, "box pattern syntax is experimental" ); } PatKind::Range(_, Some(_), Spanned { node: RangeEnd::Excluded, .. }) => { gate_feature_post!( &self, exclusive_range_pattern, pattern.span, "exclusive range pattern syntax is experimental" ); } _ => {} } visit::walk_pat(self, pattern) } fn visit_fn(&mut self, fn_kind: FnKind<'a>, span: Span, _: NodeId) { if let Some(header) = fn_kind.header() { // Stability of const fn methods are covered in `visit_assoc_item` below. self.check_extern(header.ext); if let (ast::Const::Yes(_), ast::Extern::Implicit) | (ast::Const::Yes(_), ast::Extern::Explicit(_)) = (header.constness, header.ext) { gate_feature_post!( &self, const_extern_fn, span, "`const extern fn` definitions are unstable" ); } } if fn_kind.ctxt() != Some(FnCtxt::Foreign) && fn_kind.decl().c_variadic() { gate_feature_post!(&self, c_variadic, span, "C-variadic functions are unstable"); } visit::walk_fn(self, fn_kind, span) } fn visit_assoc_constraint(&mut self, constraint: &'a AssocConstraint) { if let AssocConstraintKind::Bound { .. 
} = constraint.kind { gate_feature_post!( &self, associated_type_bounds, constraint.span, "associated type bounds are unstable" ) } visit::walk_assoc_constraint(self, constraint) } fn visit_assoc_item(&mut self, i: &'a ast::AssocItem, ctxt: AssocCtxt) { let is_fn = match i.kind { ast::AssocItemKind::Fn(_) => true, ast::AssocItemKind::TyAlias(box ast::TyAlias { ref generics, ref ty, .. }) => { if let (Some(_), AssocCtxt::Trait) = (ty, ctxt) { gate_feature_post!( &self, associated_type_defaults, i.span, "associated type defaults are unstable" ); } if let Some(ty) = ty { self.check_impl_trait(ty); } self.check_gat(generics, i.span); false } _ => false, }; if let ast::Defaultness::Default(_) = i.kind.defaultness() { // Limit `min_specialization` to only specializing functions. gate_feature_fn!( &self, |x: &Features| x.specialization || (is_fn && x.min_specialization), i.span, sym::specialization, "specialization is unstable" ); } visit::walk_assoc_item(self, i, ctxt) } fn visit_vis(&mut self, vis: &'a ast::Visibility) { if let ast::VisibilityKind::Crate(ast::CrateSugar::JustCrate) = vis.kind { gate_feature_post!( &self, crate_visibility_modifier, vis.span, "`crate` visibility modifier is experimental" ); } visit::walk_vis(self, vis) } } pub fn check_crate(krate: &ast::Crate, sess: &Session) { maybe_stage_features(sess, krate); check_incompatible_features(sess); let mut visitor = PostExpansionVisitor { sess, features: &sess.features_untracked() }; let spans = sess.parse_sess.gated_spans.spans.borrow(); macro_rules! 
gate_all { ($gate:ident, $msg:literal, $help:literal) => { if let Some(spans) = spans.get(&sym::$gate) { for span in spans { gate_feature_post!(&visitor, $gate, *span, $msg, $help); } } }; ($gate:ident, $msg:literal) => { if let Some(spans) = spans.get(&sym::$gate) { for span in spans { gate_feature_post!(&visitor, $gate, *span, $msg); } } }; } gate_all!( if_let_guard, "`if let` guards are experimental", "you can write `if matches!(<expr>, <pattern>)` instead of `if let <pattern> = <expr>`" ); gate_all!(let_chains, "`let` expressions in this position are unstable"); gate_all!( async_closure, "async closures are unstable", "to use an async block, remove the `||`: `async {`" ); gate_all!(more_qualified_paths, "usage of qualified paths in this context is experimental"); gate_all!(generators, "yield syntax is experimental"); gate_all!(raw_ref_op, "raw address of syntax is experimental"); gate_all!(const_trait_impl, "const trait impls are experimental"); gate_all!(half_open_range_patterns, "half-open range patterns are unstable"); gate_all!(inline_const, "inline-const is experimental"); gate_all!(inline_const_pat, "inline-const in pattern position is experimental"); gate_all!(associated_const_equality, "associated const equality is incomplete"); // All uses of `gate_all!` below this point were added in #65742, // and subsequently disabled (with the non-early gating readded). macro_rules! gate_all { ($gate:ident, $msg:literal) => { // FIXME(eddyb) do something more useful than always // disabling these uses of early feature-gatings. 
if false { for span in spans.get(&sym::$gate).unwrap_or(&vec![]) { gate_feature_post!(&visitor, $gate, *span, $msg); } } }; } gate_all!(trait_alias, "trait aliases are experimental"); gate_all!(associated_type_bounds, "associated type bounds are unstable"); gate_all!(crate_visibility_modifier, "`crate` visibility modifier is experimental"); gate_all!(decl_macro, "`macro` is experimental"); gate_all!(box_patterns, "box pattern syntax is experimental"); gate_all!(exclusive_range_pattern, "exclusive range pattern syntax is experimental"); gate_all!(try_blocks, "`try` blocks are unstable"); gate_all!(label_break_value, "labels on blocks are unstable"); gate_all!(box_syntax, "box expression syntax is experimental; you can call `Box::new` instead"); // To avoid noise about type ascription in common syntax errors, // only emit if it is the *only* error. (Also check it last.) if sess.parse_sess.span_diagnostic.err_count() == 0 { gate_all!(type_ascription, "type ascription is experimental"); } visit::walk_crate(&mut visitor, krate); } fn maybe_stage_features(sess: &Session, krate: &ast::Crate) { // checks if `#![feature]` has been used to enable any lang feature // does not check the same for lib features unless there's at least one // declared lang feature use rustc_errors::Applicability; if !sess.opts.unstable_features.is_nightly_build() { let lang_features = &sess.features_untracked().declared_lang_features; if lang_features.len() == 0 { return; } for attr in krate.attrs.iter().filter(|attr| attr.has_name(sym::feature)) { let mut err = struct_span_err!( sess.parse_sess.span_diagnostic, attr.span, E0554, "`#![feature]` may not be used on the {} release channel", option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)") ); let mut all_stable = true; for ident in attr.meta_item_list().into_iter().flatten().flat_map(|nested| nested.ident()) { let name = ident.name; let stable_since = lang_features .iter() .flat_map(|&(feature, _, since)| if feature == name { since } else { 
None }) .next(); if let Some(since) = stable_since { err.help(&format!( "the feature `{}` has been stable since {} and no longer requires \ an attribute to enable", name, since )); } else { all_stable = false; } } if all_stable { err.span_suggestion( attr.span, "remove the attribute", String::new(), Applicability::MachineApplicable, ); } err.emit(); } } } fn check_incompatible_features(sess: &Session) { let features = sess.features_untracked(); let declared_features = features .declared_lang_features .iter() .copied() .map(|(name, span, _)| (name, span)) .chain(features.declared_lib_features.iter().copied()); for (f1, f2) in rustc_feature::INCOMPATIBLE_FEATURES .iter() .filter(|&&(f1, f2)| features.enabled(f1) && features.enabled(f2)) { if let Some((f1_name, f1_span)) = declared_features.clone().find(|(name, _)| name == f1) { if let Some((f2_name, f2_span)) = declared_features.clone().find(|(name, _)| name == f2) { let spans = vec![f1_span, f2_span]; sess.struct_span_err( spans.clone(), &format!( "features `{}` and `{}` are incompatible, using them at the same time \ is not allowed", f1_name, f2_name ), ) .help("remove one of these features") .emit(); } } } }
38.026461
110
0.463961
918521c3a854b54f29eb2b5974d595f6ae3f1a89
558
//! Example parser 2 use example::prelude::*; /// The second example parser #[derive(Debug)] pub struct Parser2; #[derive(Debug, PartialEq)] /// The result of the second example parser pub struct Parser2Result; impl Parsable<()> for Parser2 { /// The actual parsing entry point fn parse<'a>(&mut self, input: &'a [u8], _: Option<&ParserResultVec>, _: Option<&mut ()>) -> IResult<&'a [u8], ParserResult> { do_parse!(input, tag!("2") >> (Box::new(Parser2Result))) } }
24.26087
64
0.566308
79ea1a872491476aa4ff2143f9458c2a483f11a3
5,751
use std::result; use std::hash::{Hash, SipHasher, Hasher}; pub type Buffer = Vec<u8>; /// Client request. #[derive(Debug, Hash, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub struct Request { pub action: Action, pub consistency: Consistency, } /// `Request`'s possible actions to be performed by a `handler`. #[derive(Debug, Hash, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum Action { /// Read the given `Key` and receive a `Response::Value`. Read { key: Key, }, /// Write the given `Value` for `Key` and receive a `Response::WriteAck`. Write { key: Key, content: Buffer, }, /// Delete the given `Key` and receive a `Response::WriteAck`. Delete { key: Key, }, } /// A `Request`'s `Response` message envelope. #[derive(Debug, Hash, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub struct ResponseMessage { pub message: Response, pub consistency: Consistency, } /// A `Request`'s response. #[derive(Debug, Hash, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum Response { /// An stored value, stored in the shard's `StorageNode`s, according to the /// required `Consistency`. Value { key: Key, value: Value, }, /// The `Request`ed write for a `Value` has been stored in the `Key`'s /// shard's `StorageNode`s, according to the required `Consistency`. WriteAck { key: Key, timestamp: u64, }, /// There was an error performing the operation on `Key`. Error { key: Key, message: String, }, } /// The `Key` used to lookup a given `Value`. #[derive(Debug, Hash, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable)] pub struct Key { pub dataset: Buffer, pub pkey: Buffer, pub lkey: Buffer, } impl Key { /// Return the ring hash for this `Key`. pub fn hash(&self) -> u64 { let mut s = SipHasher::new(); &self.pkey.hash(&mut s); s.finish() } /// Return the corresponding shard for this key, given a `shard_count` /// amount of shards. 
pub fn shard(&self, shard_count: usize) -> usize { (self.hash() % (shard_count as u64)) as usize } } /// Any of the possible stored values on a `StorageNode`. #[derive(Debug, Hash, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum Value { /// There's no value stored for the given `Key` in the `StorageNode`s. None, /// The value stored for the given `Key` in the `StorageNode`s. Value { content: Buffer, timestamp: u64, }, /// A deleted value for the given `Key` in the `StorageNode`s. Tombstone { timestamp: u64, }, } /// Request operations performed by a `handler` to the `StorageNode`s. #[derive(Debug, Hash, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum InternodeRequest { Read { key: Key, }, Write { key: Key, value: Value, }, } /// Request Response for a `handler` from a `StorageNode`. #[derive(Debug, Hash, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum InternodeResponse { Value { key: Key, value: Value, }, WriteAck { key: Key, timestamp: u64, }, Error { key: Key, message: String, }, } impl InternodeResponse { /// If the `Value` has a timestamp, return it. pub fn get_timestamp(&self) -> Option<u64> { match self { &InternodeResponse::Value {ref value, ..} => { match *value { Value::None => None, Value::Value {timestamp, ..} => Some(timestamp), Value::Tombstone {timestamp} => Some(timestamp), } } &InternodeResponse::WriteAck {ref timestamp, ..} => Some(*timestamp), &InternodeResponse::Error {..} => None, } } /// Cast this `InternodeResponse` into a `handler` -> `Client` `Response`. pub fn to_response(self) -> Response { match self { InternodeResponse::Value {key, value} => { Response::Value { key: key, value: value, } } InternodeResponse::WriteAck {key, timestamp} => { Response::WriteAck { key: key, timestamp: timestamp, } } InternodeResponse::Error {key, message} => { Response::Error { key: key, message: message, } } } } } /// Consistency level for the `Request` or `Response`. 
Determines the conflict /// resolution when `StorageNode`s for a given shard diverge and the persistence /// assurance when writing. #[derive(Debug, Hash, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum Consistency { /// Only wait for one `StorageNode` to successfully reply before responding. One, /// Wait for all `StorageNode`s to reply and send a `Response` with the /// newest `Value`. Latest, } #[derive(Debug, Hash, Clone, PartialEq)] pub enum Error { /// Error when binary encoding a message. EncodeError, /// Error when decoding a binary into a message. DecodeError, /// Connection error. ConnectionError, } pub type Result<T> = result::Result<T, Error>; #[cfg(test)] mod tests { use super::*; #[test] fn key_hash() { let key = Key { dataset: vec![1], pkey: vec![1], lkey: vec![1], }; assert_eq!(8934463522374858327, key.hash()); assert_eq!(0, key.shard(1 as usize)); } }
27.385714
81
0.573639
0e10c7946479281fe5cc4c6464d3c37b688d2a4a
8,938
use egui::TextureId; use vulkano::device::{Device, DeviceExtensions}; use vulkano::image::{AttachmentImage, ImageUsage}; use vulkano::instance::Instance; use vulkano::swapchain::{AcquireError, Swapchain, SwapchainCreationError}; use vulkano::sync::GpuFuture; use vulkano::{swapchain, Version}; use vulkano_win::VkSurfaceBuild; use winit::event::{Event, WindowEvent}; use winit::event_loop::EventLoop; use winit::window::WindowBuilder; use egui_vulkano_backend::{EguiVulkanoBackend, ScreenDescriptor}; use crate::renderer::TeapotRenderer; use once_cell::sync::OnceCell; use std::sync::Arc; use vulkano::command_buffer::{AutoCommandBufferBuilder, CommandBufferUsage}; use vulkano::device::physical::PhysicalDevice; mod model; mod renderer; fn main() { // The start of this examples is exactly the same as `triangle`. You should read the // `triangle` examples if you haven't done so yet. let required_extensions = vulkano_win::required_extensions(); static INSTANCE: OnceCell<Arc<Instance>> = OnceCell::new(); INSTANCE .set(Instance::new(None, Version::V1_0, &required_extensions, None).unwrap()) .unwrap(); let physical = PhysicalDevice::enumerate(INSTANCE.get().unwrap()) .next() .unwrap(); println!( "Using device: {} (type: {:?})", physical.properties().device_name, physical.properties().device_type ); let event_loop: EventLoop<()> = EventLoop::with_user_event(); let surface = WindowBuilder::new() .with_title("Egui Vulkano Backend sample") .build_vk_surface(&event_loop, INSTANCE.get().unwrap().clone()) .unwrap(); let queue_family = physical .queue_families() .find(|&q| q.supports_graphics() && surface.is_supported(q).unwrap_or(false)) .unwrap(); let device_ext = DeviceExtensions { khr_swapchain: true, ..DeviceExtensions::none() }; let (device, mut queues) = Device::new( physical, physical.supported_features(), &device_ext, [(queue_family, 0.5)].iter().cloned(), ) .unwrap(); let queue = queues.next().unwrap(); let (mut swapchain, images) = { let caps = 
surface.capabilities(physical).unwrap(); assert!(caps.supported_formats.contains(&( vulkano::format::Format::R8G8B8A8_SRGB, vulkano::swapchain::ColorSpace::SrgbNonLinear ))); let alpha = caps.supported_composite_alpha.iter().next().unwrap(); let dimensions: [u32; 2] = surface.window().inner_size().into(); Swapchain::start(device.clone(), surface.clone()) .format(vulkano::format::Format::R8G8B8A8_SRGB) .dimensions(dimensions) .composite_alpha(alpha) .num_images(caps.min_image_count) .usage(ImageUsage::color_attachment()) .sharing_mode(&queue) .clipped(true) .color_space(vulkano::swapchain::ColorSpace::SrgbNonLinear) .build() .unwrap() }; let mut egui = EguiVulkanoBackend::new( surface.clone(), device.clone(), queue.clone(), swapchain.format(), ); egui.create_frame_buffers(images.as_slice()); //init egui // create relation between TextureID and render target let (texture_id, mut render_target) = egui .painter_mut() .init_vulkano_image_with_dimensions([1280, 720]) .unwrap(); let size = surface.window().inner_size(); //create renderer let mut teapot_renderer = TeapotRenderer::new(device.clone(), queue); //set render target teapot_renderer.set_render_target(render_target.clone()); let mut screen_descriptor = ScreenDescriptor { physical_width: size.width, physical_height: size.height, scale_factor: surface.window().scale_factor() as f32, }; let mut rotate = 0.0; let mut height_percent = 0.7; let mut image_size = [size.width as f32, size.height as f32 * height_percent]; let mut needs_to_resize_teapot_rt = false; event_loop.run(move |event, _, control_flow| { let mut redraw = || { // Begin to draw the UI frame. 
egui.begin_frame(); egui::CentralPanel::default().show(egui.ctx(), |ui| { ui.vertical(|ui| { ui.image(texture_id, image_size); ui.horizontal(|ui| { //add Model view adjuster if ui .add(egui::widgets::Slider::new(&mut height_percent, 0.1..=0.9)) .changed() { needs_to_resize_teapot_rt = true; } //add rotation control if ui .add(egui::Slider::new( &mut rotate, -std::f32::consts::PI..=std::f32::consts::PI, )) .changed() { teapot_renderer.set_rotate(rotate) } }); }) }); // End the UI frame. We could now handle the output and draw the UI with the backend. let (needs_repaint, shapes) = egui.end_frame(); if needs_repaint { surface.window().request_redraw(); winit::event_loop::ControlFlow::Poll } else { winit::event_loop::ControlFlow::Wait }; let mut previous_frame_end = Some(vulkano::sync::now(device.clone()).boxed()); previous_frame_end.as_mut().unwrap().cleanup_finished(); let (image_num, suboptimal, acquire_future) = match swapchain::acquire_next_image(swapchain.clone(), None) { Ok(r) => r, Err(AcquireError::OutOfDate) => { return; } Err(e) => panic!("Failed to acquire next image: {:?}", e), }; if suboptimal { return; } if needs_to_resize_teapot_rt { needs_to_resize_teapot_rt = false; let size = surface.window().inner_size(); image_size = [size.width as f32, size.height as f32 * height_percent]; render_target = teapot_rt_resize(size.into(), texture_id, height_percent, egui.painter_mut()); teapot_renderer.set_render_target(render_target.clone()); } teapot_renderer.draw(); let mut command_buffer_builder = AutoCommandBufferBuilder::primary( device.clone(), queue_family, CommandBufferUsage::OneTimeSubmit, ) .unwrap(); egui.paint(image_num, &mut command_buffer_builder, shapes); egui.painter_mut() .present_to_screen(command_buffer_builder.build().unwrap(), acquire_future); }; match event { winit::event::Event::RedrawEventsCleared if cfg!(windows) => redraw(), winit::event::Event::RedrawRequested(_) if !cfg!(windows) => redraw(), Event::WindowEvent { event: 
WindowEvent::Resized(size), .. } => { match swapchain.recreate().dimensions(size.into()).build() { Ok(r) => { swapchain = r.0; egui.create_frame_buffers(&r.1); } Err(SwapchainCreationError::UnsupportedDimensions) => return, Err(e) => panic!("Failed to recreate swapchain: {:?}", e), } //resize render_target needs_to_resize_teapot_rt = true; //set screen descriptor screen_descriptor.physical_height = size.height; screen_descriptor.physical_width = size.width; } winit::event::Event::WindowEvent { event, .. } => { if egui.is_quit_event(&event) { *control_flow = winit::event_loop::ControlFlow::Exit; } egui.on_event(&event); surface.window().request_redraw(); // TODO: ask egui if the events warrants a repaint instead } _ => (), } }); } fn teapot_rt_resize( size: [u32; 2], texture_id: TextureId, height_percent: f32, painter: &mut egui_vulkano_backend::painter::Painter, ) -> Arc<AttachmentImage> { painter .recreate_vulkano_texture_with_dimensions( texture_id, [size[0], (size[1] as f32 * height_percent) as u32], ) .unwrap() }
36.781893
109
0.553815
75bd48d931abeda697730d2efb8789e8cc391bb4
1,777
// Copyright 2020 ZomboDB, LLC <[email protected]>. All rights reserved. Use of this source code is // governed by the MIT license that can be found in the LICENSE file. use crate::{direct_function_call_as_datum, pg_sys, FromDatum, IntoDatum}; impl FromDatum for pg_sys::BOX { unsafe fn from_datum(datum: pg_sys::Datum, is_null: bool, _: pg_sys::Oid) -> Option<Self> where Self: Sized, { if is_null { None } else if datum == 0 { panic!("BOX datum declared not null, but datum is zero") } else { let the_box = datum as *mut pg_sys::BOX; Some(the_box.read()) } } } impl IntoDatum for pg_sys::BOX { fn into_datum(mut self) -> Option<pg_sys::Datum> { let the_box = &mut self; direct_function_call_as_datum( pg_sys::box_out, vec![Some(the_box as *mut pg_sys::BOX as pg_sys::Datum)], ) } fn type_oid() -> pg_sys::Oid { pg_sys::BOXOID } } impl FromDatum for pg_sys::Point { unsafe fn from_datum(datum: pg_sys::Datum, is_null: bool, _: pg_sys::Oid) -> Option<Self> where Self: Sized, { if is_null { None } else if datum == 0 { panic!("Point datum declared not null, but datum is zero") } else { let point = datum as *mut pg_sys::Point; Some(point.read()) } } } impl IntoDatum for pg_sys::Point { fn into_datum(mut self) -> Option<usize> { let point = &mut self; direct_function_call_as_datum( pg_sys::point_out, vec![Some(point as *mut pg_sys::Point as pg_sys::Datum)], ) } fn type_oid() -> pg_sys::Oid { pg_sys::POINTOID } }
27.338462
99
0.569499
2693041e9addce789350a60a85395e592f0ee6ce
2,223
use std::fs; fn main() { let input = read_input(); let result = process(&input); println!("Result: {}\n", result); } fn read_input() -> String { let input_filename = String::from("input.txt"); fs::read_to_string(input_filename) .expect("Failed to read file") } fn process(input: &str) -> u32 { let mut data = input_to_vec(&input); let node = make_node(&mut data); return node.sum_metadata(); } fn input_to_vec(input: &str) -> Vec<u32> { let mut vec: Vec<u32> = input.trim().split(" ").map(|s| { s.parse().unwrap() }).collect(); vec.reverse(); vec } fn make_node(input: &mut Vec<u32>) -> Node { let num_children = input.pop().unwrap(); let num_metadatum = input.pop().unwrap(); let mut children = Vec::new(); for _ in 0..num_children { children.push(Box::new(make_node(input))); } let mut metadata = Vec::new(); for _ in 0..num_metadatum { metadata.push(input.pop().unwrap()); } Node {children, metadata} } struct Node { children: Vec<Box<Node>>, metadata: Vec<u32>, } impl Node { #[cfg(test)] fn num_all_children(&self) -> usize { let mut num_all_children: usize = 0; for child in &self.children { num_all_children += child.num_all_children(); } return num_all_children + self.children.len(); } fn sum_metadata(&self) -> u32 { let mut sum: u32 = self.metadata.iter().sum(); for child in &self.children { sum += child.sum_metadata(); } return sum; } } #[cfg(test)] mod tests { use super::*; #[test] fn test_input_to_vec() { let input = "2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2"; let result = input_to_vec(input); let mut expected = vec![2, 3, 0, 3, 10, 11, 12, 1, 1, 0, 1, 99, 2, 1, 1, 2,]; expected.reverse(); assert_eq!(expected, result); } #[test] fn test_vec_to_node() { let input = "2 3 0 3 10 11 12 1 1 0 1 99 2 1 1 2"; let mut vec = input_to_vec(input); let result = make_node(&mut vec); assert_eq!(3, result.num_all_children()); assert_eq!(138, result.sum_metadata()); } }
23.648936
94
0.565002
d6c47a168f5e8b410b989d139ca9fee3c691b87a
263
use bitcoin::secp256k1; use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct LightningGateway { pub mint_pub_key: secp256k1::schnorrsig::PublicKey, pub node_pub_key: secp256k1::PublicKey, pub api: String, }
26.3
55
0.745247
aba833f449d69f404479e52639cf86cd50165bbb
1,954
// Copyright (C) 2019 Robin Krahl <[email protected]> // SPDX-License-Identifier: MIT mod dialog; mod stdio; mod zenity; mod kdialog; pub use crate::backends::dialog::Dialog; pub use crate::backends::stdio::Stdio; pub use crate::backends::zenity::Zenity; pub use crate::backends::kdialog::KDialog; use std::env; use std::path; use crate::Result; /// A dialog backend. /// /// A dialog backend is a program that can be used to display dialog boxes. Use the /// [`default_backend`][] function to create a new instance of the default backend, or choose a /// backend and create an instance manually. To use a backend, pass it to the [`show_with`][] /// method of a dialog box. /// /// [`default_backend`]: ../fn.default_backend.html /// [`show_with`]: ../trait.DialogBox.html#method.show_with pub trait Backend { /// Shows the given input dialog and returns the input. fn show_input(&self, input: &super::Input) -> Result<Option<String>>; /// Shows the given message dialog. fn show_message(&self, message: &super::Message) -> Result<()>; /// Shows the given password dialog and returns the password. fn show_password(&self, password: &super::Password) -> Result<Option<String>>; /// Shows the given question dialog and returns the choice. fn show_question(&self, question: &super::Question) -> Result<super::Choice>; } pub(crate) fn is_available(name: &str) -> bool { if let Ok(path) = env::var("PATH") { for part in path.split(':') { if path::Path::new(part).join(name).exists() { return true; } } } false } pub(crate) fn from_str(s: &str) -> Option<Box<dyn Backend>> { match s.to_lowercase().as_ref() { "dialog" => Some(Box::new(Dialog::new())), "kdialog" => Some(Box::new(KDialog::new())), "stdio" => Some(Box::new(Stdio::new())), "zenity" => Some(Box::new(Zenity::new())), _ => None, } }
31.516129
95
0.635619
919d7d13c7e73d70710d19b46fd9bd5b7de0859c
2,950
use futures_core::future::Future;
use futures_core::ready;
use futures_core::task::{Context, Poll};
use futures_io::AsyncRead;
use std::io;
use std::pin::Pin;
use std::vec::Vec;

/// Future for the [`read_to_end`](super::AsyncReadExt::read_to_end) method.
#[derive(Debug)]
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct ReadToEnd<'a, R: ?Sized> {
    reader: &'a mut R,
    buf: &'a mut Vec<u8>,
    // Length of `buf` when this future was created; the final result reports
    // only the bytes appended by this read, not the pre-existing contents.
    start_len: usize,
}

impl<R: ?Sized + Unpin> Unpin for ReadToEnd<'_, R> {}

impl<'a, R: AsyncRead + ?Sized + Unpin> ReadToEnd<'a, R> {
    /// Creates the future, recording the buffer's current length so the
    /// number of newly read bytes can be computed on completion.
    pub(super) fn new(reader: &'a mut R, buf: &'a mut Vec<u8>) -> Self {
        let start_len = buf.len();
        Self { reader, buf, start_len }
    }
}

/// Drop guard: on unwind (or normal exit) truncates the vector back to the
/// last length known to be fully initialized, so a panic can never leave
/// uninitialized bytes observable through `buf.len()`.
struct Guard<'a> {
    buf: &'a mut Vec<u8>,
    len: usize,
}

impl Drop for Guard<'_> {
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `self.len` only ever holds a value up to which the
            // buffer contents have been initialized (see the loop below),
            // so shrinking the length to it is sound.
            self.buf.set_len(self.len);
        }
    }
}

// This uses an adaptive system to extend the vector when it fills. We want to
// avoid paying to allocate and zero a huge chunk of memory if the reader only
// has 4 bytes while still making large reads if the reader does have a ton
// of data to return. Simply tacking on an extra DEFAULT_BUF_SIZE space every
// time is 4,500 times (!) slower than this if the reader has a very small
// amount of data to return.
//
// Because we're extending the buffer with uninitialized data for trusted
// readers, we need to make sure to truncate that if any of this panics.
pub(super) fn read_to_end_internal<R: AsyncRead + ?Sized>(
    mut rd: Pin<&mut R>,
    cx: &mut Context<'_>,
    buf: &mut Vec<u8>,
    start_len: usize,
) -> Poll<io::Result<usize>> {
    let mut g = Guard { len: buf.len(), buf };
    loop {
        if g.len == g.buf.len() {
            unsafe {
                // SAFETY: we grow the vector's length up to its capacity and
                // immediately hand the tail to `initialize`, which zeroes it
                // for readers that are not trusted to only write (no
                // uninitialized bytes are ever read back before being
                // overwritten or zeroed). The Guard above restores a valid
                // length if anything below panics.
                g.buf.reserve(32);
                let capacity = g.buf.capacity();
                g.buf.set_len(capacity);
                super::initialize(&rd, &mut g.buf[g.len..]);
            }
        }

        let buf = &mut g.buf[g.len..];
        match ready!(rd.as_mut().poll_read(cx, buf)) {
            // EOF: report how many bytes this operation appended.
            Ok(0) => return Poll::Ready(Ok(g.len - start_len)),
            Ok(n) => {
                // We can't allow bogus values from read. If it is too large, the returned vec could have its length
                // set past its capacity, or if it overflows the vec could be shortened which could create an invalid
                // string if this is called via read_to_string.
                assert!(n <= buf.len());
                g.len += n;
            }
            Err(e) => return Poll::Ready(Err(e)),
        }
    }
}

impl<A> Future for ReadToEnd<'_, A>
where
    A: AsyncRead + ?Sized + Unpin,
{
    type Output = io::Result<usize>;

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let this = &mut *self;
        read_to_end_internal(Pin::new(&mut this.reader), cx, this.buf, this.start_len)
    }
}
32.065217
117
0.588136
18644a07846a80f53fededf5019b5ec0848e9269
582
use noise_fns::NoiseFn; /// Noise function that outputs the smaller of the two output values from two source /// functions. pub struct Min<'a, T: 'a> { /// Outputs a value. pub source1: &'a NoiseFn<T>, /// Outputs a value. pub source2: &'a NoiseFn<T>, } impl<'a, T> Min<'a, T> { pub fn new(source1: &'a NoiseFn<T>, source2: &'a NoiseFn<T>) -> Self { Min { source1, source2 } } } impl<'a, T> NoiseFn<T> for Min<'a, T> where T: Copy, { fn get(&self, point: T) -> f64 { (self.source1.get(point)).min(self.source2.get(point)) } }
21.555556
84
0.579038
14107ed9ac24216e2c6d793df652484c90d5a3fe
14,079
use std::borrow::{Borrow, Cow};
use std::sync::Arc;

use arrow_format::ipc::planus::Builder;

use crate::array::*;
use crate::chunk::Chunk;
use crate::datatypes::*;
use crate::error::{ArrowError, Result};
use crate::io::ipc::endianess::is_native_little_endian;
use crate::io::ipc::read::Dictionaries;

use super::super::IpcField;
use super::{write, write_dictionary};

/// Compression codec
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Compression {
    /// LZ4 (framed)
    LZ4,
    /// ZSTD
    ZSTD,
}

/// Options declaring the behaviour of writing to IPC
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub struct WriteOptions {
    /// Whether the buffers should be compressed and which codec to use.
    /// Note: to use compression the crate must be compiled with feature `io_ipc_compression`.
    pub compression: Option<Compression>,
}

/// Recursively walks `array` and appends an [`EncodedData`] message to
/// `encoded_dictionaries` for every dictionary found, children first so that
/// readers see nested dictionaries before the batch that uses them.
/// `dictionary_tracker` de-duplicates dictionaries already emitted.
fn encode_dictionary(
    field: &IpcField,
    array: &Arc<dyn Array>,
    options: &WriteOptions,
    dictionary_tracker: &mut DictionaryTracker,
    encoded_dictionaries: &mut Vec<EncodedData>,
) -> Result<()> {
    use PhysicalType::*;
    match array.data_type().to_physical_type() {
        // Leaf types cannot contain dictionaries: nothing to do.
        Utf8 | LargeUtf8 | Binary | LargeBinary | Primitive(_) | Boolean | Null
        | FixedSizeBinary => Ok(()),
        Dictionary(key_type) => match_integer_type!(key_type, |$T| {
            let dict_id = field.dictionary_id
                .ok_or_else(|| ArrowError::InvalidArgumentError("Dictionaries must have an associated id".to_string()))?;

            let values = array.as_any().downcast_ref::<DictionaryArray<$T>>().unwrap().values();

            // Encode dictionaries nested inside the values first.
            encode_dictionary(field,
                values,
                options,
                dictionary_tracker,
                encoded_dictionaries
            )?;

            let emit = dictionary_tracker.insert(dict_id, array)?;

            if emit {
                encoded_dictionaries.push(dictionary_batch_to_bytes(
                    dict_id,
                    array.as_ref(),
                    options,
                    is_native_little_endian(),
                ));
            };
            Ok(())
        }),
        Struct => {
            let array = array.as_any().downcast_ref::<StructArray>().unwrap();
            let fields = field.fields.as_slice();
            if array.fields().len() != fields.len() {
                return Err(ArrowError::InvalidArgumentError(
                    "The number of fields in a struct must equal the number of children in IpcField".to_string(),
                ));
            }
            fields
                .iter()
                .zip(array.values().iter())
                .try_for_each(|(field, values)| {
                    encode_dictionary(
                        field,
                        values,
                        options,
                        dictionary_tracker,
                        encoded_dictionaries,
                    )
                })
        }
        List => {
            let values = array
                .as_any()
                .downcast_ref::<ListArray<i32>>()
                .unwrap()
                .values();
            let field = &field.fields[0]; // todo: error instead
            encode_dictionary(
                field,
                values,
                options,
                dictionary_tracker,
                encoded_dictionaries,
            )
        }
        LargeList => {
            let values = array
                .as_any()
                .downcast_ref::<ListArray<i64>>()
                .unwrap()
                .values();
            let field = &field.fields[0]; // todo: error instead
            encode_dictionary(
                field,
                values,
                options,
                dictionary_tracker,
                encoded_dictionaries,
            )
        }
        FixedSizeList => {
            let values = array
                .as_any()
                .downcast_ref::<FixedSizeListArray>()
                .unwrap()
                .values();
            let field = &field.fields[0]; // todo: error instead
            encode_dictionary(
                field,
                values,
                options,
                dictionary_tracker,
                encoded_dictionaries,
            )
        }
        Union => {
            let values = array
                .as_any()
                .downcast_ref::<UnionArray>()
                .unwrap()
                .fields();
            let fields = &field.fields[..]; // todo: error instead
            if values.len() != fields.len() {
                return Err(ArrowError::InvalidArgumentError(
                    "The number of fields in a union must equal the number of children in IpcField"
                        .to_string(),
                ));
            }
            fields
                .iter()
                .zip(values.iter())
                .try_for_each(|(field, values)| {
                    encode_dictionary(
                        field,
                        values,
                        options,
                        dictionary_tracker,
                        encoded_dictionaries,
                    )
                })
        }
        Map => {
            let values = array.as_any().downcast_ref::<MapArray>().unwrap().field();
            let field = &field.fields[0]; // todo: error instead
            encode_dictionary(
                field,
                values,
                options,
                dictionary_tracker,
                encoded_dictionaries,
            )
        }
    }
}

/// Encodes a chunk into its dictionary messages plus the record-batch
/// message, in the order they must be written to the IPC stream.
pub fn encode_chunk(
    columns: &Chunk<Arc<dyn Array>>,
    fields: &[IpcField],
    dictionary_tracker: &mut DictionaryTracker,
    options: &WriteOptions,
) -> Result<(Vec<EncodedData>, EncodedData)> {
    let mut encoded_dictionaries = vec![];
    for (field, array) in fields.iter().zip(columns.as_ref()) {
        encode_dictionary(
            field,
            array,
            options,
            dictionary_tracker,
            &mut encoded_dictionaries,
        )?;
    }

    let encoded_message = columns_to_bytes(columns, options);

    Ok((encoded_dictionaries, encoded_message))
}

/// Converts the user-facing compression option into its flatbuffer form.
fn serialize_compression(
    compression: Option<Compression>,
) -> Option<Box<arrow_format::ipc::BodyCompression>> {
    if let Some(compression) = compression {
        let codec = match compression {
            Compression::LZ4 => arrow_format::ipc::CompressionType::Lz4Frame,
            Compression::ZSTD => arrow_format::ipc::CompressionType::Zstd,
        };
        Some(Box::new(arrow_format::ipc::BodyCompression {
            codec,
            method: arrow_format::ipc::BodyCompressionMethod::Buffer,
        }))
    } else {
        None
    }
}

/// Write [`Chunk`] into two sets of bytes, one for the header (ipc::Schema::Message) and the
/// other for the batch's data
fn columns_to_bytes(columns: &Chunk<Arc<dyn Array>>, options: &WriteOptions) -> EncodedData {
    let mut nodes: Vec<arrow_format::ipc::FieldNode> = vec![];
    let mut buffers: Vec<arrow_format::ipc::Buffer> = vec![];
    let mut arrow_data: Vec<u8> = vec![];

    let mut offset = 0;
    for array in columns.arrays() {
        write(
            array.as_ref(),
            &mut buffers,
            &mut arrow_data,
            &mut nodes,
            &mut offset,
            is_native_little_endian(),
            options.compression,
        )
    }

    let compression = serialize_compression(options.compression);

    let message = arrow_format::ipc::Message {
        version: arrow_format::ipc::MetadataVersion::V5,
        header: Some(arrow_format::ipc::MessageHeader::RecordBatch(Box::new(
            arrow_format::ipc::RecordBatch {
                length: columns.len() as i64,
                nodes: Some(nodes),
                buffers: Some(buffers),
                compression,
            },
        ))),
        body_length: arrow_data.len() as i64,
        custom_metadata: None,
    };

    let mut builder = Builder::new();
    let ipc_message = builder.finish(&message, None);
    EncodedData {
        ipc_message: ipc_message.to_vec(),
        arrow_data,
    }
}

/// Write dictionary values into two sets of bytes, one for the header (ipc::Schema::Message) and the
/// other for the data
fn dictionary_batch_to_bytes(
    dict_id: i64,
    array: &dyn Array,
    options: &WriteOptions,
    is_little_endian: bool,
) -> EncodedData {
    let mut nodes: Vec<arrow_format::ipc::FieldNode> = vec![];
    let mut buffers: Vec<arrow_format::ipc::Buffer> = vec![];
    let mut arrow_data: Vec<u8> = vec![];

    let length = write_dictionary(
        array,
        &mut buffers,
        &mut arrow_data,
        &mut nodes,
        &mut 0,
        is_little_endian,
        options.compression,
        false,
    );

    let compression = serialize_compression(options.compression);

    let message = arrow_format::ipc::Message {
        version: arrow_format::ipc::MetadataVersion::V5,
        header: Some(arrow_format::ipc::MessageHeader::DictionaryBatch(Box::new(
            arrow_format::ipc::DictionaryBatch {
                id: dict_id,
                data: Some(Box::new(arrow_format::ipc::RecordBatch {
                    length: length as i64,
                    nodes: Some(nodes),
                    buffers: Some(buffers),
                    compression,
                })),
                is_delta: false,
            },
        ))),
        body_length: arrow_data.len() as i64,
        custom_metadata: None,
    };

    let mut builder = Builder::new();
    let ipc_message = builder.finish(&message, None);
    EncodedData {
        ipc_message: ipc_message.to_vec(),
        arrow_data,
    }
}

/// Keeps track of dictionaries that have been written, to avoid emitting the same dictionary
/// multiple times. Can optionally error if an update to an existing dictionary is attempted, which
/// isn't allowed in the `FileWriter`.
pub struct DictionaryTracker {
    written: Dictionaries,
    error_on_replacement: bool,
}

impl DictionaryTracker {
    pub fn new(error_on_replacement: bool) -> Self {
        Self {
            written: Dictionaries::new(),
            error_on_replacement,
        }
    }

    /// Keep track of the dictionary with the given ID and values. Behavior:
    ///
    /// * If this ID has been written already and has the same data, return `Ok(false)` to indicate
    ///   that the dictionary was not actually inserted (because it's already been seen).
    /// * If this ID has been written already but with different data, and this tracker is
    ///   configured to return an error, return an error.
    /// * If the tracker has not been configured to error on replacement or this dictionary
    ///   has never been seen before, return `Ok(true)` to indicate that the dictionary was just
    ///   inserted.
    pub fn insert(&mut self, dict_id: i64, array: &Arc<dyn Array>) -> Result<bool> {
        let values = match array.data_type() {
            DataType::Dictionary(key_type, _, _) => {
                match_integer_type!(key_type, |$T| {
                    let array = array
                        .as_any()
                        .downcast_ref::<DictionaryArray<$T>>()
                        .unwrap();
                    array.values()
                })
            }
            _ => unreachable!(),
        };

        // If a dictionary with this id was already emitted, check if it was the same.
        if let Some(last) = self.written.get(&dict_id) {
            if last.as_ref() == values.as_ref() {
                // Same dictionary values => no need to emit it again
                return Ok(false);
            } else if self.error_on_replacement {
                return Err(ArrowError::InvalidArgumentError(
                    "Dictionary replacement detected when writing IPC file format. \
                     Arrow IPC files only support a single dictionary for a given field \
                     across all batches."
                        .to_string(),
                ));
            }
        };

        self.written.insert(dict_id, values.clone());
        Ok(true)
    }
}

/// Stores the encoded data, which is an ipc::Schema::Message, and optional Arrow data
#[derive(Debug)]
pub struct EncodedData {
    /// An encoded ipc::Schema::Message
    pub ipc_message: Vec<u8>,
    /// Arrow buffers to be written, should be an empty vec for schema messages
    pub arrow_data: Vec<u8>,
}

/// Calculate an 8-byte boundary and return the number of bytes needed to pad to 8 bytes
#[inline]
pub(crate) fn pad_to_8(len: usize) -> usize {
    (((len + 7) & !7) - len) as usize
}

/// An array [`Chunk`] with optional accompanying IPC fields.
#[derive(Debug, Clone, PartialEq)]
pub struct Record<'a> {
    columns: Cow<'a, Chunk<Arc<dyn Array>>>,
    fields: Option<Cow<'a, [IpcField]>>,
}

impl<'a> Record<'a> {
    /// Get the IPC fields for this record.
    pub fn fields(&self) -> Option<&[IpcField]> {
        self.fields.as_deref()
    }

    /// Get the Arrow columns in this record.
    pub fn columns(&self) -> &Chunk<Arc<dyn Array>> {
        self.columns.borrow()
    }
}

impl From<Chunk<Arc<dyn Array>>> for Record<'static> {
    fn from(columns: Chunk<Arc<dyn Array>>) -> Self {
        Self {
            columns: Cow::Owned(columns),
            fields: None,
        }
    }
}

impl<'a, F> From<(Chunk<Arc<dyn Array>>, Option<F>)> for Record<'a>
where
    F: Into<Cow<'a, [IpcField]>>,
{
    fn from((columns, fields): (Chunk<Arc<dyn Array>>, Option<F>)) -> Self {
        Self {
            columns: Cow::Owned(columns),
            fields: fields.map(|f| f.into()),
        }
    }
}

impl<'a, F> From<(&'a Chunk<Arc<dyn Array>>, Option<F>)> for Record<'a>
where
    F: Into<Cow<'a, [IpcField]>>,
{
    fn from((columns, fields): (&'a Chunk<Arc<dyn Array>>, Option<F>)) -> Self {
        Self {
            columns: Cow::Borrowed(columns),
            fields: fields.map(|f| f.into()),
        }
    }
}
32.365517
121
0.540521
bb153d9ff7e81ad3503a31e3cdcac9351d942008
27,332
/*************
 * FetchType *
 *************/

/// 6502 addressing mode, i.e. how the operand bytes are fetched.
#[derive(Clone,Copy)]
pub enum FetchType {
    Accumulator,
    /* Immediate also doubles as Relative */
    Immediate,
    ZeroPage,
    ZeroPageX,
    ZeroPageY,
    Absolute,
    AbsoluteX,
    AbsoluteY,
    Indirect,
    IndirectX,
    IndirectY,
    Implicit,
}

/*************
 * Operation *
 *************/

/// 6502 instruction mnemonic; `Unknown` marks unofficial/illegal opcodes.
#[derive(Clone,Copy)]
pub enum Operation {
    Adc, And, Asl, Bcc, Bcs, Beq, Bit, Bmi, Bne, Bpl, Brk, Bvc, Bvs,
    Clc, Cld, Cli, Clv, Cmp, Cpx, Cpy, Dec, Dex, Dey, Eor, Inc, Inx,
    Iny, Jmp, Jsr, Lda, Ldx, Ldy, Lsr, Nop, Ora, Pha, Php, Pla, Plp,
    Rol, Ror, Rti, Rts, Sbc, Sec, Sed, Sei, Sta, Stx, Sty, Tax, Tay,
    Tsx, Txa, Txs, Tya,
    Unknown,
}

/**********
 * Opcode *
 **********/

/* This is an adapted version of what FCEUX does for opcodes */
/// One decoded opcode table entry: addressing mode, base cycle count
/// (0 for unofficial opcodes in this table), and the operation.
#[derive(Clone,Copy)]
pub struct Opcode {
    pub fetch: FetchType,
    pub cycles: u32,
    pub op: Operation,
}

impl Opcode {
    /// Prints a disassembly of this opcode with the given operand to stdout:
    /// mnemonic followed by the operand formatted per addressing mode.
    /// NOTE(review): branch opcodes use `FetchType::Immediate`, so relative
    /// targets are printed as `#$nn` immediates rather than resolved addresses.
    pub fn print(&self, operand: u16) {
        match self.op {
            Operation::Adc => print!("ADC"),
            Operation::And => print!("AND"),
            Operation::Asl => print!("ASL"),
            Operation::Bcc => print!("BCC"),
            Operation::Bcs => print!("BCS"),
            Operation::Beq => print!("BEQ"),
            Operation::Bit => print!("BIT"),
            Operation::Bmi => print!("BMI"),
            Operation::Bne => print!("BNE"),
            Operation::Bpl => print!("BPL"),
            Operation::Brk => print!("BRK"),
            Operation::Bvc => print!("BVC"),
            Operation::Bvs => print!("BVS"),
            Operation::Clc => print!("CLC"),
            Operation::Cld => print!("CLD"),
            Operation::Cli => print!("CLI"),
            Operation::Clv => print!("CLV"),
            Operation::Cmp => print!("CMP"),
            Operation::Cpx => print!("CPX"),
            Operation::Cpy => print!("CPY"),
            Operation::Dec => print!("DEC"),
            Operation::Dex => print!("DEX"),
            Operation::Dey => print!("DEY"),
            Operation::Eor => print!("EOR"),
            Operation::Inc => print!("INC"),
            Operation::Inx => print!("INX"),
            Operation::Iny => print!("INY"),
            Operation::Jmp => print!("JMP"),
            Operation::Jsr => print!("JSR"),
            Operation::Lda => print!("LDA"),
            Operation::Ldx => print!("LDX"),
            Operation::Ldy => print!("LDY"),
            Operation::Lsr => print!("LSR"),
            Operation::Nop => print!("NOP"),
            Operation::Ora => print!("ORA"),
            Operation::Pha => print!("PHA"),
            Operation::Php => print!("PHP"),
            Operation::Pla => print!("PLA"),
            Operation::Plp => print!("PLP"),
            Operation::Rol => print!("ROL"),
            Operation::Ror => print!("ROR"),
            Operation::Rti => print!("RTI"),
            Operation::Rts => print!("RTS"),
            Operation::Sbc => print!("SBC"),
            Operation::Sec => print!("SEC"),
            Operation::Sed => print!("SED"),
            Operation::Sei => print!("SEI"),
            Operation::Sta => print!("STA"),
            Operation::Stx => print!("STX"),
            Operation::Sty => print!("STY"),
            Operation::Tax => print!("TAX"),
            Operation::Tay => print!("TAY"),
            Operation::Tsx => print!("TSX"),
            Operation::Txa => print!("TXA"),
            Operation::Txs => print!("TXS"),
            Operation::Tya => print!("TYA"),
            Operation::Unknown => print!("ERR"),
        }

        // Operand formatting depends on the addressing mode; one- and
        // two-byte operands are truncated/widened accordingly.
        match self.fetch {
            FetchType::Accumulator => { }
            FetchType::Immediate => print!("#${:02x}", operand as u8),
            FetchType::ZeroPage => print!("${:02x}", operand as u8),
            FetchType::ZeroPageX => print!("${:02x}, x", operand as u8),
            FetchType::ZeroPageY => print!("${:02x}, y", operand as u8),
            FetchType::Absolute => print!("${:04x}", operand),
            FetchType::AbsoluteX => print!("${:04x}, x", operand),
            FetchType::AbsoluteY => print!("${:04x}, y", operand),
            FetchType::Indirect => print!("(${:04x})", operand),
            FetchType::IndirectX => print!("(${:02x}, x)", operand as u8),
            FetchType::IndirectY => print!("(${:02x}), y", operand as u8),
            FetchType::Implicit => { }
        }
        println!("");
    }
}

/// Full 256-entry opcode lookup table, indexed by the raw opcode byte.
pub static OP_TABLE: [Opcode; 0x100] = [
    /* 0x00 */ Opcode{ fetch: FetchType::Implicit, cycles: 7, op: Operation::Brk },
    /* 0x01 */ Opcode{ fetch: FetchType::IndirectX, cycles: 6, op: Operation::Ora },
    /* 0x02 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown },
    /* 0x03 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown },
    /* 0x04 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown },
    /* 0x05 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op:
Operation::Ora }, /* 0x06 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 5, op: Operation::Asl }, /* 0x07 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x08 */ Opcode{ fetch: FetchType::Implicit, cycles: 3, op: Operation::Php }, /* 0x09 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Ora }, /* 0x0a */ Opcode{ fetch: FetchType::Accumulator, cycles: 2, op: Operation::Asl }, /* 0x0b */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x0c */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x0d */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Ora }, /* 0x0e */ Opcode{ fetch: FetchType::Absolute, cycles: 6, op: Operation::Asl }, /* 0x0f */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x10 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Bpl }, /* 0x11 */ Opcode{ fetch: FetchType::IndirectY, cycles: 5, op: Operation::Ora }, /* 0x12 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x13 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x14 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x15 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 4, op: Operation::Ora }, /* 0x16 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 5, op: Operation::Asl }, /* 0x17 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x18 */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Clc }, /* 0x19 */ Opcode{ fetch: FetchType::AbsoluteY, cycles: 4, op: Operation::Ora }, /* 0x1a */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x1b */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x1c */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x1d */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 4, op: Operation::Ora }, /* 0x1e 
*/ Opcode{ fetch: FetchType::AbsoluteX, cycles: 7, op: Operation::Asl }, /* 0x1f */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x20 */ Opcode{ fetch: FetchType::Absolute, cycles: 6, op: Operation::Jsr }, /* 0x21 */ Opcode{ fetch: FetchType::IndirectX, cycles: 6, op: Operation::And }, /* 0x22 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x23 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x24 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Bit }, /* 0x25 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::And }, /* 0x26 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 5, op: Operation::Rol }, /* 0x27 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x28 */ Opcode{ fetch: FetchType::Implicit, cycles: 4, op: Operation::Plp }, /* 0x29 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::And }, /* 0x2a */ Opcode{ fetch: FetchType::Accumulator, cycles: 2, op: Operation::Rol }, /* 0x2b */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x2c */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Bit }, /* 0x2d */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::And }, /* 0x2e */ Opcode{ fetch: FetchType::Absolute, cycles: 6, op: Operation::Rol }, /* 0x2f */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x30 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Bmi }, /* 0x31 */ Opcode{ fetch: FetchType::IndirectY, cycles: 5, op: Operation::And }, /* 0x32 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x33 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x34 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x35 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 4, op: Operation::And }, /* 0x36 */ Opcode{ fetch: 
FetchType::ZeroPageX, cycles: 6, op: Operation::Rol }, /* 0x37 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x38 */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Sec }, /* 0x39 */ Opcode{ fetch: FetchType::AbsoluteY, cycles: 4, op: Operation::And }, /* 0x3a */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x3b */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x3c */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x3d */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 4, op: Operation::And }, /* 0x3e */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 7, op: Operation::Rol }, /* 0x3f */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x40 */ Opcode{ fetch: FetchType::Implicit, cycles: 6, op: Operation::Rti }, /* 0x41 */ Opcode{ fetch: FetchType::IndirectY, cycles: 5, op: Operation::Eor }, /* 0x42 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x43 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x44 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x45 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Eor }, /* 0x46 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 5, op: Operation::Lsr }, /* 0x47 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x48 */ Opcode{ fetch: FetchType::Implicit, cycles: 3, op: Operation::Pha }, /* 0x49 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Eor }, /* 0x4a */ Opcode{ fetch: FetchType::Accumulator, cycles: 2, op: Operation::Lsr }, /* 0x4b */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x4c */ Opcode{ fetch: FetchType::Absolute, cycles: 3, op: Operation::Jmp }, /* 0x4d */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Eor }, /* 0x4e */ Opcode{ fetch: FetchType::Absolute, cycles: 
6, op: Operation::Lsr }, /* 0x4f */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x50 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Bvc }, /* 0x51 */ Opcode{ fetch: FetchType::IndirectY, cycles: 5, op: Operation::Eor }, /* 0x52 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x53 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x54 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x55 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 4, op: Operation::Eor }, /* 0x56 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 6, op: Operation::Lsr }, /* 0x57 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x58 */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Cli }, /* 0x59 */ Opcode{ fetch: FetchType::AbsoluteY, cycles: 4, op: Operation::Eor }, /* 0x5a */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x5b */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x5c */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x5d */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 4, op: Operation::Eor }, /* 0x5e */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 7, op: Operation::Lsr }, /* 0x5f */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x60 */ Opcode{ fetch: FetchType::Implicit, cycles: 6, op: Operation::Rts }, /* 0x61 */ Opcode{ fetch: FetchType::IndirectX, cycles: 6, op: Operation::Adc }, /* 0x62 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x63 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x64 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x65 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Adc }, /* 0x66 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 5, op: Operation::Ror 
}, /* 0x67 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x68 */ Opcode{ fetch: FetchType::Implicit, cycles: 4, op: Operation::Pla }, /* 0x69 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Adc }, /* 0x6a */ Opcode{ fetch: FetchType::Accumulator, cycles: 2, op: Operation::Ror }, /* 0x6b */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x6c */ Opcode{ fetch: FetchType::Indirect, cycles: 5, op: Operation::Jmp }, /* 0x6d */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Adc }, /* 0x6e */ Opcode{ fetch: FetchType::Absolute, cycles: 6, op: Operation::Ror }, /* 0x6f */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x70 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Bvs }, /* 0x71 */ Opcode{ fetch: FetchType::IndirectY, cycles: 5, op: Operation::Adc }, /* 0x72 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x73 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x74 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x75 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 4, op: Operation::Adc }, /* 0x76 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 6, op: Operation::Ror }, /* 0x77 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x78 */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Sei }, /* 0x79 */ Opcode{ fetch: FetchType::AbsoluteY, cycles: 4, op: Operation::Adc }, /* 0x7a */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x7b */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x7c */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x7d */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 4, op: Operation::Adc }, /* 0x7e */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 7, op: Operation::Ror }, /* 0x7f */ Opcode{ fetch: 
FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x80 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x81 */ Opcode{ fetch: FetchType::IndirectX, cycles: 6, op: Operation::Sta }, /* 0x82 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x83 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x84 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Sty }, /* 0x85 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Sta }, /* 0x86 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Stx }, /* 0x87 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x88 */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Dey }, /* 0x89 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x8a */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Txa }, /* 0x8b */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x8c */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Sty }, /* 0x8d */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Sta }, /* 0x8e */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Stx }, /* 0x8f */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x90 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Bcc }, /* 0x91 */ Opcode{ fetch: FetchType::IndirectY, cycles: 6, op: Operation::Sta }, /* 0x92 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x93 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x94 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 4, op: Operation::Sty }, /* 0x95 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 4, op: Operation::Sta }, /* 0x96 */ Opcode{ fetch: FetchType::ZeroPageY, cycles: 4, op: Operation::Stx }, /* 0x97 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, 
op: Operation::Unknown }, /* 0x98 */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Tya }, /* 0x99 */ Opcode{ fetch: FetchType::AbsoluteY, cycles: 5, op: Operation::Sta }, /* 0x9a */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Txs }, /* 0x9b */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x9c */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x9d */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 5, op: Operation::Sta }, /* 0x9e */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0x9f */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xa0 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Ldy }, /* 0xa1 */ Opcode{ fetch: FetchType::IndirectX, cycles: 6, op: Operation::Lda }, /* 0xa2 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Ldx }, /* 0xa3 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xa4 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Ldy }, /* 0xa5 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Lda }, /* 0xa6 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Ldx }, /* 0xa7 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xa8 */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Tay }, /* 0xa9 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Lda }, /* 0xaa */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Tax }, /* 0xab */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xac */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Ldy }, /* 0xad */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Lda }, /* 0xae */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Ldx }, /* 0xaf */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xb0 */ 
Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Bcs }, /* 0xb1 */ Opcode{ fetch: FetchType::IndirectY, cycles: 5, op: Operation::Lda }, /* 0xb2 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xb3 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xb4 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 4, op: Operation::Ldy }, /* 0xb5 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 4, op: Operation::Lda }, /* 0xb6 */ Opcode{ fetch: FetchType::ZeroPageY, cycles: 4, op: Operation::Ldx }, /* 0xb7 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xb8 */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Clv }, /* 0xb9 */ Opcode{ fetch: FetchType::AbsoluteY, cycles: 4, op: Operation::Lda }, /* 0xba */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Tsx }, /* 0xbb */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xbc */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 4, op: Operation::Ldy }, /* 0xbd */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 4, op: Operation::Lda }, /* 0xbe */ Opcode{ fetch: FetchType::AbsoluteY, cycles: 4, op: Operation::Ldx }, /* 0xbf */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xc0 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Cpy }, /* 0xc1 */ Opcode{ fetch: FetchType::IndirectX, cycles: 6, op: Operation::Cmp }, /* 0xc2 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xc3 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xc4 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Cpy }, /* 0xc5 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Cmp }, /* 0xc6 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 5, op: Operation::Dec }, /* 0xc7 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xc8 */ Opcode{ fetch: FetchType::Implicit, 
cycles: 2, op: Operation::Iny }, /* 0xc9 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Cmp }, /* 0xca */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Dex }, /* 0xcb */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xcc */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Cpy }, /* 0xcd */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Cmp }, /* 0xce */ Opcode{ fetch: FetchType::Absolute, cycles: 6, op: Operation::Dec }, /* 0xcf */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xd0 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Bne }, /* 0xd1 */ Opcode{ fetch: FetchType::IndirectY, cycles: 5, op: Operation::Cmp }, /* 0xd2 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xd3 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xd4 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xd5 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 4, op: Operation::Cmp }, /* 0xd6 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 6, op: Operation::Dec }, /* 0xd7 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xd8 */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Cld }, /* 0xd9 */ Opcode{ fetch: FetchType::AbsoluteY, cycles: 4, op: Operation::Cmp }, /* 0xda */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xdb */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xdc */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xdd */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 4, op: Operation::Cmp }, /* 0xde */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 7, op: Operation::Dec }, /* 0xdf */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xe0 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Cpx 
}, /* 0xe1 */ Opcode{ fetch: FetchType::IndirectX, cycles: 6, op: Operation::Sbc }, /* 0xe2 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xe3 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xe4 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Cpx }, /* 0xe5 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 3, op: Operation::Sbc }, /* 0xe6 */ Opcode{ fetch: FetchType::ZeroPage, cycles: 5, op: Operation::Inc }, /* 0xe7 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xe8 */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Inx }, /* 0xe9 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Sbc }, /* 0xea */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Nop }, /* 0xeb */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xec */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Cpx }, /* 0xed */ Opcode{ fetch: FetchType::Absolute, cycles: 4, op: Operation::Sbc }, /* 0xee */ Opcode{ fetch: FetchType::Absolute, cycles: 6, op: Operation::Inc }, /* 0xef */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xf0 */ Opcode{ fetch: FetchType::Immediate, cycles: 2, op: Operation::Beq }, /* 0xf1 */ Opcode{ fetch: FetchType::IndirectY, cycles: 5, op: Operation::Sbc }, /* 0xf2 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xf3 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xf4 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xf5 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 4, op: Operation::Sbc }, /* 0xf6 */ Opcode{ fetch: FetchType::ZeroPageX, cycles: 6, op: Operation::Inc }, /* 0xf7 */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xf8 */ Opcode{ fetch: FetchType::Implicit, cycles: 2, op: Operation::Sed }, /* 0xf9 */ Opcode{ fetch: 
FetchType::AbsoluteY, cycles: 4, op: Operation::Sbc }, /* 0xfa */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xfb */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xfc */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, /* 0xfd */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 4, op: Operation::Sbc }, /* 0xfe */ Opcode{ fetch: FetchType::AbsoluteX, cycles: 7, op: Operation::Inc }, /* 0xff */ Opcode{ fetch: FetchType::Implicit, cycles: 0, op: Operation::Unknown }, ];
53.175097
85
0.593663
d56a945f09ad0bd8b3976d5ec96539b2dd24fb74
9,505
// Copyright 2017-2020 Parity Technologies (UK) Ltd. // This file is part of Polkadot. // Polkadot is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Polkadot is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Polkadot. If not, see <http://www.gnu.org/licenses/>. //! Autogenerated weights for `pallet_staking` //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 3.0.0 //! DATE: 2021-07-01, STEPS: `[50, ]`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` //! EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("polkadot-dev"), DB CACHE: 128 // Executed Command: // target/release/polkadot // benchmark // --chain=polkadot-dev // --steps=50 // --repeat=20 // --pallet=pallet_staking // --extrinsic=* // --execution=wasm // --wasm-execution=compiled // --heap-pages=4096 // --header=./file_header.txt // --output=./runtime/polkadot/src/weights/ #![allow(unused_parens)] #![allow(unused_imports)] use frame_support::{traits::Get, weights::Weight}; use sp_std::marker::PhantomData; /// Weight functions for `pallet_staking`. 
/// Weight functions for `pallet_staking`.
///
/// NOTE: this file is autogenerated by the Substrate benchmark CLI (see the
/// command recorded in the header above). The base constants are measured
/// execution times in picoseconds; the `saturating_mul` terms scale with the
/// benchmarked input parameter, and the `reads`/`writes` terms account for
/// database access. Do not hand-tune the numbers — regenerate instead.
pub struct WeightInfo<T>(PhantomData<T>);
impl<T: frame_system::Config> pallet_staking::WeightInfo for WeightInfo<T> {
	fn bond() -> Weight {
		(70_259_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(5 as Weight))
			.saturating_add(T::DbWeight::get().writes(4 as Weight))
	}
	fn bond_extra() -> Weight {
		(53_621_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(3 as Weight))
			.saturating_add(T::DbWeight::get().writes(2 as Weight))
	}
	fn unbond() -> Weight {
		(57_038_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(6 as Weight))
			.saturating_add(T::DbWeight::get().writes(3 as Weight))
	}
	// `s`: number of unlocking chunks already present for the stash.
	fn withdraw_unbonded_update(s: u32, ) -> Weight {
		(49_182_000 as Weight)
			// Standard Error: 0
			.saturating_add((31_000 as Weight).saturating_mul(s as Weight))
			.saturating_add(T::DbWeight::get().reads(4 as Weight))
			.saturating_add(T::DbWeight::get().writes(3 as Weight))
	}
	// `s`: number of slashing spans to remove when reaping the stash.
	fn withdraw_unbonded_kill(s: u32, ) -> Weight {
		(81_006_000 as Weight)
			// Standard Error: 1_000
			.saturating_add((2_333_000 as Weight).saturating_mul(s as Weight))
			.saturating_add(T::DbWeight::get().reads(8 as Weight))
			.saturating_add(T::DbWeight::get().writes(6 as Weight))
			// One extra write per removed slashing span.
			.saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight)))
	}
	fn validate() -> Weight {
		(31_525_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(6 as Weight))
			.saturating_add(T::DbWeight::get().writes(2 as Weight))
	}
	// `k`: number of nominators being kicked; each costs one read and one write.
	fn kick(k: u32, ) -> Weight {
		(10_487_000 as Weight)
			// Standard Error: 7_000
			.saturating_add((16_334_000 as Weight).saturating_mul(k as Weight))
			.saturating_add(T::DbWeight::get().reads(1 as Weight))
			.saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(k as Weight)))
			.saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(k as Weight)))
	}
	// `n`: number of validators being nominated; each costs one extra read.
	fn nominate(n: u32, ) -> Weight {
		(38_083_000 as Weight)
			// Standard Error: 10_000
			.saturating_add((5_185_000 as Weight).saturating_mul(n as Weight))
			.saturating_add(T::DbWeight::get().reads(7 as Weight))
			.saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(n as Weight)))
			.saturating_add(T::DbWeight::get().writes(2 as Weight))
	}
	// NOTE(review): no `writes` term here, unlike the other dispatchables —
	// presumably the benchmark measured no DB writes for `chill`; confirm on
	// regeneration rather than editing by hand.
	fn chill() -> Weight {
		(16_783_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(3 as Weight))
	}
	fn set_payee() -> Weight {
		(11_391_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(1 as Weight))
			.saturating_add(T::DbWeight::get().writes(1 as Weight))
	}
	fn set_controller() -> Weight {
		(24_470_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(3 as Weight))
			.saturating_add(T::DbWeight::get().writes(3 as Weight))
	}
	fn set_validator_count() -> Weight {
		(1_879_000 as Weight)
			.saturating_add(T::DbWeight::get().writes(1 as Weight))
	}
	fn force_no_eras() -> Weight {
		(2_139_000 as Weight)
			.saturating_add(T::DbWeight::get().writes(1 as Weight))
	}
	fn force_new_era() -> Weight {
		(2_096_000 as Weight)
			.saturating_add(T::DbWeight::get().writes(1 as Weight))
	}
	fn force_new_era_always() -> Weight {
		(2_089_000 as Weight)
			.saturating_add(T::DbWeight::get().writes(1 as Weight))
	}
	// `v`: number of invulnerable validators in the new set.
	fn set_invulnerables(v: u32, ) -> Weight {
		(2_143_000 as Weight)
			// Standard Error: 0
			.saturating_add((23_000 as Weight).saturating_mul(v as Weight))
			.saturating_add(T::DbWeight::get().writes(1 as Weight))
	}
	// `s`: number of slashing spans on the stash being force-unstaked.
	fn force_unstake(s: u32, ) -> Weight {
		(58_264_000 as Weight)
			// Standard Error: 1_000
			.saturating_add((2_309_000 as Weight).saturating_mul(s as Weight))
			.saturating_add(T::DbWeight::get().reads(6 as Weight))
			.saturating_add(T::DbWeight::get().writes(6 as Weight))
			.saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight)))
	}
	// `s`: number of deferred slashes to cancel. Note the large base weight —
	// this is a governance-only operation.
	fn cancel_deferred_slash(s: u32, ) -> Weight {
		(3_444_385_000 as Weight)
			// Standard Error: 224_000
			.saturating_add((19_743_000 as Weight).saturating_mul(s as Weight))
			.saturating_add(T::DbWeight::get().reads(1 as Weight))
			.saturating_add(T::DbWeight::get().writes(1 as Weight))
	}
	// `n`: number of nominators to pay out; each adds 3 reads and 1 write.
	fn payout_stakers_dead_controller(n: u32, ) -> Weight {
		(106_496_000 as Weight)
			// Standard Error: 13_000
			.saturating_add((46_186_000 as Weight).saturating_mul(n as Weight))
			.saturating_add(T::DbWeight::get().reads(10 as Weight))
			.saturating_add(T::DbWeight::get().reads((3 as Weight).saturating_mul(n as Weight)))
			.saturating_add(T::DbWeight::get().writes(2 as Weight))
			.saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(n as Weight)))
	}
	// `n`: number of nominators to pay out; heavier than the dead-controller
	// path because each staked payout also re-bonds (5 reads / 3 writes each).
	fn payout_stakers_alive_staked(n: u32, ) -> Weight {
		(131_706_000 as Weight)
			// Standard Error: 20_000
			.saturating_add((60_519_000 as Weight).saturating_mul(n as Weight))
			.saturating_add(T::DbWeight::get().reads(11 as Weight))
			.saturating_add(T::DbWeight::get().reads((5 as Weight).saturating_mul(n as Weight)))
			.saturating_add(T::DbWeight::get().writes(3 as Weight))
			.saturating_add(T::DbWeight::get().writes((3 as Weight).saturating_mul(n as Weight)))
	}
	// `l`: number of unlocking chunks being re-bonded.
	fn rebond(l: u32, ) -> Weight {
		(46_089_000 as Weight)
			// Standard Error: 1_000
			.saturating_add((64_000 as Weight).saturating_mul(l as Weight))
			.saturating_add(T::DbWeight::get().reads(3 as Weight))
			.saturating_add(T::DbWeight::get().writes(3 as Weight))
	}
	// `e`: number of eras whose history is pruned; cost is entirely per-era.
	fn set_history_depth(e: u32, ) -> Weight {
		(0 as Weight)
			// Standard Error: 67_000
			.saturating_add((32_486_000 as Weight).saturating_mul(e as Weight))
			.saturating_add(T::DbWeight::get().reads(2 as Weight))
			.saturating_add(T::DbWeight::get().writes(4 as Weight))
			.saturating_add(T::DbWeight::get().writes((7 as Weight).saturating_mul(e as Weight)))
	}
	// `s`: number of slashing spans removed while reaping the stash.
	fn reap_stash(s: u32, ) -> Weight {
		(69_019_000 as Weight)
			// Standard Error: 0
			.saturating_add((2_317_000 as Weight).saturating_mul(s as Weight))
			.saturating_add(T::DbWeight::get().reads(7 as Weight))
			.saturating_add(T::DbWeight::get().writes(8 as Weight))
			.saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight)))
	}
	// `v`: validator count, `n`: nominator count for the new era election.
	fn new_era(v: u32, n: u32, ) -> Weight {
		(0 as Weight)
			// Standard Error: 666_000
			.saturating_add((306_698_000 as Weight).saturating_mul(v as Weight))
			// Standard Error: 33_000
			.saturating_add((47_483_000 as Weight).saturating_mul(n as Weight))
			.saturating_add(T::DbWeight::get().reads(10 as Weight))
			.saturating_add(T::DbWeight::get().reads((3 as Weight).saturating_mul(v as Weight)))
			.saturating_add(T::DbWeight::get().reads((3 as Weight).saturating_mul(n as Weight)))
			.saturating_add(T::DbWeight::get().writes(4 as Weight))
			.saturating_add(T::DbWeight::get().writes((3 as Weight).saturating_mul(v as Weight)))
	}
	// `v`: validators, `n`: nominators, `s`: slashing spans — read-only snapshot
	// gathering for the NPoS election, hence no `writes` terms.
	fn get_npos_voters(v: u32, n: u32, s: u32, ) -> Weight {
		(0 as Weight)
			// Standard Error: 97_000
			.saturating_add((25_109_000 as Weight).saturating_mul(v as Weight))
			// Standard Error: 97_000
			.saturating_add((27_162_000 as Weight).saturating_mul(n as Weight))
			// Standard Error: 3_332_000
			.saturating_add((50_488_000 as Weight).saturating_mul(s as Weight))
			.saturating_add(T::DbWeight::get().reads(3 as Weight))
			.saturating_add(T::DbWeight::get().reads((3 as Weight).saturating_mul(v as Weight)))
			.saturating_add(T::DbWeight::get().reads((3 as Weight).saturating_mul(n as Weight)))
			.saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(s as Weight)))
	}
	// `v`: number of validator targets enumerated.
	fn get_npos_targets(v: u32, ) -> Weight {
		(0 as Weight)
			// Standard Error: 31_000
			.saturating_add((10_220_000 as Weight).saturating_mul(v as Weight))
			.saturating_add(T::DbWeight::get().reads(1 as Weight))
			.saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(v as Weight)))
	}
	fn set_staking_limits() -> Weight {
		(5_584_000 as Weight)
			.saturating_add(T::DbWeight::get().writes(5 as Weight))
	}
	fn chill_other() -> Weight {
		(39_524_000 as Weight)
			.saturating_add(T::DbWeight::get().reads(7 as Weight))
			.saturating_add(T::DbWeight::get().writes(2 as Weight))
	}
}
41.147186
95
0.699001
56382b1011c449fefa955a9a753a305b9864c67f
463
fn main() { println!("Test", 123, 456, 789); //~^ ERROR multiple unused formatting arguments println!("Test2", 123, //~ ERROR multiple unused formatting arguments 456, 789 ); println!("Some stuff", UNUSED="args"); //~ ERROR named argument never used println!("Some more $STUFF", "woo!", //~ ERROR multiple unused formatting arguments STUFF= "things" , UNUSED="args"); }
24.368421
78
0.557235